diff --git a/.appveyor.yml b/.appveyor.yml
new file mode 100644
index 00000000000..31cc2dc4be9
--- /dev/null
+++ b/.appveyor.yml
@@ -0,0 +1,9 @@
+version: 1-{branch}+{build}
+
+build_script:
+  - cmd: C:\MinGW\msys\1.0\bin\make
+  - cmd: rmdir /s /q .git
+before_test:
+  - cmd: set PATH=%PATH%;C:\Program Files\erl8.3\erts-8.3\bin
+test_script:
+  - cmd: C:\MinGW\msys\1.0\bin\make --keep-going test_windows
diff --git a/.gitignore b/.gitignore
index 9f5a6a2479e..fbebd7b802a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,14 +1,14 @@
-/.eunit
-/.release
-/docs
-/ebin
-/lib/*/ebin/*
-/lib/*/tmp
-/lib/elixir/src/elixir.app.src
-/lib/elixir/src/*_lexer.erl
+/doc/
+/lib/*/ebin/
+/lib/*/_build/
+/lib/*/tmp/
 /lib/elixir/src/*_parser.erl
-/lib/elixir/test/ebin
-/rel/elixir
+/lib/elixir/src/elixir.app.src
+/lib/elixir/test/ebin/
+/man/elixir.1
+/man/iex.1
+/Docs-v*.zip
+/Precompiled-v*.zip
+/.eunit
+.elixir.plt
 erl_crash.dump
-.dialyzer_plt
-.dialyzer.base_plt
diff --git a/.travis.yml b/.travis.yml
index dcb0d1f96b1..e33c804048b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,9 +1,32 @@
 language: erlang
+sudo: false
+
+os: linux
+otp_release: 18.0
+
+matrix:
+  include:
+    - os: linux
+      otp_release: 18.1
+    - os: linux
+      otp_release: 18.2
+    - os: linux
+      otp_release: 18.3
+    - os: linux
+      otp_release: 19.0
+    - os: linux
+      otp_release: 19.1
+    - os: linux
+      otp_release: 19.2
+    - os: linux
+      otp_release: 19.3
+    - os: linux
+      otp_release: 20.0-rc1
+
 script: "make compile && rm -rf .git && make test"
+
 notifications:
-  irc: "irc.freenode.org#elixir-lang"
   recipients:
     - jose.valim@plataformatec.com.br
    - eric.meadows.jonsson@gmail.com
-otp_release:
-  - 17.0
+
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 104d62bc78f..7828c75f041 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,1133 +1,107 @@
-# Changelog
+# Changelog for Elixir v1.5
-## v0.14.3-dev
+## v1.5.0-dev
-* Enhancements
+### 1. Enhancements
-* Bug fixes
-  * [Kernel] `|>`, `<<<`, `>>>` and `^^^` were made left associative in operator table
-  * [Kernel] `<`, `>`, `<=`, `>=` were given higher precedence than comparison ones (`==`, `!=`, etc) in operator table
+#### Elixir
-* Soft deprecations (no warnings emitted)
+  * [Access] Optimize `Access.get/2`
+  * [Calendar] Limit `Calendar.ISO` up to year 10000
+  * [Calendar] Add Rata Die format for conversions between Calendars and `Date.convert/2`, `Time.convert/2`, `NaiveDateTime.convert/2` and `DateTime.convert/2` (as well as bang variants)
+  * [Calendar] Add `:calendar` field to `Time` struct
+  * [Calendar] Add `Date.diff/2`
+  * [Enum] Add `Enum.chunk_by/4` and `Stream.chunk_by/4`
+  * [Exception] Add `Exception.blame/3` that adds metadata to exceptions
+  * [File] Add `File.read_link/1` and `File.read_link!/1`
+  * [File] Introduce `:trim_bom` option for `File.stream!/2`
+  * [Inspect] Add `:printable_limit` to control the limit of printable structures
+  * [Integer] Add `Integer.gcd/2`
+  * [Kernel] Use the new `debug_info` chunk in OTP 20. This provides a mechanism for tools to retrieve the Elixir AST from beam files
+  * [Kernel] `defoverridable/1` accepts a module name as argument and marks all callbacks as overridable
+  * [Kernel] Allow non-quoted Unicode atoms and variables according to Unicode Annex #31 (see Unicode Syntax document)
+  * [Kernel] Warn when a `:__struct__` key is used when building/updating structs
+  * [Keyword] Add `replace/3` and `replace!/3` for replacing an existing key
+  * [List] Add `List.starts_with?/2`
+  * [Macro] Introduce `Macro.generate_arguments/2`
+  * [Map] Optimize `Map.merge/3` by choosing merge direction
+  * [Map] Add `replace/3` and `replace!/3` for replacing an existing key
+  * [MapSet] Reduce `MapSet` size when serialized to approximately half
+  * [Process] Add `Process.cancel_timer/2`
+  * [Registry] Support ETS guard conditions in `Registry.match/3`
+  * [Task] Support `:on_timeout` in `Task.async_stream` to control how tasks are terminated
-* Deprecations
+#### ExUnit
-* Backwards incompatible changes
+  * [ExUnit] Show code snippet from test source file in case of test errors
+  * [ExUnit] Show the value of variables used in an assertion
+  * [ExUnit] Use `Exception.blame/3` when formatting test errors
-## v0.14.2 (2014-06-29)
+#### IEx
-* Enhancements
-  * [Enum] Improve performance of `Enum.join/2` and `Enum.map_join/3` by using iolists
-  * [Kernel] Ensure compatibility with Erlang 17.1
-  * [Kernel] Support `@external_resource` attribute to add external dependencies to a module
-  * [Mix] Allow built Git dependencies to run on a system without Git by passing `--no-deps-check`
-  * [Mix] Add `MIX_ARCHIVES` env variable (it is recommended for Elixir build tools to swap this environment)
-  * [Task] Set `:proc_lib` initial call on task to aid debugging
-  * [Typespec] Delay typespec compilation to after expansion
-  * [URI] `parse/1` now accepts `%URI{}` as argument and returns the URI itself
+  * [IEx.Evaluator] Use `Exception.blame/3` when showing errors in the terminal
+  * [IEx.Helpers] Add `e/1` IEx helper to list all exports in a module
+  * [IEx.Info] Implement `IEx.Info` protocol for calendar types
-* Bug fixes
-  * [CLI] Support paths inside archives in `-pa` and `-pz` options
-  * [IEx] Remove delay when printing data from the application start callback
-  * [IEx] Ensure we show a consistent error when we cannot evaluate `.iex.exs`
-  * [Kernel] Ensure derived protocols are defined with a file
-  * [Kernel] Change precedence of `&` to not special case `/`
-  * [Kernel] Ensure we can only use variables and `\\` as arguments of bodyless clause
+#### Logger
-* Soft deprecations (no warnings emitted)
-  * [EEx] Using `EEx.TransformerEngine` and `EEx.AssignsEngine` are deprecated in favor of function composition with `Macro.prewalk/1` (see `EEx.SmartEngine` for an example)
-  * [Kernel] `Kernel.xor/2` is deprecated
-  * [Mix] `Mix.Generator.from_file/1` is deprecated in favor of passing `from_file: file` option to `embed_text/2` and `embed_template/2` (note though that `from_file/1` expects a path relative to the current file while the `from_file: file` expects one relative to the current working directory)
+  * [Logger] Add `metadata: :all` configuration to log all metadata
-* Deprecations
-  * [Kernel] `size/1` is deprecated in favor of `byte_size/1` and `tuple_size/1` (this change was soft deprecated two releases ago)
+#### Mix
-* Backwards incompatible changes
-  * [CLI] Remove support for the `--gen-debug` option as its usage is not documented by OTP
-  * [Kernel] Sigils no longer balance start and end tokens, e.g. the sigil `~s(f(o)o)` is no longer valid as it finishes in the first closing `)`
-  * [Kernel] Variables set in `cond` clause heads are no longer available outside of that particular `cond` clause (this is the behaviour also found in `case`, `receive` and friends)
-  * [System] `build_info/0` now returns a map
+  * [mix escript.build] Strip debug information from escripts by default and add option `:strip_beam` which defaults to true
+  * [mix loadpaths] Ensure `--no-deps-check` does not trigger SCM callbacks (such as `git`)
+  * [mix local.hex] Add `--if-missing` flag to `local.hex` mix task
+  * [mix profile.cprof] Add `Mix.Tasks.Profile.Cprof` for count-based profiling
-## v0.14.1 (2014-06-18)
+### 2. Bug fixes
-* Enhancements
-  * [Base] Decoding and encoding functions now accept the `:case` as an option
-  * [ExUnit] The test process now exits with `:shutdown` reason
-  * [GenEvent] `GenEvent.stream/2` now accepts `:sync` and `:async` modes
-  * [Node] Add `Node.start/3` and `Node.stop/0`
-  * [String] Updated Unicode database to 7.0
-  * [Task] Log when tasks crash
+#### Elixir
-* Bug fixes
-  * [Enum] `Enum.slice/2` and `Enum.slice/3` always returns a list (and never nil)
-  * [Kernel] Disambiguate (w)erl to (w)erl.exe
-  * [Mix] Ensure umbrella project is recompiled when a dependency inside an umbrella child changes
-  * [OptionParser] Do not allow underscores in option names
-  * [Path] Fix path expansion of `"/.."`
-  * [Path] Do not match files starting with `.` in `Path.wildcard/2` by default
-  * [Process] `Process.info(pid, :registered_name)` returns `{:registered_name, nil}` if there is no registered name
-  * [String] `String.slice/2` and `String.slice/3` always returns a list (and never nil)
-  * [URI] `encode/1` does not escape reserved/unreserved characters by default nor encodes whitespace as `+` (check `URI.encode_www_form/1` and `URI.decode_www_form/1` for previous behaviour)
+  * [File] Support `:ram`/`:raw` files in `File.copy/2`
+  * [Kernel] Support guards on anonymous functions of zero arity
+  * [Kernel] Fix compilation of maps used as map keys inside matches
+  * [Record] Properly escape quoted expressions passed to `defrecord`
+  * [String] Consider Unicode non-characters valid according to the specification in `String.valid?/1`
-* Deprecations
-  * [Mix] `:escript_*` options were moved into a single `:escript` group
+#### ExUnit
-* Backwards incompatible changes
-  * [GenEvent] `GenEvent.stream/2` defaults to `:sync` mode
-  * [Kernel] Remove `get_in/1`
+  * [ExUnit] Properly account failed tests when `setup_all` fails
-## v0.14.0 (2014-06-08)
+#### IEx
-* Enhancements
-  * [ExUnit] Add `on_exit/1` callbacks that are guaranteed to run once the test process exits and always in another process
-  * [Kernel] Store documentation in the abstract code to avoid loading them when the module is loaded
-  * [Kernel] Add `get_in/2`, `put_in/3`, `update_in/3` and `get_and_update_in/3` to handle nested data structure operations
-  * [Kernel] Add `get_in/1`, `put_in/2`, `update_in/2` and `get_and_update_in/2` to handle nested data structure operations via paths
-  * [Mix] Add `Mix.Config` to ease definition of configuration files
-  * [Mix] Add `mix loadconfig` task that can be called multiple times to load external configs
-  * [Mix] Support `--config` option on `mix run`
-  * [Mix] Support `HTTP_PROXY` and `HTTPS_PROXY` on Mix url commands
-  * [Mix] Support `--names` options in `mix help` which emit only names (useful for autocompletion)
-  * [Protocol] Add `Protocol.consolidate/2`, `Protocol.consolidated?/1` and a `mix compile.protocols` task for protocol consolidation
-  * [Protocol] Add `Protocol.derive/3` for runtime deriving of a struct
-  * [String] Add `String.chunk/2`
-  * [Struct] Add support for `@derive` before `defstruct/2` definitions
+  * [IEx] Skip autocompletion of module names that are invalid without being quoted
+  * [IEx] Do not start oldshell alongside IEx
-* Bug fixes
-  * [File] `File.rm` now consistently deletes read-only across operating systems
-  * [Kernel] Ensure Mix `_build` structure works on Windows when copying projects
-  * [Kernel] Ensure `1.0E10` (with uppercase E) is also valid syntax
-  * [Mix] Fix `mix do` task for Windows' powershell users
-  * [Path] Fix `Path.absname("/")` and `Path.expand("/")` to return the absolute path `"/"`.
+#### Mix
-* Soft deprecations (no warnings emitted)
-  * [Kernel] `size/1` is deprecated, please use `byte_size/1` or `tuple_size/1` instead
-  * [ExUnit] `teardown/2` and `teardown_all/2` are deprecated in favor of `on_exit/1` callbacks
+  * [mix compile.elixir] Store multiple sources in case of module conflicts. This solves an issue where `_build` would get corrupted when compiling Elixir projects with module conflicts
-* Deprecations
-  * [Access] `Access.access/2` is deprecated in favor of `Access.get/2`
-  * [Dict] `Dict.Behaviour` is deprecated in favor of `Dict`
-  * [Kernel] `Application.Behaviour`, `GenEvent.Behaviour`, `GenServer.Behaviour` and `Supervisor.Behaviour` are deprecated in favor of `Application`, `GenEvent`, `GenServer` and `Supervisor`
-  * [Kernel] `defexception/3` is deprecated in favor of `defexception/1`
-  * [Kernel] `raise/3` is deprecated in favor of `reraise/2`
-  * [Kernel] `set_elem/3` is deprecated in favor of `put_elem/3`
-  * [Kernel] Passing an atom `var!/1` is deprecated, variables can be built dynamically with `Macro.var/2`
-  * [Mix] Exceptions that define a `:mix_error` field to be compatible with Mix are no longer supported. Instead please provide a `:mix` field and use `Mix.raise/1` and `Mix.raise/2`
+### 3. Soft deprecations (no warnings emitted)
-* Backwards incompatible changes
-  * [Access] `Kernel.access/2` no longer exists and the `Access` protocol now requires `get/2` (instead of `access/2`) and `get_and_update/3` to be implemented
-  * [Kernel] Retrieving docs as `module.__info__(:docs)` is no longer supported, please use `Code.get_docs/2` instead
-  * [Kernel] `Code.compiler_options/1` no longer accepts custom options, only the ones specified by Elixir (use mix config instead)
-  * [Mix] `mix new` no longer generates a supervision tree by default, please pass `--sup` instead
-  * [Task] Tasks are automatically linked to callers and a failure in the task will crash the caller directly
+#### Elixir
-## v0.13.3 (2014-05-24)
+  * [Kernel] `not left in right` is soft-deprecated in favor of `left not in right`
-* Enhancements
-  * [OptionParser] Add `:strict` option that only parses known switches
-  * [OptionParser] Add `next/2` useful for manual parsing of options
-  * [Macro] Add `Macro.prewalk/2/3` and `Macro.postwalk/2/3`
-  * [Kernel] `GenEvent`, `GenServer`, `Supervisor`, `Agent` and `Task` modules added
-  * [Kernel] Make deprecations compiler warnings to avoid the same deprecation being printed multiple times
+### 4. Deprecations
-* Bug fixes
-  * [Enum] Fix `Enum.join/2` and `Enum.map_join/3` for empty binaries at the beginning of the collection
-  * [ExUnit] Ensure the formatter doesn't error when printing :EXITs
-  * [Kernel] Rename `ELIXIR_ERL_OPTS` to `ELIXIR_ERL_OPTIONS` for consistency with `ERL_COMPILER_OPTIONS`
-  * [OptionParser] Parse `-` as a plain argument
-  * [OptionParser] `--` is always removed from argument list on `parse/2` and when it is the leading entry on `parse_head/2`
-  * [Regex] Properly escape regex (previously regex controls were double escaped)
+#### Elixir
-* Soft deprecations (no warnings emitted)
-  * [Dict] `Dict.Behaviour` is deprecated in favor of `Dict`
-  * [Kernel] `Application.Behaviour`, `GenEvent.Behaviour`, `GenServer.Behaviour` and `Supervisor.Behaviour` are deprecated in favor of `Application`, `GenEvent`, `GenServer` and `Supervisor`
-  * [Kernel] `defexception/3` is deprecated in favor of `defexception/1`
-  * [Kernel] `raise/3` is deprecated in favor of `reraise/2`
-  * [Kernel] `set_elem/3` is deprecated in favor of `put_elem/3`
+  * `Atom.to_char_list/1`, `Float.to_char_list/1`, `Integer.to_char_list/1`, `Integer.to_char_list/2`, `Kernel.to_char_list/1`, `List.Chars.to_char_list/1`, `String.to_char_list/1` have been deprecated in favor of their `to_charlist` version. This aligns with the naming conventions in both Erlang and Elixir
+  * [Calendar] Deprecate `NaiveDateTime` and `DateTime` in `Date.to_iso8601/1`, `Date.to_erl/1`, `Time.to_iso8601/1` and `Time.to_erl/1` to avoid loss of precision
+  * [Enum] Deprecate `Enum.filter_map/3` in favor of `Enum.filter/2` + `Enum.map/2` or for-comprehensions
+  * [GenEvent] Deprecate `GenEvent` and provide alternatives in its docs
+  * [Kernel] Using `()` to mean `nil` is deprecated
+  * [Kernel] `:as_char_lists` value in `Inspect.Opts.t/0` type, in favor of `:as_charlists`
+  * [Kernel] `:char_lists` key in `Inspect.Opts.t/0` type, in favor of `:charlists`
+  * [Module] Using Erlang parse transforms via `@compile {:parse_transform, _}` is deprecated
+  * [Stream] Deprecate `Stream.filter_map/3` in favor of `Stream.filter/2` + `Stream.map/2`
+  * [String] `String.ljust/3` and `String.rjust/3` are deprecated in favor of `String.pad_leading/3` and `String.pad_trailing/3` with a binary padding
+  * [String] `String.strip/1` and `String.strip/2` are deprecated in favor of `String.trim/1` and `String.trim/2`
+  * [String] `String.lstrip/1` and `String.rstrip/1` are deprecated in favor of `String.trim_leading/1` and `String.trim_trailing/1`
+  * [String] `String.lstrip/2` and `String.rstrip/2` are deprecated in favor of `String.trim_leading/2` and `String.trim_trailing/2` with a binary as second argument
+  * [Typespec] `char_list/0` type is deprecated in favor of `charlist/0`
-* Soft deprecations for conversions (no warnings emitted)
-  * [Kernel] `atom_to_binary/1` and `atom_to_list/1` are deprecated in favor of `Atom.to_string/1` and `Atom.to_char_list/1`
-  * [Kernel] `bitstring_to_list/1` and `list_to_bitstring/1` are deprecated in favor of the `:erlang` ones
-  * [Kernel] `binary_to_atom/1`, `binary_to_existing_atom/1`, `binary_to_float/1`, `binary_to_integer/1` and `binary_to_integer/2` are deprecated in favor of conversion functions in `String`
-  * [Kernel] `float_to_binary/*` and `float_to_list/*` are deprecated in favor of `Float.to_string/*` and `Float.to_char_list/*`
-  * [Kernel] `integer_to_binary/*` and `integer_to_list/*` are deprecated in favor of `Integer.to_string/*` and `Integer.to_char_list/*`
-  * [Kernel] `iodata_to_binary/1` and `iodata_length/1` are deprecated in favor of `IO.iodata_to_binary/1` and `IO.iodata_length/1`
-  * [Kernel] `list_to_atom/1`, `list_to_existing_atom/1`, `list_to_float/1`, `list_to_integer/1`, `list_to_integer/2` and `list_to_tuple/1` are deprecated in favor of conversion functions in `List`
-  * [Kernel] `tuple_to_list/1` is deprecated in favor of `Tuple.to_list/1`
-  * [List] `List.from_char_data/1` and `List.from_char_data!/1` deprecated in favor of `String.to_char_list/1`
-  * [String] `String.from_char_data/1` and `String.from_char_data!/1` deprecated in favor of `List.to_string/1`
+## v1.4
-* Deprecations
-  * [Kernel] `is_exception/1`, `is_record/1` and `is_record/2` are deprecated in favor of `Exception.exception?/1`, `Record.record?/1` and `Record.record?/2`
-  * [Kernel] `defrecord/3` is deprecated in favor of structs
-  * [Kernel] `:hygiene` in `quote` is deprecated
-  * [Mix] `Mix.project/0` is deprecated in favor of `Mix.Project.config/0`
-  * [Process] `Process.spawn/1`, `Process.spawn/3`, `Process.spawn_link/1`, `Process.spawn_link/3`, `Process.spawn_monitor/1`, `Process.spawn_monitor/3`, `Process.send/2` and `Process.self/0` are deprecated in favor of the ones in `Kernel`
-* Backwards incompatible changes
-  * [Exception] Exceptions now generate structs instead of records
-  * [OptionParser] Errors on parsing returns the switch and value as binaries (unparsed)
-  * [String] `String.to_char_list/1` (previously deprecated) no longer returns a tuple but the char list only and raises in case of failure
-## v0.13.2 (2014-05-11)
-* Enhancements
-  * [Application] Add an Application module with common functions to work with OTP applications
-  * [Exception] Add `Exception.message/1`, `Exception.format_banner/1`, `Exception.format_exit/1` and `Exception.format/1`
-  * [File] Add `File.ln_s/1`
-  * [Mix] `mix deps.clean` now works across environments
-  * [Mix] Support line numbers in `mix test`, e.g. test/some/file_test.exs:12
-  * [Mix] Use `@file` attributes to detect dependencies in between `.ex` and external files.
This means changing an `.eex` file will no longer recompile the whole project only the files that depend directly on it - * [Mix] Support application configurations in `config/config.exs` - * [Mix] Support user-wide configuration with `~/.mix/config.exs` - * [Mix] `mix help` now uses ANSI formatting to print guides - * [Regex] Support functions in `Regex.replace/4` - * [String] Support `:parts` in `String.split/3` - -* Bug fixes - * [Code] Ensure we don't lose the caller stacktrace on code evaluation - * [IEx] Exit signals now exits the IEx evaluator and a new one is spawned on its place - * [IEx] Ensure we don't prune too much stacktrace when reporting failures - * [IEx] Fix an issue where `iex.bat` on Windows was not passing the proper parameters forward - * [Kernel] Ensure modules defined on root respect defined aliases - * [Kernel] Do not wrap single lists in `:__block__` - * [Kernel] Ensure emitted beam code works nicely with dialyzer - * [Kernel] Do not allow a module named `Elixir` to be defined - * [Kernel] Create remote funs even if mod is a variable in capture `&mod.fun/arity` - * [Kernel] Improve compiler message when duplicated modules are detected - * [Mix] Generate `.gitignore` for `--umbrella` projects - * [Mix] Verify if a git dependency in deps has a proper git checkout and clean it automatically when it doesn't - * [Mix] Ensure `mix test` works with `IEx.pry/0` - * [System] Convert remaining functions in System to rely on char data - -* Soft deprecations (no warnings emitted) - * [Exception] `exception.message` is deprecated in favor `Exception.message/1` for retrieving exception messages - * [Kernel] `is_exception/1`, `is_record/1` and `is_record/2` are deprecated in favor of `Exception.exception?1`, `Record.record?/1` and `Record.record?/2` - * [Mix] `Mix.project/0` is deprecated in favor of `Mix.Project.config/0` - * [Process] `Process.spawn/1`, `Process.spawn/3`, `Process.spawn_link/1`, `Process.spawn_link/3`, `Process.spawn_monitor/1`, `Process.spawn_monitor/3`, `Process.send/2` and `Process.self/0` are deprecated in favor of the ones in `Kernel` - -* Deprecations - * [IEx] IEx.Options is deprecated in favor of `IEx.configure/1` and `IEx.configuration/0` - * [Kernel] `lc` and `bc` comprehensions are deprecated in favor of `for` - * [Macro] `Macro.safe_terms/1` is deprecated - * [Process] `Process.delete/0` is deprecated - * [Regex] Deprecate `:global` option in `Regex.split/3` in favor of `parts: :infinity` - * [String] Deprecate `:global` option in `String.split/3` in favor of `parts: :infinity` - -* Backwards incompatible changes - * [ExUnit] `ExUnit.Test` and `ExUnit.TestCase` has been converted to structs - * [ExUnit] The test and callback context has been converted to maps - * [Kernel] `File.Stat`, `HashDict`, `HashSet`, `Inspect.Opts`, `Macro.Env`, `Range`, `Regex` and `Version.Requirement` have been converted to structs. This means `is_record/2` checks will no longer work, instead, you can pattern match on them using `%Range{}` and similar - * [URI] The `URI.Info` record has now become the `URI` struct - * [Version] The `Version.Schema` record has now become the `Version` struct - -## v0.13.1 (2014-04-27) - -* Enhancements - * [Mix] Support `MIX_EXS` as configuration for running the current mix.exs file - * [Mix] Support Hex out of the box. 
This means users do not need to install Hex directly, instead, Mix will prompt whenever there is a need to have Hex installed - -* Bug fixes - * [ExUnit] Ensure doctest failures are properly reported - * [Kernel] Fix a bug where comprehensions arguments were not properly take into account in the variable scope - * [Mix] Fix issue on rebar install when the endpoint was redirecting to a relative uri - -* Soft deprecations (no warnings emitted) - * [Kernel] `iolist_size` and `iolist_to_binary` are deprecated in favor of `iodata_length` and `iodata_to_binary` - * [String] `String.to_char_list/1` is deprecated in favor of `List.from_char_data/1` - * [String] `String.from_char_list/1` is deprecated in favor of `String.from_char_data/1` - -* Deprecations - * [Mix] `:env` key in project configuration is deprecated - * [Regex] `Regex.groups/1` is deprecated in favor of `Regex.names/1` - -* Backwards incompatible changes - * [Macro] `Macro.unpipe/1` now returns tuples and `Macro.pipe/2` was removed in favor of `Macro.pipe/3` which explicitly expects the second element of the tuple returned by the new `Macro.unpipe/1` - * [Path] The functions in Path now only emit strings as result, regardless if the input was a char list or a string - * [Path] Atoms are no longer supported in Path functions - * [Regex] Regexes are no longer unicode by default. Instead, they must be explicitly marked with the `u` option - -## v0.13.0 (2014-04-20) - -* Enhancements - * [Base] Add `Base` module which does conversions to bases 16, 32, hex32, 64 and url64 - * [Code] Add `Code.eval_file/2` - * [Collectable] Add the `Collectable` protocol that empowers `Enum.into/2` and `Stream.into/2` and the `:into` option in comprehensions - * [Collectable] Implement `Collectable` for lists, dicts, bitstrings, functions and provide both `File.Stream` and `IO.Stream` - * [EEx] Add `handle_body/1` callback to `EEx.Engine` - * [Enum] Add `Enum.group_by/2`, `Enum.into/2`, `Enum.into/3`, `Enum.traverse/2` and `Enum.sum/2` - * [ExUnit] Randomize cases and tests suite runs, allow seed configuration and the `--seed` flag via `mix test` - * [ExUnit] Support `--only` for filtering when running tests with `mix test` - * [ExUnit] Raise an error if another `capture_io` process already captured the device - * [ExUnit] Improve formatter to show source code and rely on lhs and rhs (instead of expected and actual) - * [IEx] Allow prompt configuration with the `:prompt` option - * [IEx] Use werl on Windows - * [Kernel] Support `ERL_PATH` in `bin/elixir` - * [Kernel] Support interpolation in keyword syntax - * [Map] Add a Map module and support 17.0 maps and structs - * [Mix] Add dependency option `:only` to specify the dependency environment. 
`mix deps.get` and `mix deps.update` works accross all environment unless `--only` is specified - * [Mix] Add `Mix.Shell.prompt/1` - * [Mix] Ensure the project is compiled in case Mix' CLI cannot find a task - * [Node] Add `Node.ping/1` - * [Process] Include `Process.send/3` and support the `--gen-debug` option - * [Regex] Regexes no longer need the "g" option when there is a need to use named captures - * [Stream] Add `Stream.into/2` and `Stream.into/3` - * [StringIO] Add a `StringIO` module that allows a String to be used as IO device - * [System] Add `System.delete_env/1` to remove a variable from the environment - -* Bug fixes - * [CLI] Ensure `--app` is handled as an atom before processing - * [ExUnit] Ensure `ExUnit.Assertions` does not emit compiler warnings for `assert_receive` - * [Kernel] Ensure the same pid is not queued twice in the parallel compiler - * [Macro] `Macro.to_string/2` considers proper precedence when translating `!(foo > bar)` into a string - * [Mix] Automatically recompile on outdated Elixir version and show proper error messages - * [Mix] Ensure generated `.app` file includes core dependencies - * [Mix] Allow a dependency with no SCM to be overridden - * [Mix] Allow queries in `mix local.install` URL - * [OptionParser] Do not recognize undefined aliases as switches - -* Soft deprecations (no warnings emitted) - * [Kernel] `lc` and `bc` comprehensions are deprecated in favor of `for` - * [ListDict] `ListDict` is deprecated in favor of `Map` - * [Record] `defrecord/2`, `defrecordp/3`, `is_record/1` and `is_record/2` macros in Kernel are deprecated. Instead, use the new macros and API defined in the `Record` module - -* Deprecations - * [Dict] `Dict.empty/1`, `Dict.new/1` and `Dict.new/2` are deprecated - * [Exception] `Exception.normalize/1` is deprecated in favor of `Exception.normalize/2` - -* Backwards incompatible changes - * [ExUnit] Formatters are now required to be a GenEvent and `ExUnit.run/2` returns a map with results - -## v0.12.5 (2014-03-09) - -* Bug fixes - * [Kernel] Ensure `try` does not generate an after clause. Generating an after clause forbade clauses in the `else` part from being tail recursive. This should improve performance and memory consumption of `Stream` functions - * [Mix] Automatically recompile on outdated Elixir version and show proper error messages - -* Deprecations - * [File] `File.stream_to!/3` is deprecated - * [GenFSM] `GenFSM` is deprecated - * [Kernel] `%` for sigils is deprecated in favor of `~` - * [Kernel] `is_range/1` and `is_regex/1` are deprecated in favor of `Range.range?/1` and `Regex.regex?/1` - * [Stream] `Stream.after/1` is deprecated - * [URI] `URI.decode_query/1` is deprecated in favor of `URI.decode_query/2` with explicit dict argument - * [URI] Passing lists as key or values in `URI.encode_query/1` is deprecated - -* Backwards incompatible changes - * [Mix] Remove `MIX_GIT_FORCE_HTTPS` as Git itself already provides mechanisms for doing so - -## v0.12.4 (2014-02-12) - -* Enhancements - * [Mix] `mix deps.get` and `mix deps.update` no longer compile dependencies afterwards. Instead, they mark the dependencies which are going to be automatically compiled next time `deps.check` is invoked (which is done automatically by most mix tasks). This means users should have a better workflow when migrating in between environments - -* Deprecations - * [Kernel] `//` for default arguments is deprecated in favor of `\\` - * [Kernel] Using `%` for sigils is deprecated in favor of `~`. 
This is a soft deprecation, no warnings will be emitted for it in this release - * [Kernel] Using `^` inside function clause heads is deprecated, please use a guard instead - -* Backwards incompatible changes - * [ExUnit] `CaptureIO` returns an empty string instead of nil when there is no capture - * [Version] The `Version` module now only works with SemVer. The functions `Version.parse/1` and `Version.parse_requirement/1` now return `{:ok,res} | :error` for the cases you want to handle non SemVer cases manually. All other functions will trigger errors on non semantics versions - -## v0.12.3 (2014-02-02) - -* Enhancements - * [Kernel] Warnings now are explicitly tagged with "warning:" in messages - * [Kernel] Explicit functions inlined by the compiler, including operators. This means that `Kernel.+/2` will now expand to `:erlang.+/2` and so on - * [Mix] Do not fail if a Mix dependency relies on an outdated Elixir version - * [Process] Add `Process.send/2` and `Process.send_after/3` - * [Version] Add `Version.compare/2` - -* Bug fixes - * [Atom] Inspect `:...` and `:foo@bar` without quoting - * [Keyword] The list `[1, 2, three: :four]` now correctly expands to `[1, 2, {:three, :four}]` - * [Kernel] Ensure undefined `@attributes` shows proper stacktrace in warnings - * [Kernel] Guarantee nullary funs/macros are allowed in guards - * [Process] Ensure monitoring functions are inlined by the compiler - -* Deprecations - * [IEx] The helper `m/0` has been deprecated. The goal is to group all runtime statistic related helpers into a single module - * [Kernel] `binary_to_term/1`, `binary_to_term/2`, `term_to_binary/1` and `term_to_binary/2` are deprecated in favor of their counterparts in the `:erlang` module - * [Kernel] `//` for default arguments is deprecated in favor of `\\`. This is a soft deprecation, no warnings will be emitted for it in this release - * [Kernel] Deprecated `@behavior` in favor of `@behaviour` - * [Record] `to_keywords`, `getter` and `list getter` functionalities in `defrecordp` are deprecated - * [Record] `Record.import/2` is deprecated - -* Backwards incompatible changes - * [Dict] Implementations of `equal?/2` and `merge/2` in `HashDict` and `ListDict` are no longer polymorphic. To get polymorphism, use the functions in `Dict` instead - * [File] `File.cp/3` and `File.cp_r/3` no longer carry Unix semantics where the function behaves differently if the destination is an existing previous directory or not. It now always copies source to destination, doing it recursively in the latter - * [IEx] IEx now loads the `.iex.exs` file instead of `.iex` - * [Kernel] Remove `**` from the list of allowed operators - * [Kernel] Limit sigils delimiters to one of the following: `<>`, `{}`, `[]`, `()`, `||`, `//`, `"` and `'` - * [Range] `Range` is no longer a record, instead use `first .. last` if you need pattern matching - * [Set] Implementations of `difference/2`, `disjoint?/2`, `equal?/2`, `intersection/2`, `subset?/2` and `union/2` in `HashSet` are no longer polymorphic. 
To get polymorphism, use the functions in `Set` instead - -## v0.12.2 (2014-01-15) - -* Enhancements - * [EEx] Allow `EEx.AssignsEngine` to accept any Dict - * [Enum] Add `Enum.flat_map_reduce/3` - * [ExUnit] Support `@moduletag` in ExUnit cases - * [Kernel] Improve stacktraces to be relative to the compilation path and include the related application - * [Stream] Add `Stream.transform/3` - -* Bug fixes - * [ExUnit] `:include` in ExUnit only has effect if a test was previously excluded with `:exclude` - * [ExUnit] Only run `setup_all` and `teardown_all` if there are tests in the case - * [Kernel] Ensure bitstring modifier arguments are expanded - * [Kernel] Ensure compiler does not block on missing modules - * [Kernel] Ensure `<>/2` works only with binaries - * [Kernel] Fix usage of string literals inside `<<>>` when `utf8`/`utf16`/`utf32` is used as specifier - * [Mix] Ensure mix properly copies _build dependencies on Windows - -* Deprecations - * [Enum] Deprecate `Enum.first/1` in favor of `Enum.at/2` and `List.first/1` - * [Kernel] Deprecate continuable heredocs. In previous versions, Elixir would continue parsing on the same line the heredoc started, this behaviour has been deprecated - * [Kernel] `is_alive/0` is deprecated in favor of `Node.alive?` - * [Kernel] `Kernel.inspect/2` with `Inspect.Opts[]` is deprecated in favor of `Inspect.Algebra.to_doc/2` - * [Kernel] `Kernel.inspect/2` with `:raw` option is deprecated, use `:records` option instead - * [Kernel] Deprecate `<-/2` in favor of `send/2` - -* Backwards incompatible changes - * [String] Change `String.next_grapheme/1` and `String.next_codepoint/1` to return `nil` on string end - -## v0.12.1 (2014-01-04) - -* Enhancements - * [ExUnit] Support `:include` and `:exclude` configuration options to filter which tests should run based on their tags. Those options are also supported via `mix test` as `--include` and `--exclude` - * [ExUnit] Allow doctests to match against `#MyModule<>` - -* Bug fixes - * [CLI] Abort when a pattern given to elixirc does not match any file - * [Float] Fix `Float.parse/1` to handle numbers of the form "-0.x" - * [IEx] Improve error message for `IEx.Helpers.r` when module does not exist - * [Mix] Ensure `deps.get` updates origin if lock origin and dep origin do not match - * [Mix] Use relative symlinks in _build - * [Typespec] Fix conversion of unary ops from typespec format to ast - * [Typespec] Fix handling of `tuple()` and `{}` - -* Deprecations - * [Kernel] Do not leak clause heads. Previously, a variable defined in a case/receive head clauses would leak to the outer scope. This behaviour is deprecated and will be removed in the next release. - * [Kernel] Deprecate `__FILE__` in favor of `__DIR__` or `__ENV__.file` - -* Backwards incompatible changes - * [GenFSM] GenServer now stops on unknown event/sync_event requests - * [GenServer] GenServer now stops on unknown call/cast requests - * [Kernel] Change how `->` is represented in AST. Now each clause is represented by its own AST node which makes composition easier. See commit 51aef55 for more information. - -## v0.12.0 (2013-12-15) - -* Enhancements - * [Exception] Allow `exception/1` to be overridden and promote it as the main mechanism to customize exceptions - * [File] Add `File.stream_to!/3` - * [Float] Add `Float.floor/1`, `Float.ceil/1` and `Float.round/3` - * [Kernel] Add `List.delete_at/2` and `List.updated_at/3` - * [Kernel] Add `Enum.reverse/2` - * [Kernel] Implement `defmodule/2`, `@/1`, `def/2` and friends in Elixir itself. 
`case/2`, `try/2` and `receive/1` have been made special forms. `var!/1`, `var!/2` and `alias!/1` have also been implemented in Elixir and demoted from special forms - * [Record] Support dynamic fields in `defrecordp` - * [Stream] Add `Stream.resource/3` - * [Stream] Add `Stream.zip/2`, `Stream.filter_map/3`, `Stream.each/2`, `Stream.take_every/2`, `Stream.chunk/2`, `Stream.chunk/3`, `Stream.chunk/4`, `Stream.chunk_by/2`, `Stream.scan/2`, `Stream.scan/3`, `Stream.uniq/2`, `Stream.after/2` and `Stream.run/1` - * [Stream] Support `Stream.take/2` and `Stream.drop/2` with negative counts - -* Bug fixes - * [HashDict] Ensure a `HashDict` stored in an attribute can be accessed via the attribute - * [Enum] Fix bug in `Enum.chunk/4` where you'd get an extra element when the enumerable was a multiple of the counter and a pad was given - * [IEx] Ensure `c/2` helper works with full paths - * [Kernel] `quote location: :keep` now only affects definitions in order to keep the proper trace in definition exceptions - * [Mix] Also symlink `include` directories in _build dependencies - * [Version] Fix `Version.match?/2` with `~>` and versions with alphanumeric build info (like `-dev`) - -* Deprecations - * [Enum] `Enumerable.count/1` and `Enumerable.member?/2` should now return tagged tuples. Please see `Enumerable` docs for more info - * [Enum] Deprecate `Enum.chunks/2`, `Enum.chunks/4` and `Enum.chunks_by/2` in favor of `Enum.chunk/2`, `Enum.chunk/4` and `Enum.chunk_by/2` - * [File] `File.binstream!/3` is deprecated. Simply use `File.stream!/3` which is able to figure out if `stream` or `binstream` operations should be used - * [Macro] `Macro.extract_args/1` is deprecated in favor of `Macro.decompose_call/1` - -* Backwards incompatible changes - * [Enum] Behaviour of `Enum.drop/2` and `Enum.take/2` has been switched when given negative counts - * [Enum] Behaviour of `Enum.zip/2` has been changed to stop as soon as the first enumerable finishes - * [Enum] `Enumerable.reduce/3` protocol has changed to support suspension. Please see `Enumerable` docs for more info - * [Mix] Require `:escript_main_module` to be set before generating escripts - * [Range] `Range.Iterator` protocol has changed in order to work with the new `Enumerable.reduce/3`. Please see `Range.Iterator` docs for more info - * [Stream] The `Stream.Lazy` structure has changed to accumulate functions and accumulators as we go (its inspected representation has also changed) - * [Typespec] `when` clauses were moved to the outer part of the spec and should be in the keywords format. So `add(a, b) when is_subtype(a, integer) and is_subtype(b, integer) :: integer` should now be written as `add(a, b) :: integer when a: integer, b: integer` - -## v0.11.2 (2013-11-14) - -* Enhancements - * [Mix] Add `mix iex` that redirects users to the proper `iex -S mix` command - * [Mix] Support `build_per_environment: true` in project configuration that manages a separete build per environment, useful when you have per-environment behaviour/compilation - -* Backwards incompatible changes - * [Mix] Mix now compiles files to `_build`. Projects should update just fine, however documentation and books may want to update to the latest information - -## v0.11.1 (2013-11-07) - -* Enhancements - * [Mix] Improve dependency convergence by explicitly checking each requirement instead of expecting all requirements to be equal - * [Mix] Support optional dependencies with `optional: true`. 
Optional dependencies are downloaded for the current project but they are automatically skipped when such project is used as a dependency - -* Bug fixes - * [Kernel] Set compilation status per ParallelCompiler and not globally - * [Mix] Ensure Mix does not load previous dependencies versions before `deps.get`/`deps.update` - * [Mix] Ensure umbrella apps are sorted before running recursive commands - * [Mix] Ensure umbrella apps run in the same environment as the parent project - * [Mix] Ensure dependency tree is topsorted before compiling - * [Mix] Raise error when duplicated projects are pushed into the stack - * [URI] Allow lowercase escapes in URI - -* Backwards incompatible changes - * [Mix] Setting `:load_paths` in your project configuration is deprecated - -## v0.11.0 (2013-11-02) - -* Enhancements - * [Code] Eval now returns variables from other contexts - * [Dict] Document and enforce all dicts use the match operator (`===`) when checking for keys - * [Enum] Add `Enum.slice/2` with a range - * [Enum] Document and enforce `Enum.member?/2` to use the match operator (`===`) - * [IEx] Split `IEx.Evaluator` from `IEx.Server` to allow custom evaluators - * [IEx] Add support for `IEx.pry` which halts a given process for inspection - * [IO] Add specs and allow some IO APIs to receive any data that implements `String.Chars` - * [Kernel] Improve stacktraces on command line interfaces - * [Kernel] Sigils can now handle balanced tokens as in `%s(f(o)o)` - * [Kernel] Emit warnings when an alias is not used - * [Macro] Add `Macro.pipe/3` and `Macro.unpipe/1` for building pipelines - * [Mix] Allow umbrella children to share dependencies between them - * [Mix] Allow mix to be escriptize'd - * [Mix] Speed mix projects compilation by relying on more manifests information - * [Protocol] Protocols now provide `impl_for/1` and `impl_for!/1` functions which receive a structure and returns its respective implementation, otherwise returns nil or an error - * [Set] Document and enforce all sets use the match operator (`===`) when checking for keys - * [String] Update to Unicode 6.3.0 - * [String] Add `String.slice/2` with a range - -* Bug fixes - * [Exception] Ensure `defexception` fields can be set dynamically - * [Kernel] Guarantee aliases hygiene is respected when the current module name is not known upfront - * [Kernel] `Kernel.access/2` no longer flattens lists - * [Mix] Ensure cyclic dependencies are properly handled - * [String] Implement the extended grapheme cluster algorithm for `String` operations - -* Deprecations - * [Kernel] `pid_to_list/1`, `list_to_pid/1`, `binary_to_atom/2`, `binary_to_existing_atom/2` and `atom_to_binary/2` are deprecated in favor of their counterparts in the `:erlang` module - * [Kernel] `insert_elem/3` and `delete_elem/2` are deprecated in favor of `Tuple.insert_at/3` and `Tuple.delete_at/2` - * [Kernel] Use of `in` inside matches (as in `x in [1,2,3] -> x`) is deprecated in favor of the guard syntax (`x when x in [1,2,3]`) - * [Macro] `Macro.expand_all/2` is deprecated - * [Protocol] `@only` and `@except` in protocols are now deprecated - * [Protocol] Protocols no longer fallback to `Any` out of the box (this functionality needs to be explicitly enabled by setting `@fallback_to_any` to true) - * [String] `String.to_integer/1` and `String.to_float/1` are deprecated in favor of `Integer.parse/1` and `Float.parse/1` - -* Backwards incompatible changes - * [CLI] Reading `.elixirrc` has been dropped in favor of setting env vars - * [Kernel] `Kernel.access/2` now expects the 
second argument to be a compile time list - * [Kernel] `fn -> end` quoted expression is no longer wrapped in a `do` keyword - * [Kernel] Quoted variables from the same module must be explicitly shared. Previously, if a function returned `quote do: a = 1`, another function from the same module could access it as `quote do: a`. This has been fixed and the variables must be explicitly shared with `var!(a, __MODULE__)` - * [Mix] Umbrella apps now treat children apps as dependencies. This means all dependencies will be checked out in the umbrela `deps` directory. On upgrade, child apps need to point to the umbrella project by setting `deps_path: "../../deps_path", lockfile: "../../mix.lock"` in their project config - * [Process] `Process.group_leader/2` args have been reversed so the "subject" comes first - * [Protocol] Protocol no longer dispatches to `Number`, but to `Integer` and `Float` - -## v0.10.3 (2013-10-02) - -* Enhancements - * [Enum] Add `Enum.take_every/2` - * [IEx] IEx now respects signals sent from the Ctrl+G menu - * [Kernel] Allow documentation for types with `@typedoc` - * [Mix] Allow apps to be selected in umbrella projects - * [Record] Generated record functions `new` and `update` also take options with strings as keys - * [Stream] Add `Stream.unfold/1` - -* Bug fixes - * [Dict] Fix a bug when a HashDict was marked as equal when one was actually a subset of the other - * [EEx] Solve issue where `do` blocks inside templates were not properly aligned - * [ExUnit] Improve checks and have better error reports on poorly aligned doctests - * [Kernel] Fix handling of multiple heredocs on the same line - * [Kernel] Provide better error messages for match, guard and quoting errors - * [Kernel] Make `Kernel.raise/2` a macro to avoid messing up stacktraces - * [Kernel] Ensure `&()` works on quoted blocks with only one expression - * [Mix] Address an issue where a dependency was not compiled in the proper order when specified in different projects - * [Mix] Ensure `compile: false` is a valid mechanism for disabling the compilation of dependencies - * [Regex] Fix bug on `Regex.scan/3` when capturing groups and the regex has no groups - * [String] Fix a bug with `String.split/2` when given an empty pattern - * [Typespec] Guarantee typespecs error reports point to the proper line - -* Deprecations - * [Kernel] The previous partial application syntax (without the `&` operator) has now been deprecated - * [Regex] `Regex.captures/3` is deprecated in favor of `Regex.named_captures/3` - * [String] `String.valid_codepoint?/1` is deprecated in favor of pattern matching with `<<_ :: utf8 >>` - -* Backwards incompatible changes - * [IEx] The `r/0` helper has been removed as it caused surprising behaviour when many modules with dependencies were accumulated - * [Mix] `Mix.Version` was renamed to `Version` - * [Mix] `File.IteratorError` was renamed to `IO.StreamError` - * [Mix] `mix new` now defaults to the `--sup` option, use `--bare` to get the previous behaviour - -## v0.10.2 (2013-09-03) - -* Enhancements - * [CLI] Add `--verbose` to elixirc, which now is non-verbose by default - * [Dict] Add `Dict.Behaviour` as a convenience to create your own dictionaries - * [Enum] Add `Enum.split/2`, `Enum.reduce/2`, `Enum.flat_map/2`, `Enum.chunk/2`, `Enum.chunk/4`, `Enum.chunk_by/2`, `Enum.concat/1` and `Enum.concat/2` - * [Enum] Support negative indices in `Enum.at/fetch/fetch!` - * [ExUnit] Show failures on CLIFormatter as soon as they pop up - * [IEx] Allow for strings in `h` helper - * [IEx] Helpers `r` 
and `c` can handle erlang sources - * [Integer] Add `odd?/1` and `even?/1` - * [IO] Added support to specifying a number of bytes to stream to `IO.stream`, `IO.binstream`, `File.stream!` and `File.binstream!` - * [Kernel] Include file and line on error report for overriding an existing function/macro - * [Kernel] Convert external functions into quoted expressions. This allows record fields to contain functions as long as they point to an `&Mod.fun/arity` - * [Kernel] Allow `foo?` and `bar!` as valid variable names - * [List] Add `List.replace_at/3` - * [Macro] Improve printing of the access protocol on `Macro.to_string/1` - * [Macro] Add `Macro.to_string/2` to support annotations on the converted string - * [Mix] Automatically recompile a project if the Elixir version changes - * [Path] Add `Path.relative_to_cwd/2` - * [Regex] Allow erlang `re` options when compiling Elixir regexes - * [Stream] Add `Stream.concat/1`, `Stream.concat/2` and `Stream.flat_map/2` - * [String] Add regex pattern support to `String.replace/3` - * [String] Add `String.ljust/2`, `String.rjust/2`, `String.ljust/3` and `String.rjust/3` - * [URI] `URI.parse/1` supports IPv6 addresses - -* Bug fixes - * [Behaviour] Do not compile behaviour docs if docs are disabled on compilation - * [ExUnit] Doctests no longer eat too much space and provides detailed reports for poorly indented lines - * [File] Fix a bug where `File.touch(file, datetime)` was not setting the proper datetime when the file did not exist - * [Kernel] Limit `inspect` results to 50 items by default to avoid printing too much data - * [Kernel] Return a readable error on oversized atoms - * [Kernel] Allow functions ending with `?` or `!` to be captured - * [Kernel] Fix default shutdown of child supervisors to `:infinity` - * [Kernel] Fix regression when calling a function/macro ending with bang, followed by `do/end` blocks - * [List] Fix bug on `List.insert_at/3` that added the item at the wrong position for negative indexes - * [Macro] `Macro.escape/2` can now escape improper lists - * [Mix] Fix `Mix.Version` matching on pre-release info - * [Mix] Ensure `watch_exts` trigger full recompilation on change with `mix compile` - * [Mix] Fix regression on `mix clean --all` - * [String] `String.strip/2` now supports removing unicode characters - * [String] `String.slice/3` still returns the proper result when there is no length to be extracted - * [System] `System.get_env/0` now returns a list of tuples as previously advertised - -* Deprecations - * [Dict] `Dict.update/3` is deprecated in favor of `Dict.update!/3` - * [Enum] `Enum.min/2` and `Enum.max/2` are deprecated in favor of `Enum.min_by/2` and `Enum.max_by/2` - * [Enum] `Enum.join/2` and `Enum.map_join/3` with a char list are deprecated - * [IO] `IO.stream(device)` and `IO.binstream(device)` are deprecated in favor of `IO.stream(device, :line)` and `IO.binstream(device, :line)` - * [Kernel] `list_to_binary/1`, `binary_to_list/1` and `binary_to_list/3` are deprecated in favor of `String.from_char_list!/1` and `String.to_char_list!/1` for characters and `:binary.list_to_bin/1`, `:binary.bin_to_list/1` and `:binary.bin_to_list/3` for bytes - * [Kernel] `to_binary/1` is deprecated in favor of `to_string/1` - * [Kernel] Deprecate `def/4` and friends in favor of `def/2` with unquote and friends - * [Kernel] Deprecate `%b` and `%B` in favor of `%s` and `%S` - * [List] `List.concat/2` is deprecated in favor of `Enum.concat/2` - * [Macro] `Macro.unescape_binary/1` and `Macro.unescape_binary/2` are deprecated in favor 
of `Macro.unescape_string/1` and `Macro.unescape_string/2` - * [Mix] `:umbrella` option for umbrella paths has been deprecated in favor of `:in_umbrella` - -* Backwards incompatible changes - * [IO] IO functions now only accept iolists as arguments - * [Kernel] `Binary.Chars` was renamed to `String.Chars` - * [Kernel] The previous ambiguous import syntax `import :functions, Foo` was removed in favor of `import Foo, only: :functions` - * [OptionParser] `parse` and `parse_head` now returns a tuple with three elements instead of two - -## v0.10.1 (2013-08-03) - -* Enhancements - * [Behaviour] Add support for `defmacrocallback/1` - * [Enum] Add `Enum.shuffle/1` - * [ExUnit] The `:trace` option now also reports run time for each test - * [ExUnit] Add support for `:color` to enable/disable ANSI coloring - * [IEx] Add the `clear` helper to clear the screen. - * [Kernel] Add the capture operator `&` - * [Kernel] Add support for `GenFSM.Behaviour` - * [Kernel] Functions now points to the module and function they were defined when inspected - * [Kernel] A documentation attached to a function that is never defined now prints warnings - * [List] Add `List.keysort/2` - * [Mix] `:test_helper` project configuration did not affect `mix test` and was therefore removed. A `test/test_helper.exs` file is still necessary albeit it doesn't need to be automatically required in each test file - * [Mix] Add manifests for yecc, leex and Erlang compilers, making it easier to detect dependencies in between compilers and providing a more useful clean behaviour - * [Mix] `mix help` now outputs information about the default mix task - * [Mix] Add `--no-deps-check` option to `mix run`, `mix compile` and friends to not check dependency status - * [Mix] Add support for `MIX_GIT_FORCE_HTTPS` system environment that forces HTTPS for known providers, useful when the regular git port is blocked. 
This configuration does not affect the `mix.lock` results - * [Mix] Allow coverage tool to be pluggable via the `:test_coverage` configuration - * [Mix] Add `mix cmd` as a convenience to run a command recursively in child apps in an umbrella application - * [Mix] Support `umbrella: true` in dependencies as a convenience for setting up umbrella path deps - * [Mix] `mix run` now behaves closer to the `elixir` command and properly mangles the ARGV - * [String] Add `Regex.scan/3` now supports capturing groups - * [String] Add `String.reverse/1` - -* Bug fixes - * [Behaviour] Ensure callbacks are stored in the definition order - * [CLI] Speed up boot time on Elixir .bat files - * [IEx] Reduce cases where IEx parser can get stuck - * [Kernel] Improve error messages when the use of an operator has no effect - * [Kernel] Fix a bug where warnings were not being generated when imported macros conflicted with local functions or macros - * [Kernel] Document that `on_definition` can only be a function as it is evaluated inside the function context - * [Kernel] Ensure `%w` sigils with no interpolation are fully expanded at compile time - * [Mix] `mix deps.update`, `mix deps.clean` and `mix deps.unlock` no longer change all dependencies unless `--all` is given - * [Mix] Always run ` mix loadpaths` on `mix app.start`, even if `--no-compile` is given - * [OptionParser] Do not add boolean flags to the end result if they were not given - * [OptionParser] Do not parse non-boolean flags as booleans when true or false are given - * [OptionParser] Ensure `:keep` and `:integer`|`:float` can be given together as options - * [OptionParser] Ensure `--no-flag` sets `:flag` to false when `:flag` is a registered boolean switch - -* Deprecations - * [Kernel] `function(Mod.fun/arity)` and `function(fun/arity)` are deprecated in favor of `&Mod.fun/arity` and `&fun/arity` - * [Kernel] `function/3` is deprecated in favor of `Module.function/3` - * [Kernel] `Kernel.ParallelCompiler` now receives a set of callbacks instead of a single one - * [Mix] `:test_coverage` option now expect keywords arguments and the `--cover` flag is now treated as a boolean - -* Backwards incompatible changes - * [Regex] `Regex.scan/3` now always returns a list of lists, normalizing the result, instead of list with mixed lists and binaries - * [System] `System.halt/2` was removed since the current Erlang implementation of such function is bugged - -## v0.10.0 (2013-07-15) - -* Enhancements - * [ExUnit] Support `trace: true` option which gives detailed reporting on test runs - * [HashDict] Optimize `HashDict` to store pairs in a cons cell reducing storage per key by half - * [Kernel] Add pretty printing support for inspect - * [Kernel] Add document algebra library used as the foundation for pretty printing - * [Kernel] Add `defrecordp/3` that enables specifying the first element of the tuple - * [Kernel] Add the `Set` API and a hash based implementation via `HashSet` - * [Kernel] Add `Stream` as composable, lazy-enumerables - * [Mix] `mix archive` now includes the version of the generated archive - * [Mix] Mix now requires explicit dependency overriding to be given with `override: true` - * [Mix] Projects can now define an `:elixir` key to outline supported Elixir versions - * [Typespec] Improve error messages to contain file, line and the typespec itself - -* Bug fixes - * [CLI] Elixir can now run on Unix directories with `:` in its path - * [Kernel] `match?/2` does not leak variables to outer scope - * [Kernel] Keep `head|tail` format when 
splicing at the tail - * [Kernel] Ensure variables defined in the module body are not passed to callbacks - * [Mix] On dependencies conflict, show from where each source is coming from - * [Mix] Empty projects no longer leave empty ebin files on `mix compile` - * [Module] Calling `Module.register_attribute/3` no longer automatically changes it to persisted or accumulated - -* Deprecations - * [Enum] Receiving the index of iteration in `Enum.map/2` and `Enum.each/2` is deprecated in favor of `Stream.with_index/1` - * [File] `File.iterator/1` and `File.biniterator/1` are deprecated in favor of `IO.stream/1` and `IO.binstream/1` - * [File] `File.iterator!/2` and `File.biniterator!/2` are deprecated in favor of `File.stream!/2` and `File.binstream!/2` - * [Kernel] Deprecate recently added `quote binding: ...` in favor of the clearer `quote bind_quoted: ...` - * [Kernel] Deprecate `Kernel.float/1` in favor of a explicit conversion - * [Mix] Deprecate `mix run EXPR` in favor of `mix run -e EXPR` - * [Record] `Record.__index__/2` deprecated in favor of `Record.__record__(:index, key)` - -* Backwards incompatible changes - * [Kernel] The `Binary.Inspect` protocol has been renamed to `Inspect` - * [Kernel] Tighten up the grammar rules regarding parentheses omission, previously the examples below would compile but now they raise an error message: - - do_something 1, is_list [], 3 - [1, is_atom :foo, 3] - - * [Module] Calling `Module.register_attribute/3` no longer automatically changes it to persisted or accumulated - * [Record] First element of a record via `defrecordp` is now the `defrecordp` name and no longer the current atom - * [URI] Remove custom URI parsers in favor of `URI.default_port/2` - -## v0.9.3 (2013-06-23) - -* Enhancements - * [File] Add `File.chgrp`, `File.chmod` and `File.chown` - * [Kernel] Add `--warnings-as-errors` to Elixir's compiler options - * [Kernel] Print warnings to stderr - * [Kernel] Warn on undefined module attributes - * [Kernel] Emit warning for `x in []` in guards - * [Kernel] Add `binding/0` and `binding/1` for retrieving bindings - * [Kernel] `quote` now allows a binding as an option - * [Macro] Add `Macro.expand_once/2` and `Macro.expand_all/2` - * [Mix] Implement `Mix.Version` for basic versioning semantics - * [Mix] Support creation and installation of archives (.ez files) - * [Mix] `github: ...` shortcut now uses the faster `git` schema instead of `https` - * [Record] Allow types to be given to `defrecordp` - -* Bug fixes - * [Kernel] The elixir executable on Windows now supports the same options as the UNIX one - * [Kernel] Improve error messages on default clauses clash - * [Kernel] `__MODULE__.Foo` now returns `Foo` when outside of a Module - * [Kernel] Improve error messages when default clauses from different definitions collide - * [Kernel] `^x` variables should always refer to the value before the expression - * [Kernel] Allow `(x, y) when z` in function clauses and try expressions - * [Mix] Mix now properly evaluates rebar scripts - -* Deprecations - * [Code] `Code.string_to_ast/1` has been deprecated in favor of `Code.string_to_quoted/1` - * [Macro] `Macro.to_binary/1` has been deprecated in favor of `Macro.to_string/1` - * [Typespec] Deprecate `(fun(...) -> ...)` in favor of `(... 
-> ...)` - -* Backwards incompatible changes - * [Bitwise] Precedence of operators used by the Bitwise module were changed, check `elixir_parser.yrl` for more information - * [File] `rm_rf` and `cp_r` now returns a tuple with three elements on failures - * [Kernel] The quoted representation for `->` clauses changed from a tuple with two elements to a tuple with three elements to support metadata - * [Kernel] Sigils now dispatch to `sigil_$` instead of `__$__` where `$` is the sigil character - * [Macro] `Macro.expand/2` now expands until final form. Although this is backwards incompatible, it is very likely you do not need to change your code, since expansion until its final form is recommended, particularly if you are expecting an atom out of it - * [Mix] No longer support beam files on `mix local` - -## v0.9.2 (2013-06-13) - -* Enhancements - * [ExUnit] `capture_io` now captures prompt by default - * [Mix] Automatically import git dependencies from Rebar - * [Mix] Support for dependencies directly from the umbrella application - * [Regex] Add `Regex.escape` - * [String] Add `String.contains?` - * [URI] Implement `Binary.Chars` (aka `to_binary`) for `URI.Info` - -* Bug fixes - * [HashDict] Ensure HashDict uses exact match throughout its implementation - * [IEx] Do not interpret ANSI codes in IEx results - * [IEx] Ensure `--cookie` is set before accessing remote shell - * [Kernel] Do not ignore nil when dispatching protocols to avoid infinite loops - * [Mix] Fix usage of shell expressions in `Mix.Shell.cmd` - * [Mix] Start the application by default on escripts - -* Deprecations - * [Regex] `Regex.index/2` is deprecated in favor `Regex.run/3` - * [Kernel] `super` no longer supports implicit arguments - -* Backwards incompatible changes - * [Kernel] The `=~` operator now returns true or false instead of an index - -## v0.9.1 (2013-05-30) - -* Enhancements - * [IEx] Limit the number of entries kept in history and allow it to be configured - * [Kernel] Add `String.start_with?` and `String.end_with?` - * [Typespec] Allow keywords, e.g. 
`[foo: integer, bar: boolean | module]`, in typespecs - -* Bug fixes - * [Dict] `Enum.to_list` and `Dict.to_list` now return the same results for dicts - * [IEx] Enable shell customization via the `IEx.Options` module - * [Kernel] Fix a bug where `unquote_splicing` did not work on the left side of a stab op - * [Kernel] Unused functions with cyclic dependencies are now also warned as unused - * [Mix] Fix a bug where `mix deps.get` was not retrieving nested dependencies - * [Record] Fix a bug where nested records cannot be defined - * [Record] Fix a bug where a record named Record cannot be defined - -## v0.9.0 (2013-05-23) - -* Enhancements - * [ExUnit] `ExUnit.CaptureIO` now accepts an input to be used during capture - * [IEx] Add support for .iex files that are loaded during shell's boot process - * [IEx] Add `import_file/1` helper - -* Backwards incompatible changes - * [Enum] `Enum.Iterator` was replaced by the more composable and functional `Enumerable` protocol which supports reductions - * [File] `File.iterator/1` and `File.biniterator/1` have been removed in favor of the safe `File.iterator!/1` and `File.biniterator!/1` ones - * [Kernel] Erlang R15 is no longer supported - * [Kernel] Elixir modules are now represented as `Elixir.ModuleName` (using `.` instead of `-` as separator) - -## v0.8.3 (2013-05-22) - -* Enhancements - * [CLI] Flags `-p` and `-pr` fails if pattern match no files - * [CLI] Support `--hidden` and `--cookie` flags for distributed Erlang - * [Enum] Add `Enum.to_list/1`, `Enum.member?/2`, `Enum.uniq/2`, `Enum.max/1`, `Enum.max/2`, `Enum.min/1` and `Enum.min/2` - * [ExUnit] Add `ExUnit.CaptureIO` for IO capturing during tests - * [ExUnit] Consider load time on ExUnit time reports - * [IEx] Support `ls` with colored output - * [IEx] Add `#iex:break` to break incomplete expressions - * [Kernel] Add `Enum.at`, `Enum.fetch` and `Enum.fetch!` - * [Kernel] Add `String.to_integer` and `String.to_float` - * [Kernel] Add `Dict.take`, `Dict.drop`, `Dict.split`, `Dict.pop` and `Dict.fetch!` - * [Kernel] Many optimizations for code compilation - * [Kernel] `in` can be used with right side expression outside guards - * [Kernel] Add `Node.get_cookie/0` and `Node.set_cookie/2` - * [Kernel] Add `__DIR__` - * [Kernel] Expand macros and attributes on quote, import, alias and require - * [Kernel] Improve warnings related to default arguments - * [Keyword] Add `Keyword.delete_first/2` - * [Mix] Add `local.rebar` to download a local copy of rebar, and change `deps.compile` to use it if needed - * [Mix] Support umbrella applications - * [Mix] Load beam files available at `MIX_PATH` on CLI usage - * [String] Add `String.valid?` and `String.valid_character?` - -* Bug fixes - * [ExUnit] Handle exit messages from in ExUnit - * [ExUnit] Failures on ExUnit's setup_all now invalidates all tests - * [Kernel] Ensure we don't splice keyword args unecessarily - * [Kernel] Private functions used by private macros no longer emit an unused warning - * [Kernel] Ensure Elixir won't trip on empty receive blocks - * [Kernel] `String.slice` now returns an empty string when out of range by 1 - * [Mix] Generate manifest files after compilation to avoid depending on directory timestamps and to remove unused .beam files - * [Path] `Path.expand/2` now correctly expands `~` in the second argument - * [Regex] Fix badmatch with `Regex.captures(%r/(.)/g, "cat")` - * [URI] Downcase host and scheme and URIs - -* Deprecations - * [Code] `Code.eval` is deprecated in favor of `Code.eval_string` - * [Exception] 
`Exception.format_entry` is deprecated in favor of `Exception.format_stacktrace_entry` - * [ExUnit] `assert left inlist right` is deprecated in favor of `assert left in right` - * [IO] `IO.getb` is deprecated in favor of `IO.getn` - * [List] `List.member?/2` is deprecated in favor of `Enum.member?/2` - * [Kernel] `var_context` in quote was deprecated in favor of `context` - * [Kernel] `Enum.at!` and `Dict.get!` is deprecated in favor of `Enum.fetch!` and `Dict.fetch!` - -* Backwards incompatible changes - * [Dict] `List.Dict` was moved to `ListDict` - * [IO] `IO.gets`, `IO.getn` and friends now return binaries when reading from stdio - * [Kernel] Precedence of `|>` has changed to lower to support constructs like `1..5 |> Enum.to_list` - * [Mix] `mix escriptize` now receives arguments as binaries - -## v0.8.2 (2013-04-20) - -* Enhancements - * [ExUnit] Use ANSI escape codes in CLI output - * [ExUnit] Include suite run time on CLI results - * [ExUnit] Add support to doctests, allowing test cases to be generated from code samples - * [File] Add `File.ls` and `File.ls!` - * [IEx] Support `pwd` and `cd` helpers - * [Kernel] Better error reporting for invalid bitstring generators - * [Kernel] Improve meta-programming by allowing `unquote` on `def/2`, `defp/2`, `defmacro/2` and `defmacrop/2` - * [Kernel] Add support to R16B new functions: `insert_elem/3` and `delete_elem/2` - * [Kernel] Import conflicts are now lazily handled. If two modules import the same functions, it will fail only if the function is invoked - * [Mix] Support `--cover` on mix test and `test_coverage` on Mixfiles - * [Record] Each record now provides `Record.options` with the options supported by its `new` and `update` functions - -* Bug fixes - * [Binary] inspect no longer escapes standalone hash `#` - * [IEx] The `h` helper can now retrieve docs for special forms - * [Kernel] Record optimizations were not being triggered in functions inside the record module - * [Kernel] Aliases defined inside macros should be carried over - * [Kernel] Fix a bug where nested records could not use the Record[] syntax - * [Path] Fix a bug on `Path.expand` when expanding paths starting with `~` - -* Deprecations - * [Kernel] `setelem/3` is deprecated in favor of `set_elem/3` - * [Kernel] `function(:is_atom, 1)` is deprecated in favor of `function(is_atom/1)` - -* Backwards incompatible changes - * [Kernel] `unquote` now only applies to the closest quote. If your code contains a quote that contains another quote that calls unquote, it will no longer work. 
Use `Macro.escape` instead and pass your quoted contents up in steps, for example: - - quote do - quote do: unquote(x) - end - - should become: - - quote do - unquote(Macro.escape(x)) - end - -## v0.8.1 (2013-02-17) - -* Enhancements - * [ExUnit] Tests can now receive metadata set on setup/teardown callbacks - * [ExUnit] Add support to ExUnit.CaseTemplate to share callbacks in between test cases - * [IO] Add `IO.ANSI` to make it easy to write ANSI escape codes - * [Kernel] Better support for Unicode lists - * [Kernel] Reduce variables footprint in `case`/`receive` clauses - * [Kernel] Disable native compilation when on_load attributes is present to work around an Erlang bug - * [Macro] `Macro.expand` also considers macros from the current `__ENV__` module - * [Mix] Improve support for compilation of `.erl` files - * [Mix] Add support for compilation of `.yrl` and `.xrl` files - * [OptionParser] Switches are now overridden by default but can be kept in order if chosen - * [Typespec] Better error reporting for invalid typespecs - -* Bug fixes - * [Mix] Allow Mix projects to be generated with just one letter - -* Backwards incompatible changes - * [Kernel] `before_compile` and `after_compile` callbacks now receive the environment as first argument instead of the module - -* Deprecations - * [ExUnit] Explicitly defined test/setup/teardown functions are deprecated - * [Kernel] Tidy up and clean `quote` API - * [Kernel] Old `:local.(args)` syntax is deprecated - * [Process] `Process.self` is deprecated in favor `Kernel.self` - -## v0.8.0 (2013-01-28) - -* Enhancements - * [Binary] Support `<< "string" :: utf8 >>` as in Erlang - * [Binary] Support `\a` escape character in binaries - * [Binary] Support syntax shortcut for specifying size in bit syntax - * [CLI] Support `--app` option to start an application and its dependencies - * [Dict] Support `put_new` in `Dict` and `Keyword` - * [Dict] Add `ListDict` and a faster `HashDict` implementation - * [ExUnit] ExUnit now supports multiple runs in the same process - * [ExUnit] Failures in ExUnit now shows a tailored stacktrace - * [ExUnit] Introduce `ExUnit.ExpectationError` to provide better error messages - * [Kernel] Introduce `Application.Behaviour` to define application module callbacks - * [Kernel] Introduce `Supervisor.Behaviour` to define supervisors callbacks - * [Kernel] More optimizations were added to Record handling - * [Kernel] `?\x` and `?\` are now supported ways to retrieve a codepoint - * [Kernel] Octal numbers can now be defined as `0777` - * [Kernel] Improve macros hygiene regarding variables, aliases and imports - * [Mix] Mix now starts the current application before run, iex, test and friends - * [Mix] Mix now provides basic support for compiling `.erl` files - * [Mix] `mix escriptize` only generates escript if necessary and accept `--force` and `--no-compile` as options - * [Path] Introduce `Path` module to hold filesystem paths related functions - * [String] Add `String.capitalize` and `String.slice` - * [System] Add `System.tmp_dir`, `System.cwd` and `System.user_home` - -* Bug fixes - * [Kernel] `import` with `only` accepts functions starting with underscore - * [String] `String.first` and `String.last` return nil for empty binaries - * [String] `String.rstrip` and `String.lstrip` now verify if argument is a binary - * [Typespec] Support `...` inside typespec's lists - -* Backwards incompatible changes - * [Kernel] The AST now allows metadata to be attached to each node. 
This means the second item in the AST is no longer an integer (representing the line), but a keywords list. Code that relies on the line information from AST or that manually generate AST nodes need to be properly updated - -* Deprecations - * [Dict] Deprecate `Binary.Dict` and `OrdDict` in favor of `HashDict` and `ListDict` - * [File] Deprecate path related functions in favor of the module `Path` - * [Kernel] The `/>` operator has been deprecated in favor of `|>` - * [Mix] `Mix.Project.sources` is deprecated in favor of `Mix.Project.config_files` - * [Mix] `mix iex` is no longer functional, please use `iex -S mix` - * [OptionParser] `:flags` option was deprecated in favor of `:switches` to support many types - -## v0.7.2 (2012-12-04) - -* Enhancements - * [CLI] `--debug-info` is now true by default - * [ExUnit] Make ExUnit exit happen in two steps allowing developers to add custom `at_exit` hooks - * [IEx] Many improvements to helpers functions `h/1`, `s/1` and others - * [Kernel] Functions defined with `fn` can now handle many clauses - * [Kernel] Raise an error if clauses with different arities are defined in the same function - * [Kernel] `function` macro now accepts arguments in `M.f/a` and `f/a` formats - * [Macro] Improvements to `Macro.to_binary` - * [Mix] Mix now echoes the output as it comes when executing external commands such as git or rebar - * [Mix] Mix now validates `application` callback's values - * [Record] Record accessors are now optimized and can be up to 6x faster in some cases - * [String] Support `\xXX` and `\x{HEX}` escape sequences in strings, char lists and regexes - -* Bug fixes - * [Bootstrap] Compiling Elixir source no longer fails if environment variables contain utf-8 entries - * [IEx] IEx will now wait for all command line options to be processed before starting - * [Kernel] Ensure proper stacktraces when showing deprecations - -* Deprecations - * [Enum] `Enum.qsort` is deprecated in favor of `Enum.sort` - * [List] `List.sort` and `List.uniq` have been deprecated in favor of their `Enum` counterparts - * [Record] Default-based generated functions are deprecated - * [Typespec] Enhancements and deprecations to the `@spec/@callback` and the fun type syntax - -## v0.7.1 (2012-11-18) - -* Enhancements - * [IEx] Only show documented functions and also show docs for default generated functions - * [IO] Add `IO.binread`, `IO.binwrite` and `IO.binreadline` to handle raw binary file operations - * [ExUnit] Add support for user configuration at `HOME/.ex_unit.exs` - * [ExUnit] Add support for custom formatters via a well-defined behaviour - * [Kernel] Add support for `defrecordp` - * [Kernel] Improved dialyzer support - * [Kernel] Improved error messages when creating functions with aliases names - * [Mix] Improve SCM behaviour to allow more robust integration - * [Mix] Changing deps information on `mix.exs` forces users to fetch new dependencies - * [Mix] Support (parallel) requires on mix run - * [Mix] Support `-q` when running tests to compile only changed files - * [String] Support `String.downcase` and `String.upcase` according to Unicode 6.2.0 - * [String] Add support for graphemes in `String.length`, `String.at` and others - * [Typespec] Support `@opaque` as attribute - * [Typespec] Define a default type `t` for protocols and records - * [Typespec] Add support for the access protocol in typespecs - -* Bug fixes - * [Kernel] Fix an issue where variables inside clauses remained unassigned - * [Kernel] Ensure `defoverridable` functions can be referred in many 
clauses - * [Kernel] Allow keywords as function names when following a dot (useful when integrating with erlang libraries) - * [File] File is opened by default on binary mode instead of utf-8 - -* Deprecations - * [Behaviour] `defcallback/1` is deprecated in favor of `defcallback/2` which matches erlang `@callbacks` - * [Enum] `Enum.times` is deprecated in favor of using ranges - * [System] `halt` moved to `System` module - -## v0.7.0 (2012-10-20) - -* Enhancements - * [Behaviour] Add Behaviour with a simple callback DSL to define callbacks - * [Binary] Add a Dict binary that converts its keys to binaries on insertion - * [Binary] Optimize `Binary.Inspect` and improve inspect for floats - * [CLI] Support `--detached` option - * [Code] `Code.string_to_ast` supports `:existing_atoms_only` as an option in order to guarantee no new atoms is generated when parsing the code - * [EEx] Support `<%%` and `<%#` tags - * [ExUnit] Support `after_spawn` callbacks which are invoked after each process is spawned - * [ExUnit] Support context data in `setup_all`, `setup`, `teardown` and `teardown_all` callbacks - * [IEx] Support `after_spawn` callbacks which are invoked after each process is spawned - * [Kernel] Better error messages when invalid options are given to `import`, `alias` or `require` - * [Kernel] Allow partial application on literals, for example: `{&1, &2}` to build tuples or `[&1|&2]` to build cons cells - * [Kernel] Added `integer_to_binary` and `binary_to_integer` - * [Kernel] Added `float_to_binary` and `binary_to_float` - * [Kernel] Many improvements to `unquote` and `unquote_splicing`. For example, `unquote(foo).unquote(bar)(args)` is supported and no longer need to be written via `apply` - * [Keyword] Keyword list is no longer ordered according to Erlang terms but the order in which they are specified - * [List] Add `List.keyreplace` and `List.keystore` - * [Macro] Support `Macro.safe_term` which returns `:ok` if an expression does not execute code and is made only of raw data types - * [Mix] Add support for environments - the current environment can be set via `MIX_ENV` - * [Mix] Add support for handling and fetching dependencies' dependencies - * [Module] Support module creation via `Module.create` - * [Range] Support decreasing ranges - * [Record] Improvements to the Record API, added `Record.defmacros` - * [Regex] Add `:return` option to `Regex.run` and `Regex.scan` - * [String] Add a String module responsible for handling UTf-8 binaries - -* Bug fixes - * [File] `File.cp` and `File.cp_r` now preserves the file's mode - * [IEx] Fix a bug where printing to `:stdio` on `IEx` was causing it to hang - * [Macro] Fix a bug where quoted expressions were not behaving the same as their non-quoted counterparts - * [Mix] `mix deps.get [DEPS]` now only gets the specified dependencies - * [Mix] Mix now exits with status 1 in case of failures - * [Protocol] Avoid false positives on protocol dispatch (a bug caused the dispatch to be triggered to an invalid protocol) - -* Backwards incompatible changes - * [ExUnit] `setup` and `teardown` callbacks now receives the test name as second argument - * [Kernel] Raw function definition with `def/4`, `defp/4`, `defmacro/4`, `defmacrop/4` now evaluates all arguments. 
The previous behaviour was accidental and did not properly evaluate all arguments - * [Kernel] Change tuple-related (`elem` and `setelem`), Enum functions (`find_index`, `nth!` and `times`) and List functions (List.key*) to zero-index - -* Deprecations - * [Code] `Code.require_file` and `Code.load_file` now expect the full name as argument - * [Enum] `List.reverse/1` and `List.zip/2` were moved to `Enum` - * [GenServer] Rename `GenServer.Behavior` to `GenServer.Behaviour` - * [Kernel] Bitstring syntax now uses `::` instead of `|` - * [Kernel] `Erlang.` syntax is deprecated in favor of simply using atoms - * [Module] `Module.read_attribute` and `Module.add_attribute` deprecated in favor of `Module.get_attribute` and `Module.put_attribute` which mimics Dict API - -## v0.6.0 (2012-08-01) - -* Backwards incompatible changes - * [Kernel] Compile files now follow `Elixir-ModuleName` convention to solve issues with Erlang embedded mode. This removes the `__MAIN__` pseudo-variable as modules are now located inside `Elixir` namespace - * [Kernel] `__using__` callback triggered by `use` now receives just one argument. Caller information can be accessed via macros using `__CALLER__` - * [Kernel] Comprehensions syntax changed to be more compatible with Erlang behaviour - * [Kernel] loop and recur are removed in favor of recursion with named functions - * [Module] Removed data functions in favor of unifying the attributes API - -* Deprecations - * [Access] The semantics of the access protocol were reduced from a broad query API to simple data structure key-based access - * [ExUnit] Some assertions are deprecated in favor of simply using `assert()` - * [File] `File.read_info` is deprecated in favor of `File.stat` - * [IO] `IO.print` is deprecated in favor of `IO.write` - * [Kernel] Deprecate `__LINE__` and `__FUNCTION__` in favor of `__ENV__.line` and `__ENV__.function` - * [Kernel] Deprecate `in_guard` in favor of `__CALLER__.in_guard?` - * [Kernel] `refer` is deprecated in favor of `alias` - * [Module] `Module.add_compile_callback(module, target, callback)` is deprecated in favor of `Module.put_attribute(module, :before_compile, {target, callback})` - * [Module] `Module.function_defined?` is deprecated in favor of `Module.defines?` - * [Module] `Module.defined_functions` is deprecated in favor of `Module.definitions_in` - -* Enhancements - * [Enum] Enhance Enum protocol to support `Enum.count` - * [Enum] Optimize functions when a list is given as collection - * [Enum] Add `find_index`, `nth!` and others - * [ExUnit] Support setup and teardown callbacks - * [IEx] IEx now provides autocomplete if the OS supports tty - * [IEx] IEx now supports remsh - * [IEx] Elixir now defaults to compile with documentation and `d` can be used in IEx to print modules and functions documentation - * [IEx] Functions `c` and `m` are available in IEx to compile and print available module information. 
Functions `h` and `v` are available to show history and print previous commands values - * [IO/File] Many improvements to `File` and `IO` modules - * [Kernel] Operator `!` is now allowed in guard clauses - * [Kernel] Introduce operator `=~` for regular expression matches - * [Kernel] Compiled docs now include the function signature - * [Kernel] `defmodule` do not start a new variable scope, this improves meta-programming capabilities - * [Kernel] quote special form now supports line and unquote as options - * [Kernel] Document the macro `@` and allow attributes to be read inside functions - * [Kernel] Add support to the `%R` sigil. The same as `%r`, but without interpolation or escaping. Both implementations were also optimized to generate the regex at compilation time - * [Kernel] Add `__ENV__` which returns a `Macro.Env` record with information about the compilation environment - * [Kernel] Add `__CALLER__` inside macros which returns a `Macro.Env` record with information about the calling site - * [Macro] Add `Macro.expand`, useful for debugging what a macro expands to - * [Mix] First Mix public release - * [Module] Add support to `@before_compile` and `@after_compile` callbacks. The first receives the module name while the latter receives the module name and its object code - * [OptionParser] Make OptionParser public, add support to flags and improved switch parsing - * [Range] Add a Range module with support to `in` operator (`x in 1..3`) and iterators - * [Record] Allow `Record[_: value]` to set a default value to all records fields, as in Erlang - * [Record] Records now provide a `to_keywords` function - * [Regex] Back references are now properly supported - * [System] Add `System.find_executable` - -## v0.5.0 (2012-05-24) - -* First official release +The CHANGELOG for v1.4 releases can be found [in the v1.4 branch](https://github.com/elixir-lang/elixir/blob/v1.4/CHANGELOG.md). diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000000..0378acb302c --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,56 @@ +# Code of Conduct + +Contact: elixir-lang-conduct@googlegroups.com + +## Why have a Code of Conduct? + +As contributors and maintainers of this project, we are committed to providing a friendly, safe and welcoming environment for all, regardless of age, disability, gender, nationality, race, religion, sexuality, or similar personal characteristic. + +The goal of the Code of Conduct is to specify a baseline standard of behavior so that people with different social values and communication styles can talk about Elixir effectively, productively, and respectfully, even in face of disagreements. The Code of Conduct also provides a mechanism for resolving conflicts in the community when they arise. + +## Our Values + +These are the values Elixir developers should aspire to: + + * Be friendly and welcoming + * Be patient + * Remember that people have varying communication styles and that not everyone is using their native language. (Meaning and tone can be lost in translation.) + * Be thoughtful + * Productive communication requires effort. Think about how your words will be interpreted. + * Remember that sometimes it is best to refrain entirely from commenting. + * Be respectful + * In particular, respect differences of opinion. It is important that we resolve disagreements and differing views constructively. + * Avoid destructive behavior + * Derailing: stay on topic; if you want to talk about something else, start a new conversation. 
+ * Unconstructive criticism: don't merely decry the current state of affairs; offer (or at least solicit) suggestions as to how things may be improved. + * Snarking (pithy, unproductive, sniping comments). + +The following actions are explicitly forbidden: + + * Insulting, demeaning, hateful, or threatening remarks. + * Discrimination based on age, disability, gender, nationality, race, religion, sexuality, or similar personal characteristic. + * Bullying or systematic harassment. + * Unwelcome sexual advances. + * Incitement to any of these. + +## Where does the Code of Conduct apply? + +If you participate in or contribute to the Elixir ecosystem in any way, you are encouraged to follow the Code of Conduct while doing so. + +Explicit enforcement of the Code of Conduct applies to the official mediums operated by the Elixir project: + +* The official GitHub projects and code reviews. +* The official elixir-lang mailing lists. +* The #elixir-lang IRC channel on Freenode. + +Other Elixir activities (such as conferences, meetups, and other unofficial forums) are encouraged to adopt this Code of Conduct. Such groups must provide their own contact information. + +Project maintainers may remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by emailing: elixir-lang-conduct@googlegroups.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. **All reports will be kept confidential**. + +**The goal of the Code of Conduct is to resolve conflicts in the most harmonious way possible**. We hope that in most cases issues may be resolved through polite discussion and mutual agreement. Bannings and other forceful measures are to be employed only as a last resort. **Do not** post about the issue publicly or try to rally sentiment against a particular individual or group. + +## Acknowledgements + +This document was based on the Code of Conduct from the Go project with parts derived from Django's Code of Conduct, Rust's Code of Conduct and the Contributor Covenant. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 7497c70c66a..00000000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,286 +0,0 @@ -# Contributing to Elixir - -Please take a moment to review this document in order to make the contribution -process easy and effective for everyone involved! - -## Using the issue tracker - -Use the issues tracker for: - -* [bug reports](#bugs-reports) -* [submitting pull requests](#pull-requests) - -Please **do not** use the issues tracker for personal support requests nor feature requests. Support requests should be send to: - -* [the elixir-talk mailing list](http://groups.google.com/group/elixir-lang-talk) -* [Stack Overflow](http://stackoverflow.com/questions/ask?tags=elixir) -* [#elixir-lang](irc://chat.freenode.net/elixir-lang) - -Feature requests can be discussed on [the elixir-core mailing list](http://groups.google.com/group/elixir-lang-core). - -We do our best to keep the issues tracker tidy and organized, making it useful -for everyone. For example, we classify open issues per application and perceived -difficulty of the issue, making it easier for developers to -[contribute to Elixir](#contributing). - -## Bug reports - -A bug is a _demonstrable problem_ that is caused by the code in the repository. 
-Good bug reports are extremely helpful - thank you! - -Guidelines for bug reports: - -1. **Use the GitHub issue search** — check if the issue has already been - reported. - -2. **Check if the issue has been fixed** — try to reproduce it using the - `master` branch in the repository. - -3. **Isolate and report the problem** — ideally create a reduced test - case. - -Please try to be as detailed as possible in your report. Include information about -your Operating System, your Erlang and Elixir versions. Please provide steps to -reproduce the issue as well as the outcome you were expecting! All these details -will help developers to fix any potential bugs. - -Example: - -> Short and descriptive example bug report title -> -> A summary of the issue and the environment in which it occurs. If suitable, -> include the steps required to reproduce the bug. -> -> 1. This is the first step -> 2. This is the second step -> 3. Further steps, etc. -> -> `` - a link to the reduced test case (e.g. a GitHub Gist) -> -> Any other information you want to share that is relevant to the issue being -> reported. This might include the lines of code that you have identified as -> causing the bug, and potential solutions (and your opinions on their -> merits). - -## Feature requests - -Feature requests are welcome and should be discussed on [the elixir-core mailing list](http://groups.google.com/group/elixir-lang-core). But take a moment to find -out whether your idea fits with the scope and aims of the project. It's up to *you* -to make a strong case to convince the community of the merits of this feature. -Please provide as much detail and context as possible. - -## Contributing - -We incentivize everyone to contribute to Elixir and help us tackle -existing issues! To do so, there are a few things you need to know -about the code. First, Elixir code is divided in applications inside -the `lib` folder: - -* `elixir` - Contains Elixir's kernel and stdlib - -* `eex` - Template engine that allows you to embed Elixir - -* `ex_unit` - Simple test framework that ships with Elixir - -* `iex` — IEx, Elixir's interactive shell - -* `mix` — Elixir's build tool - -You can run all tests in the root directory with `make test` and you can -also run tests for a specific framework `make test_#{NAME}`, for example, -`make test_ex_unit`. - -In case you are changing a single file, you can compile and run tests only -for that particular file for fast development cycles. For example, if you -are changing the String module, you can compile it and run its tests as: - - $ bin/elixirc lib/elixir/lib/string.ex -o lib/elixir/ebin - $ bin/elixir lib/elixir/test/elixir/string_test.exs - -After your changes are done, please remember to run the full suite with -`make test`. - -From time to time, your tests may fail in an existing Elixir checkout and -may require a clean start by running `make clean compile`. You can always -check [the official build status on Travis-CI](https://travis-ci.org/elixir-lang/elixir). - -With tests running and passing, you are ready to contribute to Elixir and -send your pull requests. - -### Building on Windows - -There are a few extra steps you'll need to take for contributing from Windows. -Basically, once you have Erlang 17, Git, and MSYS from MinGW on your system, -you're all set. Specifically, here's what you need to do to get up and running: - -1. 
Install [Git](http://www.git-scm.com/download/win), -[Erlang](http://www.erlang.org/download.html), and the -[MinGW Installation Manager](http://sourceforge.net/projects/mingw/files/latest/download?source=files). -2. Use the MinGW Installation Manager to install the msys-bash, msys-make, and -msys-grep packages. -3. Add `;C:\Program Files (x86)\Git\bin;C:\Program Files\erl6.0\bin;C:\Program Files\erl6.0\erts-6.0\bin;C:\MinGW\msys\1.0\bin` -to your "Path" environment variable . (This is under Control Panel > System -and Security > System > Advanced system settings > Environment Variables > -System variables) - -You can now work in the Command Prompt similar to how you would on other OS'es, -except for some things (like creating symlinks) you'll need to run the Command -Prompt as an Administrator. - -## Contributing Documentation - -Code documentation (`@doc`, `@moduledoc`, `@typedoc`) has a special convention: -the first paragraph is considered to be a short summary. - -For functions, macros and callbacks say what it will do. For example write -something like: - -```elixir -@doc """ -Returns only those elements for which `fun` is true. - -... -""" -def filter(collection, fun) ... -``` - -For modules, protocols and types say what it is. For example write -something like: - -```elixir -defmodule File.Stat do - @moduledoc """ - Information about a file. - - ... - """ - - defstruct [...] -end -``` - -Keep in mind that the first paragraph might show up in a summary somewhere, long -texts in the first paragraph create very ugly summaries. As a rule of thumb -anything longer than 80 characters is too long. - -Try to keep unneccesary details out of the first paragraph, it's only there to -give a user a quick idea of what the documented "thing" does/is. The rest of the -documentation string can contain the details, for example when a value and when -`nil` is returned. - -If possible include examples, preferably in a form that works with doctests. For -example: - -```elixir -@doc """ -Return only those elements for which `fun` is true. - -## Examples - - iex> Enum.filter([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) - [2] - -""" -def filter(collection, fun) ... -``` - -This makes it easy to test the examples so that they don't go stale and examples -are often a great help in explaining what a function does. - -## Pull requests - -Good pull requests - patches, improvements, new features - are a fantastic -help. They should remain focused in scope and avoid containing unrelated -commits. - -**IMPORTANT**: By submitting a patch, you agree that your work will be -licensed under the license used by the project. - -If you have any large pull request in mind (e.g. implementing features, -refactoring code, etc), **please ask first** otherwise you risk spending -a lot of time working on something that the project's developers might -not want to merge into the project. - -Please adhere to the coding conventions in the project (indentation, -accurate comments, etc.) and don't forget to add your own tests and -documentation. When working with git, we recommend the following process -in order to craft an excellent pull request: - -1. 
[Fork](http://help.github.com/fork-a-repo/) the project, clone your fork, - and configure the remotes: - - ```bash - # Clone your fork of the repo into the current directory - git clone https://github.com//elixir - # Navigate to the newly cloned directory - cd elixir - # Assign the original repo to a remote called "upstream" - git remote add upstream https://github.com/elixir-lang/elixir - ``` - -2. If you cloned a while ago, get the latest changes from upstream: - - ```bash - git checkout master - git pull upstream master - ``` - -3. Create a new topic branch (off of `master`) to contain your feature, change, - or fix. - - **IMPORTANT**: Making changes in `master` is discouraged. You should always - keep your local `master` in sync with upstream `master` and make your - changes in topic branches. - - ```bash - git checkout -b - ``` - -4. Commit your changes in logical chunks. Keep your commit messages organized, - with a short description in the first line and more detailed information on - the following lines. Feel free to use Git's - [interactive rebase](https://help.github.com/articles/interactive-rebase) - feature to tidy up your commits before making them public. - -5. Make sure all the tests are still passing. - - ```bash - make test - ``` - - This command will compile the code in your branch and use that - version of Elixir to run the tests. This is needed to ensure your changes can - pass all the tests. - -6. Push your topic branch up to your fork: - - ```bash - git push origin - ``` - -7. [Open a Pull Request](https://help.github.com/articles/using-pull-requests/) - with a clear title and description. - -8. If you haven't updated your pull request for a while, you should consider - rebasing on master and resolving any conflicts. - - **IMPORTANT**: _Never ever_ merge upstream `master` into your branches. You - should always `git rebase` on `master` to bring your changes up to date when - necessary. - - ```bash - git checkout master - git pull upstream master - git checkout - git rebase master - ``` - -We have saved some excellent pull requests we have received in the past in case -you are looking for some examples: - -* https://github.com/elixir-lang/elixir/pull/992 -* https://github.com/elixir-lang/elixir/pull/1041 -* https://github.com/elixir-lang/elixir/pull/1058 -* https://github.com/elixir-lang/elixir/pull/1059 - -Thank you for your contributions! diff --git a/ISSUE_TEMPLATE.md b/ISSUE_TEMPLATE.md new file mode 100644 index 00000000000..4249da3a025 --- /dev/null +++ b/ISSUE_TEMPLATE.md @@ -0,0 +1,18 @@ +### Precheck + +* Do not use the issues tracker for help or support (try Elixir Forum, Stack Overflow, IRC, etc.) +* For proposing a new feature, please start a discussion on the Elixir Core mailing list +* For bugs, do a quick search and make sure the bug has not yet been reported +* Finally, be nice and have fun! + +### Environment + +* Elixir & Erlang versions (elixir --version): +* Operating system: + +### Current behavior + +Include code samples, errors and stacktraces if appropriate. 
+ +### Expected behavior + diff --git a/LEGAL b/LEGAL deleted file mode 100644 index 8948b8ad2f0..00000000000 --- a/LEGAL +++ /dev/null @@ -1,8 +0,0 @@ -LEGAL NOTICE INFORMATION ------------------------- - -All the files in this distribution are covered under either Elixir's -license (see the file LICENSE) except the files mentioned below that -contains sections that are under Erlang's License (EPL): - -lib/elixir/src/elixir_parser.erl (generated by build scripts) diff --git a/LICENSE b/LICENSE index d3a92d217c9..425c4c117e7 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright 2012-2013 Plataformatec. +Copyright 2012 Plataformatec Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/Makefile b/Makefile index b660c202c21..98ee261cd4c 100644 --- a/Makefile +++ b/Makefile @@ -1,28 +1,31 @@ -REBAR := rebar +REBAR ?= "$(CURDIR)/rebar" +PREFIX ?= /usr/local +SHARE_PREFIX ?= $(PREFIX)/share +CANONICAL := master/ ELIXIRC := bin/elixirc --verbose --ignore-module-conflict ERLC := erlc -I lib/elixir/include ERL := erl -I lib/elixir/include -noshell -pa lib/elixir/ebin VERSION := $(strip $(shell cat VERSION)) Q := @ -PREFIX := /usr/local LIBDIR := lib +BINDIR := bin INSTALL = install INSTALL_DIR = $(INSTALL) -m755 -d INSTALL_DATA = $(INSTALL) -m644 INSTALL_PROGRAM = $(INSTALL) -m755 +GIT_REVISION = $(strip $(shell git rev-parse HEAD 2> /dev/null )) +GIT_TAG = $(strip $(shell head="$(call GIT_REVISION)"; git tag --points-at $$head 2> /dev/null | tail -1) ) -.PHONY: install compile erlang elixir dialyze test clean docs release_docs release_zip check_erlang_release +.PHONY: install compile erlang elixir build_plt clean_plt dialyze test clean clean_residual_files install_man clean_man docs Docs.zip Precompiled.zip zips .NOTPARALLEL: compile #==> Functions -# This check should work for older versions like R16B -# as well as new verions like 17.1 and 18 define CHECK_ERLANG_RELEASE - $(Q) erl -noshell -eval 'io:fwrite("~s", [erlang:system_info(otp_release)])' -s erlang halt | grep -q '^1[789]'; \ - if [ $$? != 0 ]; then \ - echo "At least Erlang 17.0 is required to build Elixir"; \ - exit 1; \ + $(Q) erl -noshell -eval '{V,_} = string:to_integer(erlang:system_info(otp_release)), io:fwrite("~s", [is_integer(V) and (V >= 18)])' -s erlang halt | grep -q '^true'; \ + if [ $$? 
!= 0 ]; then \ + echo "At least Erlang 18.0 is required to build Elixir"; \ + exit 1; \ fi; endef @@ -32,7 +35,7 @@ $(1): lib/$(1)/ebin/Elixir.$(2).beam lib/$(1)/ebin/$(1).app lib/$(1)/ebin/$(1).app: lib/$(1)/mix.exs $(Q) mkdir -p lib/$(1)/_build/shared/lib/$(1) $(Q) cp -R lib/$(1)/ebin lib/$(1)/_build/shared/lib/$(1)/ - $(Q) cd lib/$(1) && ../../bin/elixir -e "Mix.start(:permanent, [])" -r mix.exs -e "Mix.Task.run('compile.app')" + $(Q) cd lib/$(1) && ../../bin/elixir -e 'Mix.start(:permanent, [])' -r mix.exs -e 'Mix.Task.run("compile.app")' $(Q) cp lib/$(1)/_build/shared/lib/$(1)/ebin/$(1).app lib/$(1)/ebin/$(1).app $(Q) rm -rf lib/$(1)/_build @@ -41,7 +44,7 @@ lib/$(1)/ebin/Elixir.$(2).beam: $(wildcard lib/$(1)/lib/*.ex) $(wildcard lib/$(1 @ rm -rf lib/$(1)/ebin $(Q) cd lib/$(1) && ../../$$(ELIXIRC) "lib/**/*.ex" -o ebin -test_$(1): $(1) +test_$(1): compile $(1) @ echo "==> $(1) (exunit)" $(Q) cd lib/$(1) && ../../bin/elixir -r "test/test_helper.exs" -pr "test/**/*_test.exs"; endef @@ -57,39 +60,41 @@ compile: lib/elixir/src/elixir.app.src erlang elixir lib/elixir/src/elixir.app.src: src/elixir.app.src $(Q) $(call CHECK_ERLANG_RELEASE) - $(Q) rm -rf lib/elixir/src/elixir.app.src + $(Q) rm -f lib/elixir/src/elixir.app.src $(Q) echo "%% This file is automatically generated from /src/elixir.app.src" \ - >lib/elixir/src/elixir.app.src + >lib/elixir/src/elixir.app.src $(Q) cat src/elixir.app.src >>lib/elixir/src/elixir.app.src erlang: - $(Q) cd lib/elixir && ../../$(REBAR) compile + $(Q) cd lib/elixir && $(REBAR) compile -# Since Mix depends on EEx and EEx depends on -# Mix, we first compile EEx without the .app -# file, then mix and then compile EEx fully -elixir: stdlib lib/eex/ebin/Elixir.EEx.beam mix ex_unit eex iex +# Since Mix depends on EEx and EEx depends on Mix, +# we first compile EEx without the .app file, +# then mix and then compile EEx fully +elixir: stdlib lib/eex/ebin/Elixir.EEx.beam mix ex_unit logger eex iex stdlib: $(KERNEL) VERSION -$(KERNEL): lib/elixir/lib/*.ex lib/elixir/lib/*/*.ex - $(Q) if [ ! -f $(KERNEL) ]; then \ - echo "==> bootstrap (compile)"; \ - $(ERL) -s elixir_compiler core -s erlang halt; \ +$(KERNEL): lib/elixir/lib/*.ex lib/elixir/lib/*/*.ex lib/elixir/lib/*/*/*.ex + $(Q) if [ ! 
-f $(KERNEL) ]; then \ + echo "==> bootstrap (compile)"; \ + $(ERL) -s elixir_compiler bootstrap -s erlang halt; \ fi @ echo "==> elixir (compile)"; $(Q) cd lib/elixir && ../../$(ELIXIRC) "lib/kernel.ex" -o ebin; $(Q) cd lib/elixir && ../../$(ELIXIRC) "lib/**/*.ex" -o ebin; $(Q) $(MAKE) unicode - $(Q) rm -rf lib/elixir/ebin/elixir.app - $(Q) cd lib/elixir && ../../$(REBAR) compile + $(Q) rm -f lib/elixir/ebin/elixir.app + $(Q) cd lib/elixir && $(REBAR) compile unicode: $(UNICODE) $(UNICODE): lib/elixir/unicode/* @ echo "==> unicode (compile)"; - @ echo "This step can take up to a minute to compile in order to embed the Unicode database" - $(Q) cd lib/elixir && ../../$(ELIXIRC) unicode/unicode.ex -o ebin; + $(Q) $(ELIXIRC) lib/elixir/unicode/unicode.ex -o lib/elixir/ebin; + $(Q) $(ELIXIRC) lib/elixir/unicode/properties.ex -o lib/elixir/ebin; + $(Q) $(ELIXIRC) lib/elixir/unicode/tokenizer.ex -o lib/elixir/ebin; $(eval $(call APP_TEMPLATE,ex_unit,ExUnit)) +$(eval $(call APP_TEMPLATE,logger,Logger)) $(eval $(call APP_TEMPLATE,eex,EEx)) $(eval $(call APP_TEMPLATE,mix,Mix)) $(eval $(call APP_TEMPLATE,iex,IEx)) @@ -97,60 +102,108 @@ $(eval $(call APP_TEMPLATE,iex,IEx)) install: compile @ echo "==> elixir (install)" $(Q) for dir in lib/*; do \ + rm -rf $(DESTDIR)$(PREFIX)/$(LIBDIR)/elixir/$$dir/ebin; \ $(INSTALL_DIR) "$(DESTDIR)$(PREFIX)/$(LIBDIR)/elixir/$$dir/ebin"; \ $(INSTALL_DATA) $$dir/ebin/* "$(DESTDIR)$(PREFIX)/$(LIBDIR)/elixir/$$dir/ebin"; \ done $(Q) $(INSTALL_DIR) "$(DESTDIR)$(PREFIX)/$(LIBDIR)/elixir/bin" $(Q) $(INSTALL_PROGRAM) $(filter-out %.ps1, $(filter-out %.bat, $(wildcard bin/*))) "$(DESTDIR)$(PREFIX)/$(LIBDIR)/elixir/bin" - $(Q) $(INSTALL_DIR) "$(DESTDIR)$(PREFIX)/bin" - $(Q) for file in "$(DESTDIR)$(PREFIX)"/$(LIBDIR)/elixir/bin/* ; do \ - ln -sf "../$(LIBDIR)/elixir/bin/$${file##*/}" "$(DESTDIR)$(PREFIX)/bin/" ; \ + $(Q) $(INSTALL_DIR) "$(DESTDIR)$(PREFIX)/$(BINDIR)" + $(Q) for file in "$(DESTDIR)$(PREFIX)"/$(LIBDIR)/elixir/bin/*; do \ + ln -sf "../$(LIBDIR)/elixir/bin/$${file##*/}" "$(DESTDIR)$(PREFIX)/$(BINDIR)/"; \ done + $(MAKE) install_man clean: - cd lib/elixir && ../../$(REBAR) clean + cd lib/elixir && $(REBAR) clean rm -rf ebin rm -rf lib/*/ebin - rm -rf lib/elixir/test/ebin - rm -rf lib/*/tmp - rm -rf lib/mix/test/fixtures/git_repo - rm -rf lib/mix/test/fixtures/deps_on_git_repo - rm -rf lib/mix/test/fixtures/git_rebar - rm -rf lib/elixir/src/elixir.app.src - -clean_exbeam: - $(Q) rm -f lib/*/ebin/Elixir.*.beam - -#==> Release tasks + rm -f lib/elixir/src/elixir.app.src + rm -f Docs-v*.zip + rm -f Precompiled-v*.zip + $(Q) $(MAKE) clean_residual_files -SOURCE_REF = $(shell head="$$(git rev-parse HEAD)" tag="$$(git tag --points-at $$head | tail -1)" ; echo "$${tag:-$$head}\c") -DOCS = bin/elixir ../ex_doc/bin/ex_doc "$(1)" "$(VERSION)" "lib/$(2)/ebin" -m "$(3)" -u "https://github.com/elixir-lang/elixir" --source-ref "$(call SOURCE_REF)" -o docs/$(2) -p http://elixir-lang.org/docs.html +clean_elixir: + $(Q) rm -f lib/*/ebin/Elixir.*.beam -docs: compile ../ex_doc/bin/ex_doc - $(Q) rm -rf docs - $(call DOCS,Elixir,elixir,Kernel) - $(call DOCS,EEx,eex,EEx) - $(call DOCS,Mix,mix,Mix) - $(call DOCS,IEx,iex,IEx) - $(call DOCS,ExUnit,ex_unit,ExUnit) +clean_residual_files: + rm -rf lib/*/_build/ + rm -rf lib/*/tmp/ + rm -rf lib/elixir/test/ebin/ + rm -rf lib/mix/test/fixtures/deps_on_git_repo/ + rm -rf lib/mix/test/fixtures/git_rebar/ + rm -rf lib/mix/test/fixtures/git_repo/ + rm -rf lib/mix/test/fixtures/git_sparse_repo/ + rm -f erl_crash.dump + $(Q) $(MAKE) clean_man 
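For orientation, a minimal sketch of how the build, install, and clean targets defined above are typically invoked from a checkout; the paths are illustrative only (`PREFIX` already defaults to `/usr/local` in this Makefile, and `DESTDIR` is the usual staging prefix):

```sh
# Illustrative usage of the Makefile targets defined above
make compile                      # bootstrap from the Erlang sources and compile the applications in lib/
make install PREFIX=/usr/local    # copy ebin/ trees and bin/ scripts under $PREFIX/lib/elixir, plus man pages
make install DESTDIR=/tmp/stage   # same layout, staged under a packaging root
make clean                        # rebar clean plus removal of generated files and release zips
```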
+ +#==> Documentation tasks + +LOGO_PATH = $(shell test -f ../docs/logo.png && echo "--logo ../docs/logo.png") +SOURCE_REF = $(shell tag="$(call GIT_TAG)" revision="$(call GIT_REVISION)"; echo "$${tag:-$$revision}\c") +DOCS_FORMAT = html +COMPILE_DOCS = bin/elixir ../ex_doc/bin/ex_doc "$(1)" "$(VERSION)" "lib/$(2)/ebin" -m "$(3)" -u "https://github.com/elixir-lang/elixir" --source-ref "$(call SOURCE_REF)" $(call LOGO_PATH) -o doc/$(2) -n https://hexdocs.pm/$(2)/$(CANONICAL) -p http://elixir-lang.org/docs.html -f "$(DOCS_FORMAT)" $(4) + +docs: compile ../ex_doc/bin/ex_doc docs_elixir docs_eex docs_mix docs_iex docs_ex_unit docs_logger + +docs_elixir: compile ../ex_doc/bin/ex_doc + @ echo "==> ex_doc (elixir)" + $(Q) rm -rf doc/elixir + $(call COMPILE_DOCS,Elixir,elixir,Kernel,-e "lib/elixir/pages/Behaviours.md" -e "lib/elixir/pages/Deprecations.md" -e "lib/elixir/pages/Guards.md" -e "lib/elixir/pages/Naming Conventions.md" -e "lib/elixir/pages/Operators.md" -e "lib/elixir/pages/Syntax Reference.md" -e "lib/elixir/pages/Typespecs.md" -e "lib/elixir/pages/Unicode Syntax.md" -e "lib/elixir/pages/Writing Documentation.md") + +docs_eex: compile ../ex_doc/bin/ex_doc + @ echo "==> ex_doc (eex)" + $(Q) rm -rf doc/eex + $(call COMPILE_DOCS,EEx,eex,EEx) + +docs_mix: compile ../ex_doc/bin/ex_doc + @ echo "==> ex_doc (mix)" + $(Q) rm -rf doc/mix + $(call COMPILE_DOCS,Mix,mix,Mix) + +docs_iex: compile ../ex_doc/bin/ex_doc + @ echo "==> ex_doc (iex)" + $(Q) rm -rf doc/iex + $(call COMPILE_DOCS,IEx,iex,IEx) + +docs_ex_unit: compile ../ex_doc/bin/ex_doc + @ echo "==> ex_doc (ex_unit)" + $(Q) rm -rf doc/ex_unit + $(call COMPILE_DOCS,ExUnit,ex_unit,ExUnit) + +docs_logger: compile ../ex_doc/bin/ex_doc + @ echo "==> ex_doc (logger)" + $(Q) rm -rf doc/logger + $(call COMPILE_DOCS,Logger,logger,Logger) ../ex_doc/bin/ex_doc: @ echo "ex_doc is not found in ../ex_doc as expected. See README for more information." 
@ false -release_zip: compile - rm -rf v$(VERSION).zip - zip -9 -r v$(VERSION).zip bin CHANGELOG.md LEGAL lib/*/ebin LICENSE README.md VERSION +#==> Zip tasks + +Docs.zip: docs + rm -f Docs-v$(VERSION).zip + zip -9 -r Docs-v$(VERSION).zip CHANGELOG.md doc NOTICE LICENSE README.md + @ echo "Docs file created $(CURDIR)/Docs-v$(VERSION).zip" -release_docs: docs - cd ../docs - rm -rf ../docs/master - mv docs ../docs/master +Precompiled.zip: build_man compile + rm -f Precompiled-v$(VERSION).zip + zip -9 -r Precompiled-v$(VERSION).zip bin CHANGELOG.md lib/*/ebin LICENSE man NOTICE README.md VERSION + @ echo "Precompiled file created $(CURDIR)/Precompiled-v$(VERSION).zip" -#==> Tests tasks +zips: Precompiled.zip Docs.zip + +#==> Test tasks test: test_erlang test_elixir +test_windows: test test_taskkill + +test_taskkill: + taskkill //IM erl.exe //F //T //FI "MEMUSAGE gt 0" + taskkill //IM epmd.exe //F //T //FI "MEMUSAGE gt 0" + TEST_ERL = lib/elixir/test/erlang TEST_EBIN = lib/elixir/test/ebin TEST_ERLS = $(addprefix $(TEST_EBIN)/, $(addsuffix .beam, $(basename $(notdir $(wildcard $(TEST_ERL)/*.erl))))) @@ -164,25 +217,61 @@ $(TEST_EBIN)/%.beam: $(TEST_ERL)/%.erl $(Q) mkdir -p $(TEST_EBIN) $(Q) $(ERLC) -o $(TEST_EBIN) $< -test_elixir: test_stdlib test_ex_unit test_doc_test test_mix test_eex test_iex - -test_doc_test: compile - @ echo "==> doctest (exunit)" - $(Q) cd lib/elixir && ../../bin/elixir -r "test/doc_test.exs"; +test_elixir: test_stdlib test_ex_unit test_logger test_mix test_eex test_iex test_stdlib: compile @ echo "==> elixir (exunit)" $(Q) exec epmd & exit - $(Q) cd lib/elixir && ../../bin/elixir -r "test/elixir/test_helper.exs" -pr "test/elixir/**/*_test.exs"; + $(Q) if [ "$(OS)" = "Windows_NT" ]; then \ + cd lib/elixir && cmd //C call ../../bin/elixir.bat -r "test/elixir/test_helper.exs" -pr "test/elixir/**/*_test.exs"; \ + else \ + cd lib/elixir && ../../bin/elixir -r "test/elixir/test_helper.exs" -pr "test/elixir/**/*_test.exs"; \ + fi + +#==> Dialyzer tasks + +DIALYZER_OPTS = --no_check_plt --fullpath -Werror_handling -Wunmatched_returns -Wunderspecs +PLT = .elixir.plt + +$(PLT): + @ echo "==> Building PLT with Elixir's dependencies..." + $(Q) dialyzer --output_plt $(PLT) --build_plt --apps erts kernel stdlib compiler syntax_tools parsetools tools ssl inets + +clean_plt: + $(Q) rm -f $(PLT) -.dialyzer.base_plt: - @ echo "==> Adding Erlang/OTP basic applications to a new base PLT" - $(Q) dialyzer --output_plt .dialyzer.base_plt --build_plt --apps erts kernel stdlib compiler tools syntax_tools parsetools +build_plt: clean_plt $(PLT) -dialyze: .dialyzer.base_plt - $(Q) rm -f .dialyzer_plt - $(Q) cp .dialyzer.base_plt .dialyzer_plt - @ echo "==> Adding Elixir to PLT..." - $(Q) dialyzer --plt .dialyzer_plt --add_to_plt -r lib/elixir/ebin lib/ex_unit/ebin lib/eex/ebin lib/iex/ebin lib/mix/ebin +dialyze: compile $(PLT) @ echo "==> Dialyzing Elixir..." 
- $(Q) dialyzer --plt .dialyzer_plt -r lib/elixir/ebin lib/ex_unit/ebin lib/eex/ebin lib/iex/ebin lib/mix/ebin + $(Q) dialyzer --plt $(PLT) $(DIALYZER_OPTS) lib/*/ebin + +#==> Man page tasks + +build_man: man/iex.1 man/elixir.1 + +man/iex.1: + $(Q) cp man/iex.1.in man/iex.1 + $(Q) sed -i.bak "/{COMMON}/r common" man/iex.1 + $(Q) sed -i.bak "/{COMMON}/d" man/iex.1 + $(Q) rm -f man/iex.1.bak + +man/elixir.1: + $(Q) cp man/elixir.1.in man/elixir.1 + $(Q) sed -i.bak "/{COMMON}/r common" man/elixir.1 + $(Q) sed -i.bak "/{COMMON}/d" man/elixir.1 + $(Q) rm -f man/elixir.1.bak + +clean_man: + rm -f man/elixir.1 + rm -f man/elixir.1.bak + rm -f man/iex.1 + rm -f man/iex.1.bak + +install_man: build_man + $(Q) mkdir -p $(DESTDIR)$(SHARE_PREFIX)/man/man1 + $(Q) $(INSTALL_DATA) man/elixir.1 $(DESTDIR)$(SHARE_PREFIX)/man/man1 + $(Q) $(INSTALL_DATA) man/elixirc.1 $(DESTDIR)$(SHARE_PREFIX)/man/man1 + $(Q) $(INSTALL_DATA) man/iex.1 $(DESTDIR)$(SHARE_PREFIX)/man/man1 + $(Q) $(INSTALL_DATA) man/mix.1 $(DESTDIR)$(SHARE_PREFIX)/man/man1 + $(MAKE) clean_man diff --git a/NOTICE b/NOTICE new file mode 100644 index 00000000000..01a35dac3ac --- /dev/null +++ b/NOTICE @@ -0,0 +1,22 @@ +LEGAL NOTICE INFORMATION +------------------------ + +All the files in this distribution are copyright (c) 2012 Plataformatec +covered under Elixir's license (see the file LICENSE) except the cases +below. + +== lib/elixir/src/elixir_parser.erl (generated by build scripts) + +Copyright Ericsson AB 1996-2015 + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/README.md b/README.md index 5bfeb70f5ff..741904d8364 100644 --- a/README.md +++ b/README.md @@ -1,57 +1,183 @@ ![Elixir](https://github.com/elixir-lang/elixir-lang.github.com/raw/master/images/logo/logo.png) ========= -[![Build Status](https://secure.travis-ci.org/elixir-lang/elixir.svg?branch=master "Build Status")](http://travis-ci.org/elixir-lang/elixir) +[![Travis build](https://secure.travis-ci.org/elixir-lang/elixir.svg?branch=master +"Build Status")](https://travis-ci.org/elixir-lang/elixir) +[![Windows build](https://ci.appveyor.com/api/projects/status/macwuxq7aiiv61g1?svg=true)](https://ci.appveyor.com/project/josevalim/elixir) -For more about Elixir, installation and documentation, [check Elixir's website](http://elixir-lang.org/). -## Usage +Elixir is a dynamic, functional language designed for building scalable and maintainable applications. -If you want to contribute to Elixir or run it from source, clone this repository to your machine, compile and test it: +For more about Elixir, installation and documentation, +[check Elixir's website](http://elixir-lang.org/). - $ git clone https://github.com/elixir-lang/elixir.git - $ cd elixir - $ make clean test +## Compiling from source -If Elixir fails to build (specifically when pulling in a new version via git), be sure to remove any previous build artifacts by running `make clean`, then `make test`. 
+To run Elixir from source, clone this repository to your machine, compile and test it: -If tests pass, you are ready to move on to the [Getting Started guide][1] or to try Interactive Elixir by running: `bin/iex` in your terminal. +```sh +git clone https://github.com/elixir-lang/elixir.git +cd elixir +make clean test +``` -However, if tests fail, it is likely you have an outdated Erlang version (Elixir requires Erlang 17.0 or later). You can check your Erlang version by calling `erl` in the command line. You will see some information as follows: +> Note: if you are running on Windows, +[this article includes important notes for compiling Elixir from source +on Windows](https://github.com/elixir-lang/elixir/wiki/Windows). - Erlang/OTP 17 [erts-6.0] [source-07b8f44] [64-bit] [smp:4:4] [async-threads:10] [hipe] [kernel-poll:false] +If Elixir fails to build (specifically when pulling in a new version via +`git`), be sure to remove any previous build artifacts by running +`make clean`, then `make test`. -If you have the correct version and tests still fail, feel free to [open an issue][2]. +If tests pass, you are ready to move on to the [Getting Started guide][1] +or to try Interactive Elixir by running `bin/iex` in your terminal. -## Building documentation +However, if tests fail, it is likely you have an outdated Erlang version +(Elixir requires Erlang 18.0 or later). You can check your Erlang version +by calling `erl` in the command line. You will see some information as follows: + + Erlang/OTP 18 [erts-7.0] [source] [smp:2:2] [async-threads:10] [hipe] [kernel-poll:false] + +If you have properly set up your dependencies and tests still fail, +you may want to open up a bug report, as explained next. -Building the documentation requires [ex_doc](https://github.com/elixir-lang/ex_doc) to be installed and built in the same containing folder as elixir. +## Bug reports - # After cloning and compiling Elixir - $ git clone git://github.com/elixir-lang/ex_doc.git - $ cd ex_doc && ../elixir/bin/mix compile - $ cd ../elixir && make docs +For reporting bugs, [visit our issues tracker][2] and follow the steps +for reporting a new issue. Please disclose security vulnerabilities +privately at elixir-security@googlegroups.com. ## Contributing -We appreciate any contribution to Elixir, so check out our [CONTRIBUTING.md](CONTRIBUTING.md) guide for more information. We usually keep a list of features and bugs [in the issue tracker][2]. +We welcome everyone to contribute to Elixir and help us tackle existing issues! +To do so, there are a few things you need to know about the code. First, Elixir +code is divided in applications inside the `lib` folder: + +* `elixir` - Contains Elixir's kernel and stdlib + +* `eex` - Template engine that allows you to embed Elixir + +* `ex_unit` - Simple test framework that ships with Elixir + +* `iex` - IEx, Elixir's interactive shell + +* `logger` - The built-in logger + +* `mix` - Elixir's build tool + +You can run all tests in the root directory with `make test` and you can +also run tests for a specific framework `make test_#{NAME}`, for example, +`make test_ex_unit`. If you just changed something in the Elixir's standard +library, you can run only that portion through `make test_stdlib`, as +`test_elixir` also runs tests for the other projects (EEx, ExUnit, etc.). + +In case you are changing a single file, you can compile and run tests only +for that particular file for fast development cycles. 
For example, if you +are changing the String module, you can compile it and run its tests as: + +```sh +bin/elixirc lib/elixir/lib/string.ex -o lib/elixir/ebin +bin/elixir lib/elixir/test/elixir/string_test.exs +``` + +To recompile (including Erlang modules): + +```sh +make compile +``` + +If your contribution fails the build during the bootstrapping of the language, +you can reproduce it locally by deleting all of Elixir beam files and compiling +again: + +```sh +make clean_elixir compile +``` + +Or to rebuild everything from scratch without running tests: + +```sh +make clean compile +``` + +More tasks can be found by reading the [Makefile](./Makefile). + +After your changes are done, please remember to run the full suite with +`make test`. + +From time to time, your tests may fail in an existing Elixir checkout and +may require a clean start by running `make clean compile`. You can always +check [the official build status on Travis-CI](https://travis-ci.org/elixir-lang/elixir). + +With tests running and passing, you are ready to contribute to Elixir and +[send a pull request](https://help.github.com/articles/using-pull-requests/). +We have saved some excellent pull requests we have received in the past in +case you are looking for some examples: + +* [Implement Enum.member? – Pull Request](https://github.com/elixir-lang/elixir/pull/992) +* [Add String.valid? – Pull Request](https://github.com/elixir-lang/elixir/pull/1058) +* [Implement capture_io for ExUnit – Pull Request](https://github.com/elixir-lang/elixir/pull/1059) + +We usually keep a list of enhancements and bugs [in the issue tracker][2]. +For proposing new features, please start a discussion in the +[Elixir Core mailing list][3]. Keep in mind that it is your responsibility +to argue and explain why a feature is useful and how it will impact the +codebase and the community. Finally, remember all interactions in our official +spaces follow our [Code of Conduct][7]. + +### Reviewing changes + +Once a pull request is sent, the Elixir team will review your changes. +We outline our process below to clarify the roles of everyone involved. + +All pull requests must be approved by two committers before being merged into +the repository. In case any changes are necessary, the team will leave +appropriate comments requesting changes to the code. + +The Elixir team may optionally assign someone to review a pull request. +In case someone is assigned, they must explicitly approve the code before +another team member can merge it. + +When review is completed, your pull request will be squashed and merged +into the repository. + +## Building documentation + +Building the documentation requires [ExDoc](https://github.com/elixir-lang/ex_doc) +to be installed and built alongside Elixir: + +```sh +# After cloning and compiling Elixir, in its parent directory: +git clone git://github.com/elixir-lang/ex_doc.git +cd ex_doc && ../elixir/bin/mix do deps.get, compile +cd ../elixir && make docs +``` + +This will produce documentation sets for `elixir`, `mix`, etc., under +the `doc` directory. If you are planning to contribute documentation, +[please check our best practices for writing documentation](https://hexdocs.pm/elixir/writing-documentation.html). 
-## Important links +## Development links -* \#elixir-lang on freenode IRC -* [Website][1] -* [Issue tracker][2] -* [elixir-talk Mailing list (questions)][3] -* [elixir-core Mailing list (development)][4] + * [Elixir Website][1] + * [Elixir Documentation][6] + * [Elixir Core Mailing list (development)][3] + * [Issues tracker][2] + * [Code of Conduct][7] + * **[#elixir-lang][4]** on [Freenode][5] IRC [1]: http://elixir-lang.org [2]: https://github.com/elixir-lang/elixir/issues - [3]: http://groups.google.com/group/elixir-lang-talk - [4]: http://groups.google.com/group/elixir-lang-core + [3]: https://groups.google.com/group/elixir-lang-core + [4]: https://webchat.freenode.net/?channels=#elixir-lang + [5]: http://www.freenode.net + [6]: http://elixir-lang.org/docs.html + [7]: CODE_OF_CONDUCT.md ## License "Elixir" and the Elixir logo are copyright (c) 2012 Plataformatec. -Elixir source code is released under Apache 2 License with some parts under Erlang's license (EPL). +Elixir source code is released under Apache 2 License. -Check [LEGAL](LEGAL) and [LICENSE](LICENSE) files for more information. +Check [NOTICE](NOTICE) and [LICENSE](LICENSE) files for more +information. diff --git a/RELEASE.md b/RELEASE.md index 965ace69e87..7d311b2168b 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,31 +1,36 @@ # Release process +## All releases + This document simply outlines the release process: -1. Remove `-dev` extension from VERSION +1. Ensure you are running on the oldest supported Erlang version + +2. Remove all `-dev` extension from versions (see below for all files) -2. Ensure CHANGELOG is updated and timestamp +3. Ensure CHANGELOG is updated and add current date -3. Commit changes above with title "Release vVERSION" and generate new tag +4. If a new `vMAJOR.MINOR`, update in [Deprecations](lib/elixir/pages/Deprecations.md) page the link + to [vVERSION]'s changelog (located at the end of the file) by replacing "master" with "vVERSION" -4. Run `make clean test` to ensure all tests pass from scratch and the CI is green +5. Commit changes above with title "Release vVERSION" and generate new tag -5. Push master and tags +6. Run `make clean test` to ensure all tests pass from scratch and the CI is green -6. Release new docs with `make release_docs`, move docs to `docs/stable` +7. Recompile an existing project (for example, Ecto) to ensure manifests can be upgraded -7. Release new zip with `make release_zip`, push new zip to GitHub Releases +8. Push branch and the new tag -8. Merge master into stable branch and push it +9. If a new `vMAJOR.MINOR`, create a new branch "vMAJOR.MINOR" and set `CANONICAL=` in Makefile before building docs -9. After release, bump versions, add `-dev` back and commit +10. Publish new zips with `make zips`, upload `Precompiled.zip` and `Docs.zip` to GitHub Releases -10. `make release_docs` once again and push it to `elixir-lang.org` +11. Add the release to `elixir.csv` and `_data/elixir-versions.yml` files in `elixir-lang/elixir-lang.github.com` -11. Also update `release` file in `elixir-lang.org` +12. 
After a new `vMAJOR.MINOR`, move back to master, bump versions, start new CHANGELOG, add `-dev` back and commit "Start vMAJOR.MINOR+1" ## Places where version is mentioned * VERSION -* CHANGELOG -* src/elixir.app.src +* CHANGELOG.md +* src/elixir.app.src (not lib/elixir/src/elixir.app.src) diff --git a/VERSION b/VERSION index 49ccc4f4b86..9df4287a1c4 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.14.3-dev \ No newline at end of file +1.5.0-dev \ No newline at end of file diff --git a/bin/elixir b/bin/elixir index d63c1d206fc..42f2a6ad5ee 100755 --- a/bin/elixir +++ b/bin/elixir @@ -2,25 +2,30 @@ if [ $# -eq 0 ] || [ "$1" = "--help" ] || [ "$1" = "-h" ]; then echo "Usage: `basename $0` [options] [.exs file] [data] - -v Prints version and exit - -e \"command\" Evaluates the given command (*) - -r \"file\" Requires the given files/patterns (*) - -S \"script\"   Finds and executes the given script - -pr \"file\" Requires the given files/patterns in parallel (*) - -pa \"path\" Prepends the given path to Erlang code path (*) - -pz \"path\" Appends the given path to Erlang code path (*) - --app \"app\" Start the given app and its dependencies (*) - --erl \"switches\" Switches to be passed down to erlang (*) - --name \"name\" Makes and assigns a name to the distributed node - --sname \"name\" Makes and assigns a short name to the distributed node - --cookie \"cookie\" Sets a cookie for this distributed node - --hidden Makes a hidden node - --detached Starts the Erlang VM detached from console - --no-halt Does not halt the Erlang VM after execution + -e COMMAND Evaluates the given command (*) + -r FILE Requires the given files/patterns (*) + -S SCRIPT   Finds and executes the given script in PATH + -pr FILE Requires the given files/patterns in parallel (*) + -pa PATH Prepends the given path to Erlang code path (*) + -pz PATH Appends the given path to Erlang code path (*) + + --app APP Starts the given app and its dependencies (*) + --cookie COOKIE Sets a cookie for this distributed node + --detached Starts the Erlang VM detached from console + --erl SWITCHES Switches to be passed down to Erlang (*) + --help, -h Prints this message and exits + --hidden Makes a hidden node + --logger-otp-reports BOOL Enables or disables OTP reporting + --logger-sasl-reports BOOL Enables or disables SASL reporting + --name NAME Makes and assigns a name to the distributed node + --no-halt Does not halt the Erlang VM after execution + --sname NAME Makes and assigns a short name to the distributed node + --version, -v Prints Elixir version and exits + --werl Uses Erlang's Windows shell GUI (Windows only) ** Options marked with (*) can be given more than once ** Options given after the .exs file or -- are passed down to the executed code -** Options can be passed to the erlang runtime using ELIXIR_ERL_OPTIONS or --erl" >&2 +** Options can be passed to the Erlang runtime using ELIXIR_ERL_OPTIONS or --erl" >&2 exit 1 fi @@ -35,6 +40,7 @@ readlink_f () { } MODE="elixir" +ERL_EXEC="erl" ERL="" I=1 @@ -66,11 +72,28 @@ while [ $I -le $# ]; do eval "VAL=\${$I}" ERL="$ERL `echo $PEEK | cut -c 2-` "$VAL"" ;; + --logger-otp-reports) + I=$(expr $I + 1) + eval "VAL=\${$I}" + if [ "$VAL" = 'true' ] || [ "$VAL" = 'false' ]; then + ERL="$ERL -logger handle_otp_reports "$VAL"" + fi + ;; + --logger-sasl-reports) + I=$(expr $I + 1) + eval "VAL=\${$I}" + if [ "$VAL" = 'true' ] || [ "$VAL" = 'false' ]; then + ERL="$ERL -logger handle_sasl_reports "$VAL"" + fi + ;; --erl) I=$(expr $I + 1) eval "VAL=\${$I}" ERL="$ERL "$VAL"" ;; + --werl) + 
USE_WERL=true + ;; *) break ;; @@ -80,13 +103,24 @@ done SELF=$(readlink_f "$0") SCRIPT_PATH=$(dirname "$SELF") -if [ "$MODE" != "iex" ]; then ERL="$ERL -noshell -s elixir start_cli"; fi + +if [ "$OSTYPE" = "cygwin" ]; then SCRIPT_PATH=$(cygpath -m "$SCRIPT_PATH"); fi +if [ "$MODE" != "iex" ]; then ERL="-noshell -s elixir start_cli $ERL"; fi + +# Check for terminal support +if [ "$OS" != "Windows_NT" ]; then + if test -t 1 -a -t 2; then ERL="-elixir ansi_enabled true $ERL"; fi +fi + +if [ "$OS" = "Windows_NT" ] && [ $USE_WERL ]; then + ERL_EXEC="werl" +fi if [ -z "$ERL_PATH" ]; then if [ -f "$SCRIPT_PATH/../releases/RELEASES" ] && [ -f "$SCRIPT_PATH/erl" ]; then - ERL_PATH="$SCRIPT_PATH"/erl + ERL_PATH="$SCRIPT_PATH"/"$ERL_EXEC" else - ERL_PATH=erl + ERL_PATH="$ERL_EXEC" fi fi diff --git a/bin/elixir.bat b/bin/elixir.bat index 3ed6dda1150..89c7d2c47ce 100644 --- a/bin/elixir.bat +++ b/bin/elixir.bat @@ -1,33 +1,40 @@ -@echo off -if "%1"=="" goto documentation -if "%1"=="--help" goto documentation -if "%1"=="-h" goto documentation -if "%1"=="/h" goto documentation +@if defined ELIXIR_CLI_ECHO (@echo on) else (@echo off) +setlocal +if ""%1""=="""" goto documentation +if /I ""%1""==""--help"" goto documentation +if /I ""%1""==""-h"" goto documentation +if /I ""%1""==""/h"" goto documentation +if ""%1""==""/?"" goto documentation goto parseopts :documentation echo Usage: %~nx0 [options] [.exs file] [data] echo. -echo -v Prints version and exit -echo -e command Evaluates the given command (*) -echo -r file Requires the given files/patterns (*) -echo -S script Finds and executes the given script -echo -pr file Requires the given files/patterns in parallel (*) -echo -pa path Prepends the given path to Erlang code path (*) -echo -pz path Appends the given path to Erlang code path (*) -echo --app app Start the given app and its dependencies (*) -echo --erl switches Switches to be passed down to erlang (*) -echo --name name Makes and assigns a name to the distributed node -echo --sname name Makes and assigns a short name to the distributed node -echo --cookie cookie Sets a cookie for this distributed node -echo --hidden Makes a hidden node -echo --detached Starts the Erlang VM detached from console -echo --no-halt Does not halt the Erlang VM after execution +echo -e COMMAND Evaluates the given command (*) +echo -r FILE Requires the given files/patterns (*) +echo -S SCRIPT Finds and executes the given script in PATH +echo -pr FILE Requires the given files/patterns in parallel (*) +echo -pa PATH Prepends the given path to Erlang code path (*) +echo -pz PATH Appends the given path to Erlang code path (*) +echo. +echo --app APP Starts the given app and its dependencies (*) +echo --cookie COOKIE Sets a cookie for this distributed node +echo --detached Starts the Erlang VM detached from console +echo --erl SWITCHES Switches to be passed down to Erlang (*) +echo --help, -h Prints this message and exits +echo --hidden Makes a hidden node +echo --logger-otp-reports BOOL Enables or disables OTP reporting +echo --logger-sasl-reports BOOL Enables or disables SASL reporting +echo --name NAME Makes and assigns a name to the distributed node +echo --no-halt Does not halt the Erlang VM after execution +echo --sname NAME Makes and assigns a short name to the distributed node +echo --version, -v Prints Elixir version and exits +echo --werl Uses Erlang's Windows shell GUI echo. 
echo ** Options marked with (*) can be given more than once echo ** Options given after the .exs file or -- are passed down to the executed code -echo ** Options can be passed to the erlang runtime using ELIXIR_ERL_OPTIONS or --erl -goto :EOF +echo ** Options can be passed to the Erlang runtime using ELIXIR_ERL_OPTIONS or --erl +goto end :parseopts @@ -46,50 +53,62 @@ set beforeExtra= rem Flag which determines whether or not to use werl vs erl set useWerl=0 +rem Designates which mode / Elixir component to run as +set runMode="elixir" + rem Recursive loop called for each parameter that parses the cmd line parameters :startloop set par="%1" shift if "%par%"=="" ( rem if no parameters defined - goto :expand_erl_libs + goto expand_erl_libs ) if "%par%"=="""" ( rem if no parameters defined - special case for parameter that is already quoted - goto :expand_erl_libs + goto expand_erl_libs ) rem ******* EXECUTION OPTIONS ********************** -IF "%par%"==""+iex"" (Set useWerl=1) -rem ******* ERLANG PARAMETERS ********************** -IF NOT "%par%"=="%par:--detached=%" (Set parsErlang=%parsErlang% -detached) -IF NOT "%par%"=="%par:--hidden=%" (Set parsErlang=%parsErlang% -hidden) -IF NOT "%par%"=="%par:--cookie=%" (Set parsErlang=%parsErlang% -setcookie %1 && shift) -IF NOT "%par%"=="%par:--sname=%" (Set parsErlang=%parsErlang% -sname %1 && shift) -IF NOT "%par%"=="%par:--name=%" (Set parsErlang=%parsErlang% -name %1 && shift) -IF NOT "%par%"=="%par:--erl=%" (Set beforeExtra=%beforeExtra% %~1 && shift) -rem ******* elixir parameters ********************** +if "%par%"==""--werl"" (set useWerl=1) +if "%par%"==""+iex"" (set runMode="iex") +rem ******* ELIXIR PARAMETERS ********************** rem Note: we don't have to do anything with options that don't take an argument -IF NOT "%par%"=="%par:-e=%" (shift) -IF NOT "%par%"=="%par:-r=%" (shift) -IF NOT "%par%"=="%par:-pr=%" (shift) -IF NOT "%par%"=="%par:-pa=%" (shift) -IF NOT "%par%"=="%par:-pz=%" (shift) -IF NOT "%par%"=="%par:--app=%" (shift) -IF NOT "%par%"=="%par:--remsh=%" (shift) +if """"=="%par:-e=%" (shift) +if """"=="%par:-r=%" (shift) +if """"=="%par:-pr=%" (shift) +if """"=="%par:-pa=%" (shift) +if """"=="%par:-pz=%" (shift) +if """"=="%par:--app=%" (shift) +if """"=="%par:--remsh=%" (shift) +rem ******* ERLANG PARAMETERS ********************** +if """"=="%par:--detached=%" (set parsErlang=%parsErlang% -detached) +if """"=="%par:--hidden=%" (set parsErlang=%parsErlang% -hidden) +if """"=="%par:--cookie=%" (set parsErlang=%parsErlang% -setcookie %1 && shift) +if """"=="%par:--sname=%" (set parsErlang=%parsErlang% -sname %1 && shift) +if """"=="%par:--name=%" (set parsErlang=%parsErlang% -name %1 && shift) +if """"=="%par:--logger-otp-reports=%" (set parsErlang=%parsErlang% -logger handle_otp_reports %1 && shift) +if """"=="%par:--logger-sasl-reports=%" (set parsErlang=%parsErlang% -logger handle_sasl_reports %1 && shift) +if """"=="%par:--erl=%" (set beforeExtra=%beforeExtra% %~1 && shift) goto:startloop rem ******* assume all pre-params are parsed ******************** :expand_erl_libs rem ******* expand all ebin paths as Windows does not support the ..\*\ebin wildcard ******************** -SETLOCAL enabledelayedexpansion +setlocal enabledelayedexpansion set ext_libs= for /d %%d in ("%originPath%..\lib\*.") do ( set ext_libs=!ext_libs! 
-pa "%%~fd\ebin" ) -SETLOCAL disabledelayedexpansion +setlocal disabledelayedexpansion + :run -IF %useWerl% EQU 1 ( - werl.exe %ext_libs% %ELIXIR_ERL_OPTIONS% %parsErlang% -s elixir start_cli %beforeExtra% -extra %* -) ELSE ( - erl.exe %ext_libs% -noshell %ELIXIR_ERL_OPTIONS% %parsErlang% -s elixir start_cli %beforeExtra% -extra %* +if not %runMode% == "iex" ( + set beforeExtra=-noshell -s elixir start_cli %beforeExtra% +) +if %useWerl% equ 1 ( + start werl.exe %ext_libs% %ELIXIR_ERL_OPTIONS% %parsErlang% %beforeExtra% -extra %* +) else ( + erl.exe %ext_libs% %ELIXIR_ERL_OPTIONS% %parsErlang% %beforeExtra% -extra %* ) +:end +endlocal diff --git a/bin/elixirc b/bin/elixirc index 109eca106cc..d677f6ef944 100755 --- a/bin/elixirc +++ b/bin/elixirc @@ -2,16 +2,19 @@ if [ $# -eq 0 ] || [ "$1" = "--help" ] || [ "$1" = "-h" ]; then echo "Usage: `basename $0` [elixir switches] [compiler switches] [.ex files] - -o The directory to output compiled files - --no-docs Do not attach documentation to compiled modules - --no-debug-info Do not attach debug info to compiled modules - --ignore-module-conflict - --warnings-as-errors Treat warnings as errors and return non-zero exit code - --verbose Print informational messages. + -o The directory to output compiled files + + --help, -h Prints this message and exits + --ignore-module-conflict Does not emit warnings if a module was previously defined + --no-debug-info Does not attach debug info to compiled modules + --no-docs Does not attach documentation to compiled modules + --verbose Prints compilation status + --version, -v Prints Elixir version and exits + --warnings-as-errors Treats warnings as errors and return non-zero exit code ** Options given after -- are passed down to the executed code -** Options can be passed to the erlang runtime using ELIXIR_ERL_OPTIONS -** Options can be passed to the erlang compiler using ERL_COMPILER_OPTIONS" >&2 +** Options can be passed to the Erlang runtime using ELIXIR_ERL_OPTIONS +** Options can be passed to the Erlang compiler using ERL_COMPILER_OPTIONS" >&2 exit 1 fi diff --git a/bin/elixirc.bat b/bin/elixirc.bat index 9d118975d25..1f1098ff326 100644 --- a/bin/elixirc.bat +++ b/bin/elixirc.bat @@ -1,10 +1,12 @@ -@echo off +@if defined ELIXIR_CLI_ECHO (@echo on) else (@echo off) +setlocal set argc=0 for %%A in (%*) do ( - if "%%A"=="--help" goto documentation - if "%%A"=="-h" goto documentation - if "%%A"=="/h" goto documentation - set /A argc+=1 + if /I "%%A"=="--help" goto documentation + if /I "%%A"=="-h" goto documentation + if /I "%%A"=="/h" goto documentation + if "%%A"=="/?" goto documentation + set /A argc+=1 ) if %argc%==0 goto documentation goto run @@ -12,15 +14,23 @@ goto run :documentation echo Usage: %~nx0 [elixir switches] [compiler switches] [.ex files] echo. -echo -o The directory to output compiled files -echo --no-docs Do not attach documentation to compiled modules -echo --no-debug-info Do not attach debug info to compiled modules -echo --ignore-module-conflict -echo --warnings-as-errors Treat warnings as errors and return non-zero exit code -echo --verbose Print informational messages. +echo -o The directory to output compiled files +echo. 
+echo --help, -h Prints this message and exits +echo --ignore-module-conflict Does not emit warnings if a module was previously defined +echo --no-debug-info Does not attach debug info to compiled modules +echo --no-docs Does not attach documentation to compiled modules +echo --verbose Prints compilation status +echo --version, -v Prints Elixir version and exits +echo --warnings-as-errors Treats warnings as errors and returns non-zero exit code echo. echo ** Options given after -- are passed down to the executed code -echo ** Options can be passed to the erlang runtime using ELIXIR_ERL_OPTIONS -echo ** Options can be passed to the erlang compiler using ERL_COMPILER_OPTIONS >&2 +echo ** Options can be passed to the Erlang runtime using ELIXIR_ERL_OPTIONS +echo ** Options can be passed to the Erlang compiler using ERL_COMPILER_OPTIONS +goto end + :run call "%~dp0\elixir.bat" +elixirc %* + +:end +endlocal diff --git a/bin/iex b/bin/iex index e5187f2b08d..700d7bc240a 100755 --- a/bin/iex +++ b/bin/iex @@ -2,23 +2,30 @@ if [ $# -gt 0 ] && ([ "$1" = "--help" ] || [ "$1" = "-h" ]); then echo "Usage: `basename $0` [options] [.exs file] [data] - -v Prints version - -e \"command\" Evaluates the given command (*) - -r \"file\" Requires the given files/patterns (*) - -S \"script\"   Finds and executes the given script - -pr \"file\" Requires the given files/patterns in parallel (*) - -pa \"path\" Prepends the given path to Erlang code path (*) - -pz \"path\" Appends the given path to Erlang code path (*) - --app \"app\" Start the given app and its dependencies (*) - --erl \"switches\" Switches to be passed down to erlang (*) - --name \"name\" Makes and assigns a name to the distributed node - --sname \"name\" Makes and assigns a short name to the distributed node - --cookie \"cookie\" Sets a cookie for this distributed node - --hidden Makes a hidden node - --detached Starts the Erlang VM detached from console - --remsh \"name\" Connects to a node using a remote shell - --dot-iex \"path\" Overrides default .iex.exs file and uses path instead; - path can be empty, then no file will be loaded + -e COMMAND Evaluates the given command (*) + -r FILE Requires the given files/patterns (*) + -S SCRIPT   Finds and executes the given script in PATH + -pr FILE Requires the given files/patterns in parallel (*) + -pa PATH Prepends the given path to Erlang code path (*) + -pz PATH Appends the given path to Erlang code path (*) + + --app APP Starts the given app and its dependencies (*) + --cookie COOKIE Sets a cookie for this distributed node + --detached Starts the Erlang VM detached from console + --erl SWITCHES Switches to be passed down to Erlang (*) + --help, -h Prints this message and exits + --hidden Makes a hidden node + --logger-otp-reports BOOL Enables or disables OTP reporting + --logger-sasl-reports BOOL Enables or disables SASL reporting + --name NAME Makes and assigns a name to the distributed node + --no-halt Does not halt the Erlang VM after execution + --sname NAME Makes and assigns a short name to the distributed node + --version, -v Prints IEx version and exits + --werl Uses Erlang's Windows shell GUI (Windows only) + + --dot-iex PATH Overrides default .iex.exs file and uses path instead; + path can be empty, then no file will be loaded + --remsh NAME Connects to a node using a remote shell ** Options marked with (*) can be given more than once ** Options given after the .exs file or -- are passed down to the executed code @@ -38,4 +45,4 @@ readlink_f () { SELF=$(readlink_f "$0") 
SCRIPT_PATH=$(dirname "$SELF") -exec "$SCRIPT_PATH"/elixir --no-halt --erl "-user Elixir.IEx.CLI" +iex "$@" +exec "$SCRIPT_PATH"/elixir --no-halt --erl "-noshell -user Elixir.IEx.CLI" +iex "$@" diff --git a/bin/iex.bat b/bin/iex.bat index 717714c3605..5ee7722afb0 100644 --- a/bin/iex.bat +++ b/bin/iex.bat @@ -1,2 +1,46 @@ -@echo off -call "%~dp0\elixir.bat" +iex --erl "-user Elixir.IEx.CLI" --no-halt %* +@if defined ELIXIR_CLI_ECHO (@echo on) else (@echo off) +setlocal +if /I ""%1""==""--help"" goto documentation +if /I ""%1""==""-h"" goto documentation +if /I ""%1""==""/h"" goto documentation +if ""%1""==""/?"" goto documentation +goto run + +:documentation +echo Usage: %~nx0 [options] [.exs file] [data] +echo. +echo -e COMMAND Evaluates the given command (*) +echo -r FILE Requires the given files/patterns (*) +echo -S SCRIPT Finds and executes the given script in PATH +echo -pr FILE Requires the given files/patterns in parallel (*) +echo -pa PATH Prepends the given path to Erlang code path (*) +echo -pz PATH Appends the given path to Erlang code path (*) +echo. +echo --app APP Starts the given app and its dependencies (*) +echo --cookie COOKIE Sets a cookie for this distributed node +echo --detached Starts the Erlang VM detached from console +echo --erl SWITCHES Switches to be passed down to Erlang (*) +echo --help, -h Prints this message and exits +echo --hidden Makes a hidden node +echo --logger-otp-reports BOOL Enables or disables OTP reporting +echo --logger-sasl-reports BOOL Enables or disables SASL reporting +echo --name NAME Makes and assigns a name to the distributed node +echo --no-halt Does not halt the Erlang VM after execution +echo --sname NAME Makes and assigns a short name to the distributed node +echo --version, -v Prints IEx version and exits +echo --werl Uses Erlang's Windows shell GUI (Windows only) +echo. +echo --dot-iex PATH Overrides default .iex.exs file and uses path instead; +echo path can be empty, then no file will be loaded +echo --remsh NAME Connects to a node using a remote shell +echo. 
+echo ** Options marked with (*) can be given more than once +echo ** Options given after the .exs file or -- are passed down to the executed code +echo ** Options can be passed to the Erlang VM using ELIXIR_ERL_OPTIONS or --erl +goto end + +:run +@if defined IEX_WITH_WERL (@set __ELIXIR_IEX_FLAGS=--werl) else (set __ELIXIR_IEX_FLAGS=) +call "%~dp0\elixir.bat" --no-halt --erl "-noshell -user Elixir.IEx.CLI" +iex %__ELIXIR_IEX_FLAGS% %* +:end +endlocal diff --git a/bin/mix.bat b/bin/mix.bat index 435a5257340..7e4a7a65386 100644 --- a/bin/mix.bat +++ b/bin/mix.bat @@ -1,2 +1,2 @@ -@echo off +@if defined ELIXIR_CLI_ECHO (@echo on) else (@echo off) call "%~dp0\elixir.bat" "%~dp0\mix" %* diff --git a/bin/mix.ps1 b/bin/mix.ps1 index 9a4a36005cf..369c4942113 100644 --- a/bin/mix.ps1 +++ b/bin/mix.ps1 @@ -1,27 +1,23 @@ -# Initialize with path to mix.bat relative to caller's working directory -$toCmd = '' + (Resolve-Path -relative (Split-Path $MyInvocation.MyCommand.Path)) + '\mix.bat' +# Store path to mix.bat as a FileInfo object +$mixBatPath = (Get-ChildItem (((Get-ChildItem $MyInvocation.MyCommand.Path).Directory.FullName) + '\mix.bat')) +$newArgs = @() -foreach ($arg in $args) +for ($i = 0; $i -lt $args.length; $i++) { - $toCmd += ' ' - - if ($arg -is [array]) + if ($args[$i] -is [array]) { # Commas created the array so we need to reintroduce those commas - for ($i = 0; $i -lt $arg.length; $i++) + for ($j = 0; $j -lt $args[$i].length - 1; $j++) { - $toCmd += $arg[$i] - if ($i -ne ($arg.length - 1)) - { - $toCmd += ', ' - } + $newArgs += ($args[$i][$j] + ',') } + $newArgs += $args[$i][-1] } else { - $toCmd += $arg + $newArgs += $args[$i] } } # Corrected arguments are ready to pass to batch file -cmd /c $toCmd \ No newline at end of file +& $mixBatPath $newArgs diff --git a/lib/eex/lib/eex.ex b/lib/eex/lib/eex.ex index fd0095db04b..254344cd33d 100644 --- a/lib/eex/lib/eex.ex +++ b/lib/eex/lib/eex.ex @@ -1,11 +1,15 @@ defmodule EEx.SyntaxError do - defexception [:message] + defexception [:message, :file, :line] + + def message(exception) do + "#{exception.file}:#{exception.line}: #{exception.message}" + end end defmodule EEx do @moduledoc ~S""" EEx stands for Embedded Elixir. It allows you to embed - Elixir code inside a string in a robust way: + Elixir code inside a string in a robust way. iex> EEx.eval_string "foo <%= bar %>", [bar: "baz"] "foo baz" @@ -31,14 +35,14 @@ defmodule EEx do ## Options - All functions in this module accepts EEx-related options. + All functions in this module accept EEx-related options. They are: - * `:line` - the line to be used as the template start. Defaults to 1. - * `:file` - the file to be used in the template. Defaults to the given - file the template is read from or to "nofile" when compiling - from a string. + * `:line` - the line to be used as the template start. Defaults to 1. + * `:file` - the file to be used in the template. Defaults to the given + file the template is read from or to "nofile" when compiling from a string. * `:engine` - the EEx engine to be used for compilation. + * `:trim` - trims whitespace left/right of quotation tags ## Engine @@ -59,9 +63,9 @@ defmodule EEx do All expressions that output something to the template **must** use the equals sign (`=`). Since everything in - Elixir is a macro, there are no exceptions for this rule. - For example, while some template languages would special- - case `if` clauses, they are treated the same in EEx and + Elixir is an expression, there are no exceptions for this rule. 
+ For example, while some template languages would special-case + `if/2` clauses, they are treated the same in EEx and also require `=` in order to have their result printed: <%= if true do %> @@ -82,11 +86,11 @@ defmodule EEx do iex> EEx.eval_string "<%= @foo %>", assigns: [foo: 1] "1" - In other words, `<%= @foo %>` is simply translated to: + In other words, `<%= @foo %>` translates to: - <%= Dict.get assigns, :foo %> + <%= {:ok, v} = Access.fetch(assigns, :foo); v %> - The assigns extension is useful when the number of variables + The `assigns` extension is useful when the number of variables required by the template is not specified at compilation time. """ @@ -107,7 +111,7 @@ defmodule EEx do """ defmacro function_from_string(kind, name, source, args \\ [], options \\ []) do - quote bind_quoted: binding do + quote bind_quoted: binding() do info = Keyword.merge [file: __ENV__.file, line: __ENV__.line], options args = Enum.map args, fn arg -> {arg, [line: info[:line]], nil} end compiled = EEx.compile_string(source, info) @@ -144,7 +148,7 @@ defmodule EEx do """ defmacro function_from_file(kind, name, file, args \\ [], options \\ []) do - quote bind_quoted: binding do + quote bind_quoted: binding() do info = Keyword.merge options, [file: file, line: 1] args = Enum.map args, fn arg -> {arg, [line: 1], nil} end compiled = EEx.compile_file(file, info) @@ -159,24 +163,26 @@ defmodule EEx do end @doc """ - Get a string `source` and generate a quoted expression + Gets a string `source` and generate a quoted expression that can be evaluated by Elixir or compiled to a function. """ - def compile_string(source, options \\ []) do + @spec compile_string(String.t, Keyword.t) :: Macro.t | no_return + def compile_string(source, options \\ []) when is_binary(source) and is_list(options) do EEx.Compiler.compile(source, options) end @doc """ - Get a `filename` and generate a quoted expression + Gets a `filename` and generate a quoted expression that can be evaluated by Elixir or compiled to a function. """ - def compile_file(filename, options \\ []) do + @spec compile_file(String.t, Keyword.t) :: Macro.t | no_return + def compile_file(filename, options \\ []) when is_binary(filename) and is_list(options) do options = Keyword.merge options, [file: filename, line: 1] compile_string(File.read!(filename), options) end @doc """ - Get a string `source` and evaluate the values using the `bindings`. + Gets a string `source` and evaluate the values using the `bindings`. ## Examples @@ -184,24 +190,28 @@ defmodule EEx do "foo baz" """ - def eval_string(source, bindings \\ [], options \\ []) do + @spec eval_string(String.t, Keyword.t, Keyword.t) :: any + def eval_string(source, bindings \\ [], options \\ []) + when is_binary(source) and is_list(bindings) and is_list(options) do compiled = compile_string(source, options) do_eval(compiled, bindings, options) end @doc """ - Get a `filename` and evaluate the values using the `bindings`. + Gets a `filename` and evaluate the values using the `bindings`. 
## Examples - # sample.ex + # sample.eex foo <%= bar %> # iex - EEx.eval_file "sample.ex", [bar: "baz"] #=> "foo baz" + EEx.eval_file "sample.eex", [bar: "baz"] #=> "foo baz" """ - def eval_file(filename, bindings \\ [], options \\ []) do + @spec eval_file(String.t, Keyword.t, Keyword.t) :: any + def eval_file(filename, bindings \\ [], options \\ []) + when is_binary(filename) and is_list(bindings) and is_list(options) do options = Keyword.put options, :file, filename compiled = compile_file(filename, options) do_eval(compiled, bindings, options) diff --git a/lib/eex/lib/eex/compiler.ex b/lib/eex/lib/eex/compiler.ex index c34ef566c60..ce64325517f 100644 --- a/lib/eex/lib/eex/compiler.ex +++ b/lib/eex/lib/eex/compiler.ex @@ -9,58 +9,84 @@ defmodule EEx.Compiler do and the engine together by handling the tokens and invoking the engine every time a full expression or text is received. """ - def compile(source, opts) do - file = opts[:file] || "nofile" - line = opts[:line] || 1 - tokens = EEx.Tokenizer.tokenize(source, line) - state = %{engine: opts[:engine] || @default_engine, - file: file, line: line, quoted: [], start_line: nil} - generate_buffer(tokens, "", [], state) + @spec compile(String.t, Keyword.t) :: Macro.t | no_return + def compile(source, opts) when is_binary(source) and is_list(opts) do + file = opts[:file] || "nofile" + line = opts[:line] || 1 + trim = opts[:trim] || false + case EEx.Tokenizer.tokenize(source, line, trim: trim) do + {:ok, tokens} -> + state = %{engine: opts[:engine] || @default_engine, init: nil, + file: file, line: line, quoted: [], start_line: nil} + init = state.engine.init(opts) + generate_buffer(tokens, init, [], %{state | init: init}) + {:error, line, message} -> + raise EEx.SyntaxError, line: line, file: file, message: message + end end - # Generates the buffers by handling each expression from the tokenizer + # Generates the buffers by handling each expression from the tokenizer. + # It returns Macro.t/0 or it raises. 
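+  #
+  # A brief sketch of the moving parts (for orientation): `buffer` is the
+  # engine's accumulated buffer (seeded from `state.init`), `scope` is the
+  # stack of currently open block expressions kept as charlists, and `state`
+  # carries the engine, file/line information and the buffers registered
+  # under `__EEX__` placeholders by `wrap_expr/5`.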
- defp generate_buffer([{:text, chars}|t], buffer, scope, state) do + defp generate_buffer([{:text, chars} | rest], buffer, scope, state) do buffer = state.engine.handle_text(buffer, IO.chardata_to_string(chars)) - generate_buffer(t, buffer, scope, state) + generate_buffer(rest, buffer, scope, state) end - defp generate_buffer([{:expr, line, mark, chars}|t], buffer, scope, state) do + defp generate_buffer([{:expr, line, mark, chars} | rest], buffer, scope, state) do expr = Code.string_to_quoted!(chars, [line: line, file: state.file]) - buffer = state.engine.handle_expr(buffer, mark, expr) - generate_buffer(t, buffer, scope, state) + buffer = state.engine.handle_expr(buffer, IO.chardata_to_string(mark), expr) + generate_buffer(rest, buffer, scope, state) end - defp generate_buffer([{:start_expr, start_line, mark, chars}|t], buffer, scope, state) do - {contents, line, t} = look_ahead_text(t, start_line, chars) - {contents, t} = generate_buffer(t, "", [contents|scope], - %{state | quoted: [], line: line, start_line: start_line}) - buffer = state.engine.handle_expr(buffer, mark, contents) - generate_buffer(t, buffer, scope, state) + defp generate_buffer([{:start_expr, start_line, mark, chars} | rest], buffer, scope, state) do + {contents, line, rest} = look_ahead_text(rest, start_line, chars) + {contents, rest} = + generate_buffer(rest, state.init, [contents | scope], + %{state | quoted: [], line: line, start_line: start_line}) + buffer = state.engine.handle_expr(buffer, IO.chardata_to_string(mark), contents) + generate_buffer(rest, buffer, scope, state) end - defp generate_buffer([{:middle_expr, line, _, chars}|t], buffer, [current|scope], state) do + defp generate_buffer([{:middle_expr, line, '', chars} | rest], buffer, [current | scope], state) do {wrapped, state} = wrap_expr(current, line, buffer, chars, state) - generate_buffer(t, "", [wrapped|scope], %{state | line: line}) + generate_buffer(rest, state.init, [wrapped | scope], %{state | line: line}) + end + + defp generate_buffer([{:middle_expr, line, modifier, chars} | t], buffer, scope, state) do + message = "unexpected beginning of EEx tag \"<%#{modifier}\" on \"<%#{modifier}#{chars}%>\", " <> + "please remove \"#{modifier}\" accordingly" + :elixir_errors.warn line, state.file, message + generate_buffer([{:middle_expr, line, '', chars} | t], buffer, scope, state) + # TODO: Make this an error on Elixir v2.0 since it accidentally worked previously. 
+ # raise EEx.SyntaxError, message: message, file: state.file, line: line end - defp generate_buffer([{:end_expr, line, _, chars}|t], buffer, [current|_], state) do + defp generate_buffer([{:end_expr, line, '', chars} | rest], buffer, [current | _], state) do {wrapped, state} = wrap_expr(current, line, buffer, chars, state) tuples = Code.string_to_quoted!(wrapped, [line: state.start_line, file: state.file]) buffer = insert_quoted(tuples, state.quoted) - {buffer, t} + {buffer, rest} end - defp generate_buffer([{:end_expr, line, _, chars}|_], _buffer, [], _state) do - raise EEx.SyntaxError, message: "unexpected token: #{inspect chars} at line #{inspect line}" + defp generate_buffer([{:end_expr, line, modifier, chars} | _], _buffer, [_ | _], state) do + message = "unexpected beginning of EEx tag \"<%#{modifier}\" on end of expression \"<%#{modifier}#{chars}%>\", " <> + "please remove \"#{modifier}\" accordingly" + raise EEx.SyntaxError, message: message, file: state.file, line: line + end + + defp generate_buffer([{:end_expr, line, _, chars} | _], _buffer, [], state) do + raise EEx.SyntaxError, message: "unexpected end of expression <%#{chars}%>", + file: state.file, line: line end defp generate_buffer([], buffer, [], state) do state.engine.handle_body(buffer) end - defp generate_buffer([], _buffer, _scope, _state) do - raise EEx.SyntaxError, message: "unexpected end of string. expecting a closing <% end %>." + defp generate_buffer([], _buffer, _scope, state) do + raise EEx.SyntaxError, message: "unexpected end of string, expected a closing '<% end %>'", + file: state.file, line: state.line end # Creates a placeholder and wrap it inside the expression block @@ -68,23 +94,25 @@ defmodule EEx.Compiler do defp wrap_expr(current, line, buffer, chars, state) do new_lines = List.duplicate(?\n, line - state.line) key = length(state.quoted) - placeholder = '__EEX__(' ++ Integer.to_char_list(key) ++ ');' + placeholder = '__EEX__(' ++ Integer.to_charlist(key) ++ ');' {current ++ placeholder ++ new_lines ++ chars, - %{state | quoted: [{key, buffer}|state.quoted]}} + %{state | quoted: [{key, buffer} | state.quoted]}} end # Look text ahead on expressions - defp look_ahead_text([{:text, text}, {:middle_expr, line, _, chars}|t]=list, start, contents) do + defp look_ahead_text([{:text, text}, {:middle_expr, line, _, chars} | rest] = tokens, start, contents) do if only_spaces?(text) do - {contents ++ text ++ chars, line, t} + {contents ++ text ++ chars, line, rest} else - {contents, start, list} + {contents, start, tokens} end end - - defp look_ahead_text(t, start, contents) do - {contents, start, t} + defp look_ahead_text([{:middle_expr, line, _, chars} | rest], _start, contents) do + {contents ++ chars, line, rest} + end + defp look_ahead_text(tokens, start, contents) do + {contents, start, tokens} end defp only_spaces?(chars) do diff --git a/lib/eex/lib/eex/engine.ex b/lib/eex/lib/eex/engine.ex index 257f2f46d18..736ea2b2410 100644 --- a/lib/eex/lib/eex/engine.ex +++ b/lib/eex/lib/eex/engine.ex @@ -2,7 +2,9 @@ defmodule EEx.Engine do @moduledoc ~S""" Basic EEx engine that ships with Elixir. - An engine needs to implement three functions: + An engine needs to implement four functions: + + * `init(opts)` - returns the initial buffer * `handle_body(quoted)` - receives the final built quoted expression, should do final post-processing and return a @@ -25,38 +27,44 @@ defmodule EEx.Engine do default implementations for the functions above. 
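+
+  As a minimal sketch (`MyUpcaseEngine` is an illustrative name, not part of
+  EEx), an engine that upcases static text while keeping the default
+  behaviour for everything else could look like:
+
+      defmodule MyUpcaseEngine do
+        use EEx.Engine
+
+        def handle_text(buffer, text) do
+          EEx.Engine.handle_text(buffer, String.upcase(text))
+        end
+      end
+
+      EEx.eval_string("foo <%= :bar %>", [], engine: MyUpcaseEngine)
+      #=> "FOO bar"
+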
""" - use Behaviour - - defcallback handle_body(Macro.t) :: Macro.t - defcallback handle_text(Macro.t, binary) :: Macro.t - defcallback handle_expr(Macro.t, binary, Macro.t) :: Macro.t + @callback init(opts :: Keyword.t) :: Macro.t + @callback handle_body(quoted :: Macro.t) :: Macro.t + @callback handle_text(buffer :: Macro.t, text :: String.t) :: Macro.t + @callback handle_expr(buffer :: Macro.t, marker :: String.t, expr :: Macro.t) :: Macro.t @doc false defmacro __using__(_) do quote do @behaviour EEx.Engine - def handle_body(body) do - EEx.Engine.handle_body(body) + def init(opts) do + EEx.Engine.init(opts) + end + + def handle_body(quoted) do + EEx.Engine.handle_body(quoted) end def handle_text(buffer, text) do EEx.Engine.handle_text(buffer, text) end - def handle_expr(buffer, mark, expr) do - EEx.Engine.handle_expr(buffer, mark, expr) + def handle_expr(buffer, marker, expr) do + EEx.Engine.handle_expr(buffer, marker, expr) end - defoverridable [handle_body: 1, handle_expr: 3, handle_text: 2] + defoverridable EEx.Engine end end @doc """ Handles assigns in quoted expressions. + A warning will be printed on missing assigns. + Future versions will raise. + This can be added to any custom engine by invoking - `handle_assign/3` with `Macro.prewalk/1`: + `handle_assign/1` with `Macro.prewalk/2`: def handle_expr(buffer, token, expr) do expr = Macro.prewalk(expr, &EEx.Engine.handle_assign/1) @@ -64,17 +72,40 @@ defmodule EEx.Engine do end """ - def handle_assign({:@, line, [{name, _, atom}]}) when is_atom(name) and is_atom(atom) do - quote line: line, do: Dict.get(var!(assigns), unquote(name)) + @spec handle_assign(Macro.t) :: Macro.t + def handle_assign({:@, meta, [{name, _, atom}]}) when is_atom(name) and is_atom(atom) do + line = meta[:line] || 0 + quote line: line, do: EEx.Engine.fetch_assign!(var!(assigns), unquote(name)) end - def handle_assign(arg) do arg end + @doc false + # TODO: Raise on 2.0 + @spec fetch_assign!(Access.t, Access.key) :: term | nil + def fetch_assign!(assigns, key) do + case Access.fetch(assigns, key) do + {:ok, val} -> + val + :error -> + keys = Enum.map(assigns, &elem(&1, 0)) + IO.warn "assign @#{key} not available in EEx template. " <> + "Please ensure all assigns are given as options. " <> + "Available assigns: #{inspect keys}" + nil + end + end + + @doc """ + Returns an empty string as initial buffer. + """ + def init(_opts) do + "" + end + @doc """ - The default implementation implementation simply returns the - given expression. + The default implementation simply returns the given expression. 
""" def handle_body(quoted) do quoted @@ -97,16 +128,16 @@ defmodule EEx.Engine do """ def handle_expr(buffer, "=", expr) do quote do - tmp = unquote(buffer) - tmp <> to_string(unquote(expr)) + tmp1 = unquote(buffer) + tmp1 <> String.Chars.to_string(unquote(expr)) end end def handle_expr(buffer, "", expr) do quote do - tmp = unquote(buffer) + tmp2 = unquote(buffer) unquote(expr) - tmp + tmp2 end end end diff --git a/lib/eex/lib/eex/smart_engine.ex b/lib/eex/lib/eex/smart_engine.ex index c59db81596f..6b5c398d15f 100644 --- a/lib/eex/lib/eex/smart_engine.ex +++ b/lib/eex/lib/eex/smart_engine.ex @@ -1,63 +1,3 @@ -defmodule EEx.TransformerEngine do - @moduledoc false - - @doc false - defmacro __using__(_) do - quote do - @behaviour EEx.Engine - - def handle_body(body) do - EEx.Engine.handle_body(body) - end - - def handle_text(buffer, text) do - EEx.Engine.handle_text(buffer, text) - end - - def handle_expr(buffer, mark, expr) do - EEx.Engine.handle_expr(buffer, mark, transform(expr)) - end - - defp transform({a, b, c}) do - {transform(a), b, transform(c)} - end - - defp transform({a, b}) do - {transform(a), transform(b)} - end - - defp transform(list) when is_list(list) do - for i <- list, do: transform(i) - end - - defp transform(other) do - other - end - - defoverridable [transform: 1, handle_body: 1, handle_expr: 3, handle_text: 2] - end - end -end - -defmodule EEx.AssignsEngine do - @moduledoc false - - @doc false - defmacro __using__(_) do - quote unquote: false do - defp transform({:@, line, [{name, _, atom}]}) when is_atom(name) and is_atom(atom) do - quote do: Dict.get(var!(assigns), unquote(name)) - end - - defp transform(arg) do - super(arg) - end - - defoverridable [transform: 1] - end - end -end - defmodule EEx.SmartEngine do @moduledoc """ The default engine used by EEx. @@ -71,9 +11,9 @@ defmodule EEx.SmartEngine do "1" In the example above, we can access the value `foo` under - the binding `assigns` using `@foo`. This is useful when - a template, after compiled, may receive different assigns - and the developer don't want to recompile it for each + the binding `assigns` using `@foo`. This is useful because + a template, after being compiled, can receive different + assigns and would not require recompilation for each variable set. Assigns can also be used when compiled to a function: diff --git a/lib/eex/lib/eex/tokenizer.ex b/lib/eex/lib/eex/tokenizer.ex index 44d5b681eed..b626bfa6eea 100644 --- a/lib/eex/lib/eex/tokenizer.ex +++ b/lib/eex/lib/eex/tokenizer.ex @@ -1,97 +1,116 @@ defmodule EEx.Tokenizer do @moduledoc false + @type content :: IO.chardata + @type line :: non_neg_integer + @type token :: {:text, content} | + {:expr | :start_expr | :middle_expr | :end_expr, line, '=' | '', content} + @doc """ - Tokenizes the given char list or binary. - It returns 4 different types of tokens as result: + Tokenizes the given charlist or binary. + + It returns {:ok, list} with the following tokens: - * `{:text, contents}` - * `{:expr, line, marker, contents}` - * `{:start_expr, line, marker, contents}` - * `{:middle_expr, line, marker, contents}` - * `{:end_expr, line, marker, contents}` + * `{:text, content}` + * `{:expr, line, marker, content}` + * `{:start_expr, line, marker, content}` + * `{:middle_expr, line, marker, content}` + * `{:end_expr, line, marker, content}` + Or `{:error, line, error}` in case of errors. 
""" - def tokenize(bin, line) when is_binary(bin) do - tokenize(String.to_char_list(bin), line) + @spec tokenize(binary | charlist, line, Keyword.t) :: {:ok, [token]} | {:error, line, String.t} + def tokenize(bin, line, opts \\ []) + + def tokenize(bin, line, opts) + when is_binary(bin) and is_integer(line) and line >= 0 and is_list(opts) do + tokenize(String.to_charlist(bin), line, opts) end - def tokenize(list, line) do - Enum.reverse(tokenize(list, line, [], [])) + def tokenize(list, line, opts) + when is_list(list) and is_integer(line) and line >= 0 and is_list(opts) do + tokenize(list, line, opts, [], []) end - defp tokenize('<%%' ++ t, line, buffer, acc) do - {buffer, new_line, rest} = tokenize_expr t, line, [?%, ?<|buffer] - tokenize rest, new_line, [?>, ?%|buffer], acc + defp tokenize('<%%' ++ t, line, opts, buffer, acc) do + tokenize t, line, opts, [?%, ?< | buffer], acc end - defp tokenize('<%#' ++ t, line, buffer, acc) do - {_, new_line, rest} = tokenize_expr t, line, [] - tokenize rest, new_line, buffer, acc + defp tokenize('<%#' ++ t, line, opts, buffer, acc) do + case expr(t, line, []) do + {:error, _, _} = error -> error + {:ok, _, new_line, rest} -> + {rest, new_line, buffer} = trim_if_needed(rest, new_line, opts, buffer, acc) + tokenize rest, new_line, opts, buffer, acc + end end - defp tokenize('<%' ++ t, line, buffer, acc) do + defp tokenize('<%' ++ t, line, opts, buffer, acc) do {marker, t} = retrieve_marker(t) - {expr, new_line, rest} = tokenize_expr t, line, [] - token = token_name(expr) - acc = tokenize_text(buffer, acc) - final = {token, line, marker, Enum.reverse(expr)} - tokenize rest, new_line, [], [final | acc] + case expr(t, line, []) do + {:error, _, _} = error -> error + {:ok, expr, new_line, rest} -> + token = token_name(expr) + {rest, new_line, buffer} = trim_if_needed(rest, new_line, opts, buffer, acc) + acc = tokenize_text(buffer, acc) + final = {token, line, marker, Enum.reverse(expr)} + tokenize rest, new_line, opts, [], [final | acc] + end end - defp tokenize('\n' ++ t, line, buffer, acc) do - tokenize t, line + 1, [?\n|buffer], acc + defp tokenize('\n' ++ t, line, opts, buffer, acc) do + tokenize t, line + 1, opts, [?\n | buffer], acc end - defp tokenize([h|t], line, buffer, acc) do - tokenize t, line, [h|buffer], acc + defp tokenize([h | t], line, opts, buffer, acc) do + tokenize t, line, opts, [h | buffer], acc end - defp tokenize([], _line, buffer, acc) do - tokenize_text(buffer, acc) + defp tokenize([], _line, _opts, buffer, acc) do + {:ok, Enum.reverse(tokenize_text(buffer, acc))} end # Retrieve marker for <% defp retrieve_marker('=' ++ t) do - {"=", t} + {'=', t} end defp retrieve_marker(t) do - {"", t} + {'', t} end # Tokenize an expression until we find %> - defp tokenize_expr([?%, ?>|t], line, buffer) do - {buffer, line, t} + defp expr([?%, ?> | t], line, buffer) do + {:ok, buffer, line, t} end - defp tokenize_expr('\n' ++ t, line, buffer) do - tokenize_expr t, line + 1, [?\n|buffer] + defp expr('\n' ++ t, line, buffer) do + expr t, line + 1, [?\n | buffer] end - defp tokenize_expr([h|t], line, buffer) do - tokenize_expr t, line, [h|buffer] + defp expr([h | t], line, buffer) do + expr t, line, [h | buffer] end - defp tokenize_expr([], _line, _buffer) do - raise EEx.SyntaxError, message: "missing token: %>" + defp expr([], line, _buffer) do + {:error, line, "missing token '%>'"} end # Receive an expression content and check # if it is a start, middle or an end token. 
# - # Start tokens finish with `do` and `fn ->` - # Middle tokens are marked with `->` or keywords - # End tokens contain only the end word + # Start tokens finish with "do" and "fn ->" + # Middle tokens are marked with "->" or keywords + # End tokens contain only the end word and optionally ")" - defp token_name([h|t]) when h in [?\s, ?\t] do + defp token_name([h | t]) when h in [?\s, ?\t, ?)] do token_name(t) end - defp token_name('od' ++ [h|_]) when h in [?\s, ?\t, ?)] do + defp token_name('od' ++ [h | _]) when h in [?\s, ?\t, ?)] do :start_expr end @@ -104,7 +123,7 @@ defmodule EEx.Tokenizer do # token and, if so, it is not followed by an "end" # token. If this is the case, we are on a start expr. case :elixir_tokenizer.tokenize(rest, 1, file: "eex", check_terminators: false) do - {:ok, _line, tokens} -> + {:ok, _line, _column, tokens} -> tokens = Enum.reverse(tokens) fn_index = fn_index(tokens) @@ -132,7 +151,7 @@ defmodule EEx.Tokenizer do Enum.find_index tokens, fn {:fn_paren, _} -> true {:fn, _} -> true - _ -> false + _ -> false end end @@ -158,4 +177,46 @@ defmodule EEx.Tokenizer do defp tokenize_text(buffer, acc) do [{:text, Enum.reverse(buffer)} | acc] end + + # If trim mode is enabled and the token is on a line with + # only itself and whitespace, trim the whitespace around it, + # including the line break following it if there is one. + defp trim_if_needed(rest, line, opts, buffer, acc) do + original = {rest, line, buffer} + if opts[:trim] do + case {trim_left(buffer, acc), trim_right(rest, line)} do + {{true, new_buffer}, {true, new_rest, new_line}} -> + {new_rest, new_line, new_buffer} + _ -> + original + end + else + original + end + end + + defp trim_left(buffer, acc) do + case {trim_whitespace(buffer), acc} do + {[?\n | _] = trimmed_buffer, _} -> {true, trimmed_buffer} + {[], []} -> {true, []} + _ -> {false, buffer} + end + end + + defp trim_right(rest, line) do + case trim_whitespace(rest) do + [?\r, ?\n | trimmed_rest] -> {true, trimmed_rest, line + 1} + [?\n | trimmed_rest] -> {true, trimmed_rest, line + 1} + [] -> {true, [], line} + _ -> {false, rest, line} + end + end + + defp trim_whitespace([h | t]) when h == ?\s or h == ?\t do + trim_whitespace(t) + end + + defp trim_whitespace(list) do + list + end end diff --git a/lib/eex/test/eex/smart_engine_test.exs b/lib/eex/test/eex/smart_engine_test.exs index 1003da8da15..ef3a898ccc9 100644 --- a/lib/eex/test/eex/smart_engine_test.exs +++ b/lib/eex/test/eex/smart_engine_test.exs @@ -1,7 +1,8 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule EEx.SmartEngineTest do - use ExUnit.Case, async: true + # TODO: Make this async: true once capture_io is removed + use ExUnit.Case test "evaluates simple string" do assert_eval "foo bar", "foo bar" @@ -15,12 +16,29 @@ defmodule EEx.SmartEngineTest do assert_eval "1", "<%= @foo %>", assigns: %{foo: 1} end + test "error with missing assigns" do + stderr = ExUnit.CaptureIO.capture_io(:stderr, fn -> + assert_eval "", "<%= @foo %>", assigns: %{} + end) + assert stderr =~ "assign @foo not available in EEx template" + end + test "evaluates with loops" do assert_eval "1\n2\n3\n", "<%= for x <- [1, 2, 3] do %><%= x %>\n<% end %>" end + test "preserves line numbers" do + result = EEx.compile_string("<%= @hello %>", engine: EEx.SmartEngine) + Macro.prewalk(result, fn + {_left, meta, _right} -> + assert Keyword.get(meta, :line, 0) in [0, 1] + _ -> + :ok + end) + end + defp assert_eval(expected, actual, binding \\ []) do - result = EEx.eval_string(actual, binding, file: __ENV__.file) + 
result = EEx.eval_string(actual, binding, file: __ENV__.file, engine: EEx.SmartEngine) assert result == expected end end diff --git a/lib/eex/test/eex/tokenizer_test.exs b/lib/eex/test/eex/tokenizer_test.exs index d58268b0903..7b5cbf4f1b3 100644 --- a/lib/eex/test/eex/tokenizer_test.exs +++ b/lib/eex/test/eex/tokenizer_test.exs @@ -5,23 +5,26 @@ defmodule EEx.TokenizerTest do require EEx.Tokenizer, as: T test "simple chars lists" do - assert T.tokenize('foo', 1) == [ {:text, 'foo'} ] + assert T.tokenize('foo', 1) == {:ok, [{:text, 'foo'}]} end test "simple strings" do - assert T.tokenize("foo", 1) == [ {:text, 'foo'} ] + assert T.tokenize("foo", 1) == {:ok, [{:text, 'foo'}]} end test "strings with embedded code" do - assert T.tokenize('foo <% bar %>', 1) == [ {:text, 'foo '}, {:expr, 1, "", ' bar '} ] + assert T.tokenize('foo <% bar %>', 1) == + {:ok, [{:text, 'foo '}, {:expr, 1, '', ' bar '}]} end test "strings with embedded equals code" do - assert T.tokenize('foo <%= bar %>', 1) == [ {:text, 'foo '}, {:expr, 1, "=", ' bar '} ] + assert T.tokenize('foo <%= bar %>', 1) == + {:ok, [{:text, 'foo '}, {:expr, 1, '=', ' bar '}]} end test "strings with more than one line" do - assert T.tokenize('foo\n<%= bar %>', 1) == [ {:text, 'foo\n'}, {:expr, 2, "=", ' bar '} ] + assert T.tokenize('foo\n<%= bar %>', 1) == + {:ok, [{:text, 'foo\n'}, {:expr, 2, '=', ' bar '}]} end test "strings with more than one line and expression with more than one line" do @@ -32,74 +35,129 @@ baz %> <% foo %> ''' - assert T.tokenize(string, 1) == [ + assert T.tokenize(string, 1) == {:ok, [ {:text, 'foo '}, - {:expr, 1, "=", ' bar\n\nbaz '}, + {:expr, 1, '=', ' bar\n\nbaz '}, {:text, '\n'}, - {:expr, 4, "", ' foo '}, + {:expr, 4, '', ' foo '}, {:text, '\n'} - ] + ]} end test "quotation" do - assert T.tokenize('foo <%% true %>', 1) == [ + assert T.tokenize('foo <%% true %>', 1) == {:ok, [ {:text, 'foo <% true %>'} - ] + ]} end test "quotation with do/end" do - assert T.tokenize('foo <%% true do %>bar<%% end %>', 1) == [ + assert T.tokenize('foo <%% true do %>bar<%% end %>', 1) == {:ok, [ {:text, 'foo <% true do %>bar<% end %>'} - ] + ]} + end + + test "quotation with interpolation" do + assert T.tokenize('a <%% b <%= c %> <%= d %> e %> f', 1) == {:ok, [ + {:text, 'a <% b '}, + {:expr, 1, '=', ' c '}, + {:text, ' '}, + {:expr, 1, '=', ' d '}, + {:text, ' e %> f'} + ]} + + assert T.tokenize('<%%% a <%%= b %> c %>', 1) == {:ok, [ + {:text, '<%% a <%= b %> c %>'} + ]} end test "comments" do - assert T.tokenize('foo <%# true %>', 1) == [ + assert T.tokenize('foo <%# true %>', 1) == {:ok, [ {:text, 'foo '} - ] + ]} end test "comments with do/end" do - assert T.tokenize('foo <%# true do %>bar<%# end %>', 1) == [ + assert T.tokenize('foo <%# true do %>bar<%# end %>', 1) == {:ok, [ {:text, 'foo bar'} - ] + ]} end test "strings with embedded do end" do - assert T.tokenize('foo <% if true do %>bar<% end %>', 1) == [ + assert T.tokenize('foo <% if true do %>bar<% end %>', 1) == {:ok, [ {:text, 'foo '}, - {:start_expr, 1, "", ' if true do '}, + {:start_expr, 1, '', ' if true do '}, {:text, 'bar'}, - {:end_expr, 1, "", ' end '} - ] + {:end_expr, 1, '', ' end '} + ]} end test "strings with embedded -> end" do - assert T.tokenize('foo <% cond do %><% false -> %>bar<% true -> %>baz<% end %>', 1) == [ + assert T.tokenize('foo <% cond do %><% false -> %>bar<% true -> %>baz<% end %>', 1) == {:ok, [ {:text, 'foo '}, - {:start_expr, 1, "", ' cond do '}, - {:middle_expr, 1, "", ' false -> '}, + {:start_expr, 1, '', ' cond do '}, + {:middle_expr, 
1, '', ' false -> '}, {:text, 'bar'}, - {:middle_expr, 1, "", ' true -> '}, + {:middle_expr, 1, '', ' true -> '}, {:text, 'baz'}, - {:end_expr, 1, "", ' end '} - ] + {:end_expr, 1, '', ' end '} + ]} end test "strings with embedded keywords blocks" do - assert T.tokenize('foo <% if true do %>bar<% else %>baz<% end %>', 1) == [ + assert T.tokenize('foo <% if true do %>bar<% else %>baz<% end %>', 1) == {:ok, [ {:text, 'foo '}, - {:start_expr, 1, "", ' if true do '}, + {:start_expr, 1, '', ' if true do '}, {:text, 'bar'}, - {:middle_expr, 1, "", ' else '}, + {:middle_expr, 1, '', ' else '}, {:text, 'baz'}, - {:end_expr, 1, "", ' end '} - ] + {:end_expr, 1, '', ' end '} + ]} + end + + test "trim mode" do + template = '\t<%= if true do %> \n TRUE \n <% else %>\n FALSE \n <% end %> ' + assert T.tokenize(template, 1, trim: true) == {:ok, [ + {:start_expr, 1, '=', ' if true do '}, + {:text, ' TRUE \n'}, + {:middle_expr, 3, '', ' else '}, + {:text, ' FALSE \n'}, + {:end_expr, 5, '', ' end '} + ]} + end + + test "trim mode with comment" do + assert T.tokenize(' <%# comment %> \n123', 1, trim: true) == {:ok, [ + {:text, '123'} + ]} + end + + test "trim mode with CRLF" do + assert T.tokenize('0\r\n <%= 12 %> \r\n34', 1, trim: true) == {:ok, [ + {:text, '0\r\n'}, + {:expr, 2, '=', ' 12 '}, + {:text, '34'} + ]} + end + + test "trim mode set to false" do + assert T.tokenize(' <%= 12 %> \n', 1, trim: false) == {:ok, [ + {:text, ' '}, + {:expr, 1, '=', ' 12 '}, + {:text, ' \n'} + ]} + end + + test "trim mode no false positives" do + assert_not_trimmed = fn x -> assert T.tokenize(x, 1, trim: true) == T.tokenize(x, 1) end + + assert_not_trimmed.('foo <%= "bar" %> ') + assert_not_trimmed.('\n <%= "foo" %>bar') + assert_not_trimmed.(' <%% hello %> ') + assert_not_trimmed.(' <%= 01 %><%= 23 %>\n') end test "raise syntax error when there is start mark and no end mark" do - assert_raise EEx.SyntaxError, "missing token: %>", fn -> - T.tokenize('foo <% :bar', 1) - end + assert T.tokenize('foo <% :bar', 1) == {:error, 1, "missing token '%>'"} + assert T.tokenize('<%# true ', 1) == {:error, 1, "missing token '%>'"} end end diff --git a/lib/eex/test/eex_test.exs b/lib/eex/test/eex_test.exs index 5cd9318b33c..899ca050582 100644 --- a/lib/eex/test/eex_test.exs +++ b/lib/eex/test/eex_test.exs @@ -4,7 +4,7 @@ require EEx defmodule EExTest.Compiled do def before_compile do - fill_in_stacktrace + fill_in_stacktrace() {__ENV__.line, hd(tl(System.stacktrace))} end @@ -19,13 +19,13 @@ defmodule EExTest.Compiled do def file_sample(arg), do: private_file_sample(arg) def after_compile do - fill_in_stacktrace + fill_in_stacktrace() {__ENV__.line, hd(tl(System.stacktrace))} end @file "unknown" def unknown do - fill_in_stacktrace + fill_in_stacktrace() {__ENV__.line, hd(tl(System.stacktrace))} end @@ -50,325 +50,395 @@ defmodule EExTest do use ExUnit.Case, async: true doctest EEx - doctest EEx.AssignsEngine + doctest EEx.Engine + doctest EEx.SmartEngine - test "evaluates simple string" do - assert_eval "foo bar", "foo bar" - end + describe "evaluates" do + test "simple string" do + assert_eval "foo bar", "foo bar" + end - test "evaluates with embedded" do - assert_eval "foo bar", "foo <%= :bar %>" - end + test "Unicode" do + template = """ + • <%= "•" %> • + <%= "Jößé Vâlìm" %> Jößé Vâlìm + """ + assert_eval " • • •\n Jößé Vâlìm Jößé Vâlìm\n", template + end - test "evaluates with embedded and the binding" do - assert EEx.eval_string("foo <%= bar %>", [bar: 1]) == "foo 1" - end + test "trim mode" do + string = "<%= 123 %> \n456\n <%= 
789 %>" + expected = "123456\n789" + assert_eval expected, string, [], trim: true + end - test "evaluates with embedded do end" do - assert_eval "foo bar", "foo <%= if true do %>bar<% end %>" - end + test "trim mode with middle expression" do + string = """ + <%= cond do %> + <% false -> %> + this + <% true -> %> + that + <% end %> + """ + expected = " that\n" + assert_eval expected, string, [], trim: true + end - test "evaluates with embedded do end and eval the expression" do - assert_eval "foo ", "foo <%= if false do %>bar<% end %>" - end + test "embedded code" do + assert_eval "foo bar", "foo <%= :bar %>" + end - test "evaluates with embedded do end and nested print expression" do - assert_eval "foo bar", "foo <%= if true do %><%= :bar %><% end %>" - end + test "embedded code with binding" do + assert EEx.eval_string("foo <%= bar %>", [bar: 1]) == "foo 1" + end - test "evaluates with embedded do end and nested expressions" do - assert_eval "foo bar baz", "foo <%= if true do %>bar <% Process.put(:eex_text, 1) %><%= :baz %><% end %>" - assert Process.get(:eex_text) == 1 - end + test "embedded code with do end when true" do + assert_eval "foo bar", "foo <%= if true do %>bar<% end %>" + end - test "evaluates with embedded middle expression" do - assert_eval "foo bar", "foo <%= if true do %>bar<% else %>baz<% end %>" - end + test "embedded code with do end when false" do + assert_eval "foo ", "foo <%= if false do %>bar<% end %>" + end - test "evaluates with embedded middle expression and eval the expression" do - assert_eval "foo baz", "foo <%= if false do %>bar<% else %>baz<% end %>" - end + test "embedded code with do end and expression" do + assert_eval "foo bar", "foo <%= if true do %><%= :bar %><% end %>" + end - test "evaluates with nested start expression" do - assert_eval "foo bar", "foo <%= if true do %><%= if true do %>bar<% end %><% end %>" - end + test "embedded code with do end and multiple expressions" do + assert_eval "foo bar baz", "foo <%= if true do %>bar <% Process.put(:eex_text, 1) %><%= :baz %><% end %>" + assert Process.get(:eex_text) == 1 + end - test "evaluates with nested middle expression" do - assert_eval "foo baz", "foo <%= if true do %><%= if false do %>bar<% else %>baz<% end %><% end %>" - end + test "embedded code with middle expression" do + assert_eval "foo bar", "foo <%= if true do %>bar<% else %>baz<% end %>" + end - test "evaluates with defined variable" do - assert_eval "foo 1", "foo <% bar = 1 %><%= bar %>" - end + test "embedded code with evaluated middle expression" do + assert_eval "foo baz", "foo <%= if false do %>bar<% else %>baz<% end %>" + end - test "evaluates with require code" do - assert_eval "foo 1,2,3", "foo <% require Enum, as: E %><%= E.join [1, 2, 3], \",\" %>" - end + test "embedded code with nested do end" do + assert_eval "foo bar", "foo <%= if true do %><%= if true do %>bar<% end %><% end %>" + end - test "evaluates with end of token" do - assert_eval "foo bar %>", "foo bar %>" - end + test "embedded code with nested do end with middle expression" do + assert_eval "foo baz", "foo <%= if true do %><%= if false do %>bar<% else %>baz<% end %><% end %>" + end - test "raises a syntax error when the token is invalid" do - assert_raise EEx.SyntaxError, "missing token: %>", fn -> - EEx.compile_string "foo <%= bar" + test "embedded code with parentheses after end in end token" do + assert_eval " 101 102 103 ", "<%= Enum.map([1, 2, 3], (fn x -> %> <%= 100 + x %> <% end) ) %>" end - end - test "raises a syntax error when end expression is found 
without a start expression" do - assert_raise EEx.SyntaxError, "unexpected token: ' end ' at line 1", fn -> - EEx.compile_string "foo <% end %>" + test "embedded code with variable definition" do + assert_eval "foo 1", "foo <% bar = 1 %><%= bar %>" end - end - test "raises a syntax error when start expression is found without an end expression" do - assert_raise EEx.SyntaxError, "unexpected end of string. expecting a closing <% end %>.", fn -> - EEx.compile_string "foo <% if true do %>" + test "embedded code with require" do + assert_eval "foo 1,2,3", "foo <% require Enum, as: E %><%= E.join [1, 2, 3], \",\" %>" end - end - test "raises a syntax error when nested end expression is found without an start expression" do - assert_raise EEx.SyntaxError, "unexpected token: ' end ' at line 1", fn -> - EEx.compile_string "foo <% if true do %><% end %><% end %>" + test "with end of token" do + assert_eval "foo bar %>", "foo bar %>" end end - test "respects line numbers" do - expected = """ -foo -2 -""" + describe "raises syntax errors" do + test "when the token is invalid" do + assert_raise EEx.SyntaxError, "nofile:1: missing token '%>'", fn -> + EEx.compile_string "foo <%= bar" + end + end + + test "when end expression is found without a start expression" do + assert_raise EEx.SyntaxError, "nofile:1: unexpected end of expression <% end %>", fn -> + EEx.compile_string "foo <% end %>" + end + end + + test "when start expression is found without an end expression" do + assert_raise EEx.SyntaxError, "nofile:2: unexpected end of string, expected a closing '<% end %>'", fn -> + EEx.compile_string "foo\n<% if true do %>" + end + end - string = """ -foo -<%= __ENV__.line %> -""" + test "when nested end expression is found without a start expression" do + assert_raise EEx.SyntaxError, "nofile:1: unexpected end of expression <% end %>", fn -> + EEx.compile_string "foo <% if true do %><% end %><% end %>" + end + end + + test "when middle expression has a modifier" do + ExUnit.CaptureIO.capture_io :stderr, fn -> + EEx.compile_string "foo <%= if true do %>true<%= else %>false<% end %>" + end + end - assert_eval expected, string + test "when end expression has a modifier" do + assert_raise EEx.SyntaxError, ~s[nofile:1: unexpected beginning of EEx tag "<%=" on end of expression "<%= end %>", please remove "=" accordingly], fn -> + EEx.compile_string "foo <%= if true do %>true<% else %>false<%= end %>" + end + end end - test "respects line numbers inside nested expressions" do - expected = """ -foo + describe "environment" do + test "respects line numbers" do + expected = """ + foo + 2 + """ -3 + string = """ + foo + <%= __ENV__.line %> + """ -5 -""" + assert_eval expected, string + end - string = """ -foo -<%= if true do %> -<%= __ENV__.line %> -<% end %> -<%= __ENV__.line %> -""" + test "respects line numbers inside nested expressions" do + expected = """ + foo - assert_eval expected, string - end + 3 - test "respects line numbers inside start expression" do - expected = """ -foo + 5 + """ -true + string = """ + foo + <%= if true do %> + <%= __ENV__.line %> + <% end %> + <%= __ENV__.line %> + """ -5 -""" + assert_eval expected, string + end - string = """ -foo -<%= if __ENV__.line == 2 do %> -<%= true %> -<% end %> -<%= __ENV__.line %> -""" + test "respects line numbers inside start expression" do + expected = """ + foo - assert_eval expected, string - end + true - test "respects line numbers inside middle expression with ->" do - expected = """ -foo + 5 + """ -true + string = """ + foo + <%= if __ENV__.line 
== 2 do %> + <%= true %> + <% end %> + <%= __ENV__.line %> + """ -7 -""" + assert_eval expected, string + end - string = """ -foo -<%= cond do %> -<% false -> %> false -<% __ENV__.line == 4 -> %> -<%= true %> -<% end %> -<%= __ENV__.line %> -""" + test "respects line numbers inside middle expression with ->" do + expected = """ + foo - assert_eval expected, string - end + true + + 7 + """ + + string = """ + foo + <%= cond do %> + <% false -> %> false + <% __ENV__.line == 4 -> %> + <%= true %> + <% end %> + <%= __ENV__.line %> + """ + + assert_eval expected, string + end + + test "respects line number inside middle expressions with keywords" do + expected = """ + foo - test "respects line number inside middle expressions with keywords" do - expected = """ -foo + 5 -5 + 7 + """ -7 -""" + string = """ + foo + <%= if false do %> + <%= __ENV__.line %> + <% else %> + <%= __ENV__.line %> + <% end %> + <%= __ENV__.line %> + """ - string = """ -foo -<%= if false do %> -<%= __ENV__.line %> -<% else %> -<%= __ENV__.line %> -<% end %> -<%= __ENV__.line %> -""" + assert_eval expected, string + end - assert_eval expected, string + test "respects files" do + assert_eval "sample.ex", "<%= __ENV__.file %>", [], file: "sample.ex" + end end - test "properly handle functions" do - expected = """ + describe "clauses" do + test "inside functions" do + expected = """ -Number 1 + Number 1 -Number 2 + Number 2 -Number 3 + Number 3 -""" + """ - string = """ -<%= Enum.map [1, 2, 3], fn x -> %> -Number <%= x %> -<% end %> -""" + string = """ + <%= Enum.map [1, 2, 3], fn x -> %> + Number <%= x %> + <% end %> + """ - assert_eval expected, string - end + assert_eval expected, string + end - test "do not consider already finished functions" do - expected = """ -foo + test "inside cond" do + expected = """ + foo -true + true -""" + """ - string = """ -foo -<%= cond do %> -<% false -> %> false -<% fn -> 1 end -> %> -<%= true %> -<% end %> -""" + string = """ + foo + <%= cond do %> + <% false -> %> false + <% fn -> 1 end -> %> + <%= true %> + <% end %> + """ - assert_eval expected, string - end + assert_eval expected, string + end - test "evaluates nested do expressions" do - string = """ - <% y = ["a", "b", "c"] %> - <%= cond do %> - <% "a" in y -> %> - Good - <% true -> %> - <% if true do %>true<% else %>false<% end %> - Bad - <% end %> - """ - - assert_eval "\n\n Good\n \n", string + test "inside cond with do end" do + string = """ + <% y = ["a", "b", "c"] %> + <%= cond do %> + <% "a" in y -> %> + Good + <% true -> %> + <% if true do %>true<% else %>false<% end %> + Bad + <% end %> + """ + + assert_eval "\n\n Good\n \n", string + end end - test "for comprehensions" do - string = """ - <%= for _name <- packages || [] do %> - <% end %> - <%= all || :done %> - """ - assert_eval "\ndone\n", string, packages: nil, all: nil - end + describe "buffers" do + test "unused buffers are kept out" do + string = """ + <%= 123 %> + <% if true do %> + <%= 456 %> + <% end %> + <%= 789 %> + """ - test "unicode" do - template = """ - • <%= "•" %> • - <%= "Jößé Vâlìm" %> Jößé Vâlìm - """ - result = EEx.eval_string(template) - assert result == " • • •\n Jößé Vâlìm Jößé Vâlìm\n" - end + assert_eval "123\n\n789\n", string + end - test "evaluates the source from a given file" do - filename = Path.join(__DIR__, "fixtures/eex_template.eex") - result = EEx.eval_file(filename) - assert result == "foo bar.\n" + test "inside comprehensions" do + string = """ + <%= for _name <- packages || [] do %> + <% end %> + <%= all || :done %> + """ + assert_eval 
"\ndone\n", string, packages: nil, all: nil + end end - test "evaluates the source from a given file with bindings" do - filename = Path.join(__DIR__, "fixtures/eex_template_with_bindings.eex") - result = EEx.eval_file(filename, [bar: 1]) - assert result == "foo 1\n" - end + describe "from file" do + test "evaluates the source" do + filename = Path.join(__DIR__, "fixtures/eex_template.eex") + result = EEx.eval_file(filename) + assert_normalized_newline_equal "foo bar.\n", result + end - test "raises an Exception when there's an error with the given file" do - assert_raise File.Error, "could not read file non-existent.eex: no such file or directory", fn -> - filename = "non-existent.eex" - EEx.compile_file(filename) + test "evaluates the source with bindings" do + filename = Path.join(__DIR__, "fixtures/eex_template_with_bindings.eex") + result = EEx.eval_file(filename, [bar: 1]) + assert_normalized_newline_equal "foo 1\n", result end - end - test "sets external resource attribute" do - assert EExTest.Compiled.__info__(:attributes)[:external_resource] == - [Path.join(__DIR__, "fixtures/eex_template_with_bindings.eex")] - end + test "raises an Exception when file is missing" do + assert_raise File.Error, "could not read file \"non-existent.eex\": no such file or directory", fn -> + filename = "non-existent.eex" + EEx.compile_file(filename) + end + end - test "defined from string" do - assert EExTest.Compiled.string_sample(1, 2) == "3" + test "sets external resource attribute" do + assert EExTest.Compiled.__info__(:attributes)[:external_resource] == + [Path.join(__DIR__, "fixtures/eex_template_with_bindings.eex")] + end end - test "defined from file" do - assert EExTest.Compiled.file_sample(1) == "foo 1\n" - assert EExTest.Compiled.public_file_sample(1) == "foo 1\n" - end + describe "precompiled" do + + test "from string" do + assert EExTest.Compiled.string_sample(1, 2) == "3" + end - test "defined from file do not affect backtrace" do - assert EExTest.Compiled.before_compile == - {8, - {EExTest.Compiled, - :before_compile, - 0, - [file: to_char_list(Path.relative_to_cwd(__ENV__.file)), line: 7] + test "from file" do + assert_normalized_newline_equal "foo 1\n", EExTest.Compiled.file_sample(1) + assert_normalized_newline_equal "foo 1\n", EExTest.Compiled.public_file_sample(1) + end + + test "from file does not affect backtrace" do + assert EExTest.Compiled.before_compile == + {8, + {EExTest.Compiled, + :before_compile, + 0, + [file: to_charlist(Path.relative_to_cwd(__ENV__.file)), line: 7] + } } - } - - assert EExTest.Compiled.after_compile == - {23, - {EExTest.Compiled, - :after_compile, - 0, - [file: to_char_list(Path.relative_to_cwd(__ENV__.file)), line: 22] + + assert EExTest.Compiled.after_compile == + {23, + {EExTest.Compiled, + :after_compile, + 0, + [file: to_charlist(Path.relative_to_cwd(__ENV__.file)), line: 22] + } } - } - - assert EExTest.Compiled.unknown == - {29, - {EExTest.Compiled, - :unknown, - 0, - [file: 'unknown', line: 28] + + assert EExTest.Compiled.unknown == + {29, + {EExTest.Compiled, + :unknown, + 0, + [file: 'unknown', line: 28] + } } - } + end end defmodule TestEngine do @behaviour EEx.Engine + def init(_opts) do + "" + end + def handle_body(body) do {:wrapped, body} end @@ -382,12 +452,19 @@ foo end end - test "calls handle_body" do - assert {:wrapped, "foo"} = EEx.eval_string("foo", [], engine: TestEngine) + describe "custom engines" do + test "calls handle_body" do + assert {:wrapped, "foo"} = EEx.eval_string("foo", [], engine: TestEngine) + end end - defp 
assert_eval(expected, actual, binding \\ []) do - result = EEx.eval_string(actual, binding, file: __ENV__.file, engine: EEx.Engine) + defp assert_eval(expected, actual, binding \\ [], opts \\ []) do + opts = Enum.into [file: __ENV__.file, engine: EEx.Engine], opts + result = EEx.eval_string(actual, binding, opts) assert result == expected end + + defp assert_normalized_newline_equal(expected, actual) do + assert String.replace(expected, "\r\n", "\n") == String.replace(actual, "\r\n", "\n") + end end diff --git a/lib/eex/test/test_helper.exs b/lib/eex/test/test_helper.exs index bf1bd1990e9..24e27edb5ae 100644 --- a/lib/eex/test/test_helper.exs +++ b/lib/eex/test/test_helper.exs @@ -1 +1 @@ -ExUnit.start [trace: "--trace" in System.argv] \ No newline at end of file +ExUnit.start [trace: "--trace" in System.argv] diff --git a/lib/elixir/include/elixir.hrl b/lib/elixir/include/elixir.hrl deleted file mode 100644 index 9d536c9184a..00000000000 --- a/lib/elixir/include/elixir.hrl +++ /dev/null @@ -1,68 +0,0 @@ --define(m(M, K), maps:get(K, M)). --define(line(Opts), elixir_utils:get_line(Opts)). - --record(elixir_scope, { - context=nil, %% can be match, guards or nil - extra=nil, %% extra information about the context, like fn_match and map_key - noname=false, %% when true, don't add new names (used by try) - super=false, %% when true, it means super was invoked - caller=false, %% when true, it means caller was invoked - return=true, %% when true, the return value is used - module=nil, %% the current module - function=nil, %% the current function - vars=[], %% a dict of defined variables and their alias - backup_vars=nil, %% a copy of vars to be used on ^var - match_vars=nil, %% a set of all variables defined in a particular match - export_vars=nil, %% a dict of all variables defined in a particular clause - extra_guards=nil, %% extra guards from args expansion - counter=[], %% a dict counting the variables defined - file=(<<"nofile">>) %% the current scope filename -}). - --record(elixir_quote, { - line=false, - keep=false, - context=nil, - vars_hygiene=true, - aliases_hygiene=true, - imports_hygiene=true, - unquote=true, - unquoted=false, - escape=false -}). - --record(elixir_tokenizer, { - file, - terminators=[], - check_terminators=true, - existing_atoms_only=false -}). - -%% Used in tokenization and interpolation - -%% Numbers --define(is_hex(S), ?is_digit(S) orelse (S >= $A andalso S =< $F) orelse (S >= $a andalso S =< $f)). --define(is_bin(S), S >= $0 andalso S =< $1). --define(is_octal(S), S >= $0 andalso S =< $7). --define(is_leading_octal(S), S >= $0 andalso S =< $3). - -%% Digits and letters --define(is_digit(S), S >= $0 andalso S =< $9). --define(is_upcase(S), S >= $A andalso S =< $Z). --define(is_downcase(S), S >= $a andalso S =< $z). - -%% Atoms --define(is_atom_start(S), ?is_quote(S) orelse ?is_upcase(S) orelse ?is_downcase(S) orelse (S == $_)). --define(is_atom(S), ?is_identifier(S) orelse (S == $@)). - --define(is_identifier(S), ?is_digit(S) orelse ?is_upcase(S) orelse ?is_downcase(S) orelse (S == $_)). --define(is_sigil(S), (S == $/) orelse (S == $<) orelse (S == $") orelse (S == $') orelse - (S == $[) orelse (S == $() orelse (S == ${) orelse (S == $|)). - -%% Quotes --define(is_quote(S), S == $" orelse S == $'). - -%% Spaces --define(is_horizontal_space(S), (S == $\s) orelse (S == $\t)). --define(is_vertical_space(S), (S == $\r) orelse (S == $\n)). --define(is_space(S), ?is_horizontal_space(S) orelse ?is_vertical_space(S)). 
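The trim-mode behaviour exercised by the EEx tokenizer and evaluation tests above can be reproduced directly through the public `EEx.eval_string/3` API. The sketch below is illustrative only: the template strings and expected results are lifted from the test cases in this diff, and the `:trim` option is assumed to be forwarded to the tokenizer exactly as those tests imply.

    # With trim: true, whitespace-only text around EEx tags is dropped and the
    # newline following a trimmed tag is collapsed, which is what the
    # "trim mode" tests assert.
    template = "<%= 123 %> \n456\n <%= 789 %>"
    EEx.eval_string(template, [], trim: true)
    #=> "123456\n789"

    # With trim: false (the default), text segments are kept verbatim.
    EEx.eval_string(" <%= 12 %> \n", [], trim: false)
    #=> " 12 \n"
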
diff --git a/lib/elixir/lib/access.ex b/lib/elixir/lib/access.ex index acf88b370b6..1cfd6b203a5 100644 --- a/lib/elixir/lib/access.ex +++ b/lib/elixir/lib/access.ex @@ -1,15 +1,23 @@ -defprotocol Access do +defmodule Access do @moduledoc """ - The Access protocol is used by `foo[bar]` and also - empowers the nested update functions in Kernel. + Key-based access to data structures using the `data[key]` syntax. - For instance, `foo[bar]` translates `Access.get(foo, bar)`. - `Kernel.get_in/2`, `Kernel.put_in/3`, `Kernel.update_in/3` and - `Kernel.get_and_update_in/3` are also all powered by the Access - protocol. + Elixir provides two syntaxes for accessing values. `user[:name]` + is used by dynamic structures, like maps and keywords, while + `user.name` is used by structs. The main difference is that + `user[:name]` won't raise if the key `:name` is missing but + `user.name` will raise if there is no `:name` key. - This protocol is implemented by default for keywords, maps - and dictionary like types: + Besides the cases above, this module provides convenience + functions for accessing other structures, like `at/1` for + lists and `elem/1` for tuples. Those functions can be used + by the nested update functions in `Kernel`, such as + `Kernel.get_in/2`, `Kernel.put_in/3`, `Kernel.update_in/3`, + `Kernel.get_and_update_in/3` and friends. + + ## Dynamic lookups + + Out of the box, `Access` works with `Keyword` and `Map`: iex> keywords = [a: 1, b: 2] iex> keywords[:a] @@ -23,127 +31,673 @@ defprotocol Access do iex> star_ratings[1.5] "★☆" - The key comparison must be implemented using the `===` operator. + Note that the dynamic lookup syntax (`term[key]`) roughly translates to + `Access.get(term, key, nil)`. + + `Access` can be combined with `Kernel.put_in/3` to put a value + in a given key: + + iex> map = %{a: 1, b: 2} + iex> put_in map[:a], 3 + %{a: 3, b: 2} + + This syntax is very convenient as it can be nested arbitrarily: + + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + iex> put_in users["john"][:age], 28 + %{"john" => %{age: 28}, "meg" => %{age: 23}} + + Furthermore, `Access` transparently ignores `nil` values: + + iex> keywords = [a: 1, b: 2] + iex> keywords[:c][:unknown] + nil + + Since `Access` is a behaviour, it can be implemented for key-value + data structures. The implementation should be added to the + module that defines the struct being accessed. `Access` requires the + key comparison to be implemented using the `===` operator. + + ## Static lookups + + The `Access` syntax (`data[key]`) cannot be used to access fields in + structs, since structs do not implement the `Access` behaviour by + default. It is also a design decision: the dynamic access lookup + is meant to be used for dynamic key-value structures, like maps + and keywords, and not by static ones like structs (where fields are + known and not dynamic). + + Therefore Elixir provides a static lookup for struct fields and for atom + fields in maps. Imagine a struct named `User` with a `:name` field. 
+ The following would raise: + + user = %User{name: "John"} + user[:name] + # ** (UndefinedFunctionError) undefined function User.fetch/2 (User does not implement the Access behaviour) + + Structs instead use the `user.name` syntax to access fields: + + user.name + #=> "John" + + The same `user.name` syntax can also be used by `Kernel.put_in/2` + for updating structs fields: + + put_in user.name, "Mary" + #=> %User{name: "Mary"} + + Differently from `user[:name]`, `user.name` is not extensible via + a behaviour and is restricted only to structs and atom keys in maps. + + As mentioned above, this works for atom keys in maps as well. Refer to the + `Map` module for more information on this. + + Summing up: + + * `user[:name]` is used by dynamic structures, is extensible and + does not raise on missing keys + * `user.name` is used by static structures, it is not extensible + and it will raise on missing keys + + ## Accessors + + While Elixir provides built-in syntax only for traversing dynamic + and static key-value structures, this module provides convenience + functions for traversing other structures, like tuples and lists, + to be used alongside `Kernel.put_in/2` in others. + + For instance, given a user map with `:name` and `:languages` keys, here is how + to deeply traverse the map and convert all language names to uppercase: + + iex> languages = [ + ...> %{name: "elixir", type: :functional}, + ...> %{name: "c", type: :procedural}, + ...> ] + iex> user = %{name: "john", languages: languages} + iex> update_in user, [:languages, Access.all(), :name], &String.upcase/1 + %{name: "john", + languages: [%{name: "ELIXIR", type: :functional}, + %{name: "C", type: :procedural}]} + + See the functions `key/1`, `key!/1`, `elem/1`, and `all/0` for some of the + available accessors. + + ## Implementing the Access behaviour for custom data structures + + In order to be able to use the `Access` behaviour with custom data structures + (which have to be structs), such structures have to implement the `Access` + behaviour. For example, for a `User` struct, this would have to be done: + + defmodule User do + defstruct [:name, :email] + + @behaviour Access + # Implementation of the Access callbacks... + end + """ + @type container :: keyword | struct | map + @type nil_container :: nil + @type any_container :: any + @type t :: container | nil_container | any_container + @type key :: any + @type value :: any + + @type get_fun(data, get_value) :: + (:get, data, (term -> term) -> + {get_value, new_data :: container}) + + @type get_and_update_fun(data, get_value) :: + (:get_and_update, data, (term -> term) -> + {get_value, new_data :: container} | :pop) + + @type access_fun(data, get_value) :: + get_fun(data, get_value) | get_and_update_fun(data, get_value) + @doc """ - Accesses the given key in the container. + Invoked in order to access the value stored under `key` in the given term `term`. + + This function should return `{:ok, value}` where `value` is the value under + `key` if the key exists in the term, or `:error` if the key does not exist in + the term. + + Many of the functions defined in the `Access` module internally call this + function. This function is also used when the square-brackets access syntax + (`structure[key]`) is used: the `fetch/2` callback implemented by the module + that defines the `structure` struct is invoked and if it returns `{:ok, + value}` then `value` is returned, or if it returns `:error` then `nil` is + returned. 
+ + See the `Map.fetch/2` and `Keyword.fetch/2` implementations for examples of + how to implement this callback. """ - @spec get(t, term) :: t - def get(container, key) + @callback fetch(term :: t, key) :: {:ok, value} | :error @doc """ - Gets a value and updates the given `key` in one pass. + Invoked in order to access the value stored under `key` in the given term `term`, + defaulting to `default` if not present. + + This function should return the value under `key` in `term` if there's + such key, otherwise `default`. - The function must receive the value for the given `key` - (or `nil` if the key doesn't exist in `container`) and - the function must return a tuple containing the `get` - value and the new value to be stored in the `container`. + For most data structures, this can be implemented using `fetch/2` internally; + for example: + + def get(structure, key, default) do + case fetch(structure, key) do + {:ok, value} -> value + :error -> default + end + end + + See the `Map.get/3` and `Keyword.get/3` implementations for examples of + how to implement this callback. """ - @spec get_and_update(t, term, (term -> {get, term})) :: {get, t} when get: var - def get_and_update(container, key, fun) -end + @callback get(term :: t, key, default :: value) :: value + + @doc """ + Invoked in order to access the value under `key` and update it at the same time. + + The implementation of this callback should invoke `fun` with the value under + `key` in the passed structure `data`, or with `nil` if `key` is not present in it. + This function must return either `{get_value, update_value}` or `:pop`. + + If the passed function returns `{get_value, update_value}`, + the return value of this callback should be `{get_value, new_data}`, where: + - `get_value` is the retrieved value (which can be operated on before being returned) + - `update_value` is the new value to be stored under `key` + - `new_data` is `data` after updating the value of `key` with `update_value`. + + If the passed function returns `:pop`, the return value of this callback + must be `{value, new_data}` where `value` is the value under `key` + (or `nil` if not present) and `new_data` is `data` without `key`. + + See the implementations of `Map.get_and_update/3` or `Keyword.get_and_update/3` + for more examples. + """ + @callback get_and_update(data, key, (value -> {get_value, value} | :pop)) :: + {get_value, data} when get_value: var, data: container | any_container + + @doc """ + Invoked to "pop" the value under `key` out of the given data structure. + + When `key` exists in the given structure `data`, the implementation should + return a `{value, new_data}` tuple where `value` is the value that was under + `key` and `new_data` is `term` without `key`. + + When `key` is not present in the given structure, a tuple `{value, data}` + should be returned, where `value` is implementation-defined. + + See the implementations for `Map.pop/3` or `Keyword.pop/3` for more examples. 
+ """ + @callback pop(data, key) :: {value, data} when data: container | any_container -defimpl Access, for: List do - def get(dict, key) when is_atom(key) do - case :lists.keyfind(key, 1, dict) do - {^key, value} -> value - false -> nil + defmacrop raise_undefined_behaviour(e, struct, top) do + quote do + stacktrace = System.stacktrace + e = + case stacktrace do + [unquote(top) | _] -> + %{unquote(e) | reason: "#{inspect unquote(struct)} does not implement the Access behaviour"} + _ -> + unquote(e) + end + reraise e, stacktrace end end - def get(_dict, key) do - raise ArgumentError, - "the access protocol for lists expect the key to be an atom, got: #{inspect key}" + @doc """ + Fetches the value for the given key in a container (a map, keyword + list, or struct that implements the `Access` behaviour). + + Returns `{:ok, value}` where `value` is the value under `key` if there is such + a key, or `:error` if `key` is not found. + """ + @spec fetch(container, term) :: {:ok, term} | :error + @spec fetch(nil_container, any) :: :error + def fetch(container, key) + + def fetch(%struct{} = container, key) do + struct.fetch(container, key) + rescue + e in UndefinedFunctionError -> + raise_undefined_behaviour e, struct, {^struct, :fetch, [^container, ^key], _} end - def get_and_update(dict, key, fun) when is_atom(key) do - get_and_update(dict, [], key, fun) + def fetch(map, key) when is_map(map) do + case map do + %{^key => value} -> {:ok, value} + _ -> :error + end end - defp get_and_update([{key, value}|t], acc, key, fun) do - {get, update} = fun.(value) - {get, :lists.reverse(acc, [{key, update}|t])} + def fetch(list, key) when is_list(list) and is_atom(key) do + case :lists.keyfind(key, 1, list) do + {_, value} -> {:ok, value} + false -> :error + end end - defp get_and_update([h|t], acc, key, fun) do - get_and_update(t, [h|acc], key, fun) + def fetch(list, key) when is_list(list) do + raise ArgumentError, + "the Access calls for keywords expect the key to be an atom, got: " <> inspect(key) end - defp get_and_update([], acc, key, fun) do - {get, update} = fun.(nil) - {get, [{key, update}|:lists.reverse(acc)]} + def fetch(nil, _key) do + :error end -end -defimpl Access, for: Map do - def get(map, key) do - case :maps.find(key, map) do + @doc """ + Gets the value for the given key in a container (a map, keyword + list, or struct that implements the `Access` behaviour). + + Returns the value under `key` if there is such a key, or `default` if `key` is + not found. 
+ """ + @spec get(container, term, term) :: term + @spec get(nil_container, any, default) :: default when default: var + def get(container, key, default \\ nil) + + def get(%{__struct__: struct} = container, key, default) do + try do + struct.fetch(container, key) + rescue + e in UndefinedFunctionError -> + raise_undefined_behaviour e, struct, {^struct, :fetch, [^container, ^key], _} + else {:ok, value} -> value - :error -> nil + :error -> default end end - def get_and_update(map, key, fun) do - value = - case :maps.find(key, map) do - {:ok, value} -> value - :error -> nil - end - - {get, update} = fun.(value) - {get, :maps.put(key, update, map)} + def get(map, key, default) when is_map(map) do + case map do + %{^key => value} -> value + _ -> default + end end - def get!(%{} = map, key) do - case :maps.find(key, map) do - {:ok, value} -> value - :error -> raise KeyError, key: key, term: map + def get(list, key, default) when is_list(list) and is_atom(key) do + case :lists.keyfind(key, 1, list) do + {_, value} -> value + false -> default end end - def get!(other, key) do + def get(list, key, _default) when is_list(list) do raise ArgumentError, - "could not get key #{inspect key}. Expected map/struct, got: #{inspect other}" + "the Access calls for keywords expect the key to be an atom, got: " <> inspect(key) end - def get_and_update!(%{} = map, key, fun) do - case :maps.find(key, map) do - {:ok, value} -> - {get, update} = fun.(value) - {get, :maps.put(key, update, map)} - :error -> - raise KeyError, key: key, term: map - end + def get(nil, _key, default) do + default + end + + @doc """ + Gets and updates the given key in a `container` (a map, a keyword list, + a struct that implements the `Access` behaviour). + + The `fun` argument receives the value of `key` (or `nil` if `key` is not + present in `container`) and must return a two-element tuple `{get_value, update_value}`: + the "get" value `get_value` (the retrieved value, which can be operated on before + being returned) and the new value to be stored under `key` (`update_value`). + `fun` may also return `:pop`, which means the current value + should be removed from the container and returned. + + The returned value is a two-element tuple with the "get" value returned by + `fun` and a new container with the updated value under `key`. + """ + @spec get_and_update(data, key, (value -> {get_value, value} | :pop)) :: + {get_value, data} when get_value: var, data: container + def get_and_update(container, key, fun) + + def get_and_update(%{__struct__: struct} = container, key, fun) do + struct.get_and_update(container, key, fun) + rescue + e in UndefinedFunctionError -> + raise_undefined_behaviour e, struct, {^struct, :get_and_update, [^container, ^key, ^fun], _} + end + + def get_and_update(map, key, fun) when is_map(map) do + Map.get_and_update(map, key, fun) + end + + def get_and_update(list, key, fun) when is_list(list) do + Keyword.get_and_update(list, key, fun) end - def get_and_update!(other, key, _fun) do + def get_and_update(nil, key, _fun) do raise ArgumentError, - "could not update key #{inspect key}. Expected map/struct, got: #{inspect other}" + "could not put/update key #{inspect key} on a nil value" end -end -defimpl Access, for: Atom do - def get(nil, _) do - nil + @doc """ + Removes the entry with a given key from a container (a map, keyword + list, or struct that implements the `Access` behaviour). + + Returns a tuple containing the value associated with the key and the + updated container. 
`nil` is returned for the value if the key isn't + in the container. + + ## Examples + + With a map: + + iex> Access.pop(%{name: "Elixir", creator: "Valim"}, :name) + {"Elixir", %{creator: "Valim"}} + + A keyword list: + + iex> Access.pop([name: "Elixir", creator: "Valim"], :name) + {"Elixir", [creator: "Valim"]} + + An unknown key: + + iex> Access.pop(%{name: "Elixir", creator: "Valim"}, :year) + {nil, %{creator: "Valim", name: "Elixir"}} + + """ + @spec pop(data, key) :: {value, data} when data: container + def pop(%{__struct__: struct} = container, key) do + struct.pop(container, key) + rescue + e in UndefinedFunctionError -> + raise_undefined_behaviour e, struct, {^struct, :pop, [^container, ^key], _} + end + + def pop(map, key) when is_map(map) do + Map.pop(map, key) + end + + def pop(list, key) when is_list(list) do + Keyword.pop(list, key) + end + + def pop(nil, key) do + raise ArgumentError, + "could not pop key #{inspect key} on a nil value" + end + + ## Accessors + + @doc """ + Returns a function that accesses the given key in a map/struct. + + The returned function is typically passed as an accessor to `Kernel.get_in/2`, + `Kernel.get_and_update_in/3`, and friends. + + The returned function uses the default value if the key does not exist. + This can be used to specify defaults and safely traverse missing keys: + + iex> get_in(%{}, [Access.key(:user, %{}), Access.key(:name)]) + nil + + Such is also useful when using update functions, allowing us to introduce + values as we traverse the data structure for updates: + + iex> put_in(%{}, [Access.key(:user, %{}), Access.key(:name)], "Mary") + %{user: %{name: "Mary"}} + + ## Examples + + iex> map = %{user: %{name: "john"}} + iex> get_in(map, [Access.key(:unknown, %{}), Access.key(:name, "john")]) + "john" + iex> get_and_update_in(map, [Access.key(:user), Access.key(:name)], fn + ...> prev -> {prev, String.upcase(prev)} + ...> end) + {"john", %{user: %{name: "JOHN"}}} + iex> pop_in(map, [Access.key(:user), Access.key(:name)]) + {"john", %{user: %{}}} + + An error is raised if the accessed structure is not a map or a struct: + + iex> get_in(nil, [Access.key(:foo)]) + ** (BadMapError) expected a map, got: nil + + iex> get_in([], [Access.key(:foo)]) + ** (BadMapError) expected a map, got: [] + + """ + @spec key(key, term) :: access_fun(data :: struct | map, get_value :: term) + def key(key, default \\ nil) do + fn + :get, data, next -> + next.(Map.get(data, key, default)) + :get_and_update, data, next -> + value = Map.get(data, key, default) + case next.(value) do + {get, update} -> {get, Map.put(data, key, update)} + :pop -> {value, Map.delete(data, key)} + end + end + end + + @doc """ + Returns a function that accesses the given key in a map/struct. + + The returned function is typically passed as an accessor to `Kernel.get_in/2`, + `Kernel.get_and_update_in/3`, and friends. + + The returned function raises if the key does not exist. 
+ + ## Examples + + iex> map = %{user: %{name: "john"}} + iex> get_in(map, [Access.key!(:user), Access.key!(:name)]) + "john" + iex> get_and_update_in(map, [Access.key!(:user), Access.key!(:name)], fn + ...> prev -> {prev, String.upcase(prev)} + ...> end) + {"john", %{user: %{name: "JOHN"}}} + iex> pop_in(map, [Access.key!(:user), Access.key!(:name)]) + {"john", %{user: %{}}} + iex> get_in(map, [Access.key!(:user), Access.key!(:unknown)]) + ** (KeyError) key :unknown not found in: %{name: \"john\"} + + An error is raised if the accessed structure is not a map/struct: + + iex> get_in([], [Access.key!(:foo)]) + ** (RuntimeError) Access.key!/1 expected a map/struct, got: [] + + """ + @spec key!(key) :: access_fun(data :: struct | map, get_value :: term) + def key!(key) do + fn + :get, %{} = data, next -> + next.(Map.fetch!(data, key)) + :get_and_update, %{} = data, next -> + value = Map.fetch!(data, key) + case next.(value) do + {get, update} -> {get, Map.put(data, key, update)} + :pop -> {value, Map.delete(data, key)} + end + _op, data, _next -> + raise "Access.key!/1 expected a map/struct, got: #{inspect data}" + end + end + + @doc ~S""" + Returns a function that accesses the element at the given index in a tuple. + + The returned function is typically passed as an accessor to `Kernel.get_in/2`, + `Kernel.get_and_update_in/3`, and friends. + + The returned function raises if `index` is out of bounds. + + ## Examples + + iex> map = %{user: {"john", 27}} + iex> get_in(map, [:user, Access.elem(0)]) + "john" + iex> get_and_update_in(map, [:user, Access.elem(0)], fn + ...> prev -> {prev, String.upcase(prev)} + ...> end) + {"john", %{user: {"JOHN", 27}}} + iex> pop_in(map, [:user, Access.elem(0)]) + ** (RuntimeError) cannot pop data from a tuple + + An error is raised if the accessed structure is not a tuple: + + iex> get_in(%{}, [Access.elem(0)]) + ** (RuntimeError) Access.elem/1 expected a tuple, got: %{} + + """ + @spec elem(non_neg_integer) :: access_fun(data :: tuple, get_value :: term) + def elem(index) when is_integer(index) do + pos = index + 1 + + fn + :get, data, next when is_tuple(data) -> + next.(:erlang.element(pos, data)) + :get_and_update, data, next when is_tuple(data) -> + value = :erlang.element(pos, data) + case next.(value) do + {get, update} -> {get, :erlang.setelement(pos, data, update)} + :pop -> raise "cannot pop data from a tuple" + end + _op, data, _next -> + raise "Access.elem/1 expected a tuple, got: #{inspect data}" + end + end + + @doc ~S""" + Returns a function that accesses all the elements in a list. + + The returned function is typically passed as an accessor to `Kernel.get_in/2`, + `Kernel.get_and_update_in/3`, and friends. 
+ + ## Examples + + iex> list = [%{name: "john"}, %{name: "mary"}] + iex> get_in(list, [Access.all(), :name]) + ["john", "mary"] + iex> get_and_update_in(list, [Access.all(), :name], fn + ...> prev -> {prev, String.upcase(prev)} + ...> end) + {["john", "mary"], [%{name: "JOHN"}, %{name: "MARY"}]} + iex> pop_in(list, [Access.all(), :name]) + {["john", "mary"], [%{}, %{}]} + + Here is an example that traverses the list dropping even + numbers and multiplying odd numbers by 2: + + iex> require Integer + iex> get_and_update_in([1, 2, 3, 4, 5], [Access.all], fn + ...> num -> if Integer.is_even(num), do: :pop, else: {num, num * 2} + ...> end) + {[1, 2, 3, 4, 5], [2, 6, 10]} + + An error is raised if the accessed structure is not a list: + + iex> get_in(%{}, [Access.all()]) + ** (RuntimeError) Access.all/0 expected a list, got: %{} + + """ + @spec all() :: access_fun(data :: list, get_value :: list) + def all() do + &all/3 + end + + defp all(:get, data, next) when is_list(data) do + Enum.map(data, next) + end + + defp all(:get_and_update, data, next) when is_list(data) do + all(data, next, _gets = [], _updates = []) + end + + defp all(_op, data, _next) do + raise "Access.all/0 expected a list, got: #{inspect data}" + end + + defp all([head | rest], next, gets, updates) do + case next.(head) do + {get, update} -> all(rest, next, [get | gets], [update | updates]) + :pop -> all(rest, next, [head | gets], updates) + end + end + + defp all([], _next, gets, updates) do + {:lists.reverse(gets), :lists.reverse(updates)} + end + + @doc ~S""" + Returns a function that accesses the element at `index` (zero based) of a list. + + The returned function is typically passed as an accessor to `Kernel.get_in/2`, + `Kernel.get_and_update_in/3`, and friends. + + ## Examples + + iex> list = [%{name: "john"}, %{name: "mary"}] + iex> get_in(list, [Access.at(1), :name]) + "mary" + iex> get_and_update_in(list, [Access.at(0), :name], fn + ...> prev -> {prev, String.upcase(prev)} + ...> end) + {"john", [%{name: "JOHN"}, %{name: "mary"}]} + + `at/1` can also be used to pop elements out of a list or + a key inside of a list: + + iex> list = [%{name: "john"}, %{name: "mary"}] + iex> pop_in(list, [Access.at(0)]) + {%{name: "john"}, [%{name: "mary"}]} + iex> pop_in(list, [Access.at(0), :name]) + {"john", [%{}, %{name: "mary"}]} + + When the index is out of bounds, `nil` is returned and the update function is never called: + + iex> list = [%{name: "john"}, %{name: "mary"}] + iex> get_in(list, [Access.at(10), :name]) + nil + iex> get_and_update_in(list, [Access.at(10), :name], fn + ...> prev -> {prev, String.upcase(prev)} + ...> end) + {nil, [%{name: "john"}, %{name: "mary"}]} + + An error is raised for negative indexes: + + iex> get_in([], [Access.at(-1)]) + ** (FunctionClauseError) no function clause matching in Access.at/1 + + An error is raised if the accessed structure is not a list: + + iex> get_in(%{}, [Access.at(1)]) + ** (RuntimeError) Access.at/1 expected a list, got: %{} + + """ + @spec at(non_neg_integer) :: access_fun(data :: list, get_value :: term) + def at(index) when is_integer(index) and index >= 0 do + fn(op, data, next) -> at(op, data, index, next) end + end + + defp at(:get, data, index, next) when is_list(data) do + data |> Enum.at(index) |> next.() end - def get(atom, _) do - undefined(atom) + defp at(:get_and_update, data, index, next) when is_list(data) do + get_and_update_at(data, index, next, []) end - def get_and_update(nil, _, fun) do - fun.(nil) + defp at(_op, data, _index, _next) do + raise 
"Access.at/1 expected a list, got: #{inspect data}" + end + + defp get_and_update_at([head | rest], 0, next, updates) do + case next.(head) do + {get, update} -> {get, :lists.reverse([update | updates], rest)} + :pop -> {head, :lists.reverse(updates, rest)} + end end - def get_and_update(atom, _key, _fun) do - undefined(atom) + defp get_and_update_at([head | rest], index, next, updates) do + get_and_update_at(rest, index - 1, next, [head | updates]) end - defp undefined(atom) do - raise Protocol.UndefinedError, - protocol: @protocol, - value: atom, - description: "only the nil atom is supported" + defp get_and_update_at([], _index, _next, updates) do + {nil, :lists.reverse(updates)} end end diff --git a/lib/elixir/lib/agent.ex b/lib/elixir/lib/agent.ex index 628f0675f79..8fa790334a4 100644 --- a/lib/elixir/lib/agent.ex +++ b/lib/elixir/lib/agent.ex @@ -6,18 +6,18 @@ defmodule Agent do must be accessed from different processes or by the same process at different points in time. - The Agent module provides a basic server implementation that + The `Agent` module provides a basic server implementation that allows state to be retrieved and updated via a simple API. ## Examples For example, in the Mix tool that ships with Elixir, we need to keep a set of all tasks executed by a given project. Since - this set is shared, we can implement it with an Agent: + this set is shared, we can implement it with an agent: defmodule Mix.TasksServer do def start_link do - Agent.start_link(fn -> HashSet.new end, name: __MODULE__) + Agent.start_link(fn -> MapSet.new end, name: __MODULE__) end @doc "Checks if the task has already executed" @@ -31,15 +31,22 @@ defmodule Agent do @doc "Marks a task as executed" def put_task(task, project) do item = {task, project} - Agent.update(__MODULE__, &Set.put(&1, item)) + Agent.update(__MODULE__, &MapSet.put(&1, item)) + end + + @doc "Resets the executed tasks and returns the previous list of tasks" + def take_all() do + Agent.get_and_update(__MODULE__, fn set -> + {Enum.into(set, []), MapSet.new} + end) end end Note that agents still provide a segregation between the client and server APIs, as seen in GenServers. In particular, - all code inside the function passed to the agent is executed + all code inside the function passed to `Agent` functions is executed by the agent. This distinction is important because you may - want to avoid expensive operations inside the agent, as it will + want to avoid expensive operations inside the agent, as they will effectively block the agent until the request is fulfilled. Consider these two examples: @@ -54,48 +61,53 @@ defmodule Agent do Agent.get(agent, &(&1)) |> do_something_expensive() end - The first one blocks the agent while the second one copies - all the state to the client and executes the operation in the client. - The trade-off here is exactly if the data is small enough to be - sent to the client cheaply or large enough to require processing on - the server (or at least some initial processing). + The first function blocks the agent. The second function copies all the state + to the client and then executes the operation in the client. One difference is + whether the data is large enough to require processing in the server, at least + initially, or small enough to be sent to the client cheaply. 
Another + difference is whether the data needs to be processed atomically: getting the + state and calling `do_something_expensive(state)` outside of the agent means + that the agent's state can be updated in the meantime, so putting the state + back in the agent afterwards may override the updated that happened while + processing. - ## Name Registration + ## Name registration - An Agent is bound to the same name registration rules as GenServers. - Read more about it in the `GenServer` docs. + An agent is bound to the same name registration rules as GenServers. + Read more about it in the `GenServer` documentation. ## A word on distributed agents It is important to consider the limitations of distributed agents. Agents - work by sending anonymous functions between the caller and the agent. - In a distributed setup with multiple nodes, agents only work if the caller - (client) and the agent have the same version of a given module. - - This setup may exhibit issues when doing "rolling upgrades". By rolling - upgrades we mean the following situation: you wish to deploy a new version of - your software by *shutting down* some of your nodes and replacing them with - nodes running a new version of the software. In this setup, part of your - environment will have one version of a given module and the other part - another version (the newer one) of the same module; this may cause agents to - crash. That said, if you plan to run in distributed environments, agents - should likely be avoided. - - Note, however, that agents work fine if you want to perform hot code - swapping, as it keeps both the old and new versions of a given module. - We detail how to do hot code swapping with agents in the next section. + provide two APIs, one that works with anonymous functions and another + that expects an explicit module, function, and arguments. + + In a distributed setup with multiple nodes, the API that accepts anonymous + functions only works if the caller (client) and the agent have the same + version of the caller module. + + Keep in mind this issue also shows up when performing "rolling upgrades" + with agents. By rolling upgrades we mean the following situation: you wish + to deploy a new version of your software by *shutting down* some of your + nodes and replacing them with nodes running a new version of the software. + In this setup, part of your environment will have one version of a given + module and the other part another version (the newer one) of the same module. + + The best solution is to simply use the explicit module, function, and arguments + APIs when working with distributed agents. ## Hot code swapping An agent can have its code hot swapped live by simply passing a module, - function and args tuple to the update instruction. For example, imagine + function, and arguments tuple to the update instruction. For example, imagine you have an agent named `:sample` and you want to convert its inner state - from some dict structure to a map. It can be done with the following + from a keyword list to a map. It can be done with the following instruction: {:update, :sample, {:advanced, {Enum, :into, [%{}]}}} - The agent's state will be added to the given list as the first argument. + The agent's state will be added to the given list of arguments (`[%{}]`) as + the first argument. """ @typedoc "Return values of `start*` functions" @@ -111,12 +123,12 @@ defmodule Agent do @type state :: term @doc """ - Starts an agent linked to the current process. 
+ Starts an agent linked to the current process with the given function. This is often used to start the agent as part of a supervision tree. - Once the agent is spawned, the given function is invoked and its return - value is used as the agent state. Note that `start_link` does not return + Once the agent is spawned, the given function `fun` is invoked and its return + value is used as the agent state. Note that `start_link/2` does not return until the given function has returned. ## Options @@ -137,22 +149,51 @@ defmodule Agent do ## Return values If the server is successfully created and initialized, the function returns - `{:ok, pid}`, where `pid` is the pid of the server. If there already exists - an agent with the specified name, the function returns - `{:error, {:already_started, pid}}` with the pid of that process. + `{:ok, pid}`, where `pid` is the PID of the server. If an agent with the + specified name already exists, the function returns + `{:error, {:already_started, pid}}` with the PID of that process. + + If the given function callback fails, the function returns `{:error, reason}`. + + ## Examples + + iex> {:ok, pid} = Agent.start_link(fn -> 42 end) + iex> Agent.get(pid, fn state -> state end) + 42 + + iex> {:error, {exception, _stacktrace}} = Agent.start(fn -> raise "oops" end) + iex> exception + %RuntimeError{message: "oops"} - If the given function callback fails with `reason`, the function returns - `{:error, reason}`. """ @spec start_link((() -> term), GenServer.options) :: on_start def start_link(fun, options \\ []) when is_function(fun, 0) do GenServer.start_link(Agent.Server, fun, options) end + @doc """ + Starts an agent linked to the current process. + + Same as `start_link/2` but a module, function, and arguments are expected + instead of an anonymous function; `fun` in `module` will be called with the + given arguments `args` to initialize the state. + """ + @spec start_link(module, atom, [any], GenServer.options) :: on_start + def start_link(module, fun, args, options \\ []) do + GenServer.start_link(Agent.Server, {module, fun, args}, options) + end + @doc """ Starts an agent process without links (outside of a supervision tree). See `start_link/2` for more information. + + ## Examples + + iex> {:ok, pid} = Agent.start(fn -> 42 end) + iex> Agent.get(pid, fn(state) -> state end) + 42 + """ @spec start((() -> term), GenServer.options) :: on_start def start(fun, options \\ []) when is_function(fun, 0) do @@ -160,13 +201,34 @@ defmodule Agent do end @doc """ - Gets the agent value and executes the given function. + Starts an agent without links with the given module, function, and arguments. + + See `start_link/4` for more information. + """ + @spec start(module, atom, [any], GenServer.options) :: on_start + def start(module, fun, args, options \\ []) do + GenServer.start(Agent.Server, {module, fun, args}, options) + end + + @doc """ + Gets an agent value via the given anonymous function. The function `fun` is sent to the `agent` which invokes the function passing the agent state. The result of the function invocation is - returned. + returned from this function. + + `timeout` is an integer greater than zero which specifies how many + milliseconds are allowed before the agent executes the function and returns + the result value, or the atom `:infinity` to wait indefinitely. If no result + is received within the specified time, the function call fails and the caller + exits. 
+ + ## Examples + + iex> {:ok, pid} = Agent.start_link(fn -> 42 end) + iex> Agent.get(pid, fn state -> state end) + 42 - A timeout can also be specified (it has a default value of 5000). """ @spec get(agent, (state -> a), timeout) :: a when a: var def get(agent, fun, timeout \\ 5000) when is_function(fun, 1) do @@ -174,14 +236,40 @@ defmodule Agent do end @doc """ - Gets and updates the agent state in one operation. + Gets an agent value via the given function. + + Same as `get/3` but a module, function, and arguments are expected + instead of an anonymous function. The state is added as first + argument to the given list of arguments. + """ + @spec get(agent, module, atom, [term], timeout) :: any + def get(agent, module, fun, args, timeout \\ 5000) do + GenServer.call(agent, {:get, {module, fun, args}}, timeout) + end + + @doc """ + Gets and updates the agent state in one operation via the given anonymous + function. The function `fun` is sent to the `agent` which invokes the function passing the agent state. The function must return a tuple with two - elements, the first being the value to return (i.e. the `get` value) - and the second one is the new state. + elements, the first being the value to return (that is, the "get" value) + and the second one being the new state of the agent. + + `timeout` is an integer greater than zero which specifies how many + milliseconds are allowed before the agent executes the function and returns + the result value, or the atom `:infinity` to wait indefinitely. If no result + is received within the specified time, the function call fails and the caller + exits. + + ## Examples + + iex> {:ok, pid} = Agent.start_link(fn -> 42 end) + iex> Agent.get_and_update(pid, fn state -> {state, state + 1} end) + 42 + iex> Agent.get(pid, fn state -> state end) + 43 - A timeout can also be specified (it has a default value of 5000). """ @spec get_and_update(agent, (state -> {a, state}), timeout) :: a when a: var def get_and_update(agent, fun, timeout \\ 5000) when is_function(fun, 1) do @@ -189,40 +277,105 @@ defmodule Agent do end @doc """ - Updates the agent state. + Gets and updates the agent state in one operation via the given function. + + Same as `get_and_update/3` but a module, function, and arguments are expected + instead of an anonymous function. The state is added as first + argument to the given list of arguments. + """ + @spec get_and_update(agent, module, atom, [term], timeout) :: any + def get_and_update(agent, module, fun, args, timeout \\ 5000) do + GenServer.call(agent, {:get_and_update, {module, fun, args}}, timeout) + end + + @doc """ + Updates the agent state via the given anonymous function. The function `fun` is sent to the `agent` which invokes the function - passing the agent state. The function must return the new state. + passing the agent state. The return value of `fun` becomes the new + state of the agent. - A timeout can also be specified (it has a default value of 5000). This function always returns `:ok`. + + `timeout` is an integer greater than zero which specifies how many + milliseconds are allowed before the agent executes the function and returns + the result value, or the atom `:infinity` to wait indefinitely. If no result + is received within the specified time, the function call fails and the caller + exits. 
+ + ## Examples + + iex> {:ok, pid} = Agent.start_link(fn -> 42 end) + iex> Agent.update(pid, fn state -> state + 1 end) + :ok + iex> Agent.get(pid, fn state -> state end) + 43 + """ - @spec update(agent, (state -> state)) :: :ok + @spec update(agent, (state -> state), timeout) :: :ok def update(agent, fun, timeout \\ 5000) when is_function(fun, 1) do GenServer.call(agent, {:update, fun}, timeout) end @doc """ - Performs a cast (fire and forget) operation on the agent state. + Updates the agent state via the given function. + + Same as `update/3` but a module, function, and arguments are expected + instead of an anonymous function. The state is added as first + argument to the given list of arguments. + """ + @spec update(agent, module, atom, [term], timeout) :: :ok + def update(agent, module, fun, args, timeout \\ 5000) do + GenServer.call(agent, {:update, {module, fun, args}}, timeout) + end + + @doc """ + Performs a cast (*fire and forget*) operation on the agent state. The function `fun` is sent to the `agent` which invokes the function - passing the agent state. The function must return the new state. + passing the agent state. The return value of `fun` becomes the new + state of the agent. - Note that `cast` returns `:ok` immediately, regardless of whether the - destination node or agent exists. + Note that `cast` returns `:ok` immediately, regardless of whether `agent` (or + the node it should live on) exists. """ @spec cast(agent, (state -> state)) :: :ok def cast(agent, fun) when is_function(fun, 1) do - GenServer.cast(agent, fun) + GenServer.cast(agent, {:cast, fun}) + end + + @doc """ + Performs a cast (*fire and forget*) operation on the agent state. + + Same as `cast/2` but a module, function, and arguments are expected + instead of an anonymous function. The state is added as first + argument to the given list of arguments. + """ + @spec cast(agent, module, atom, [term]) :: :ok + def cast(agent, module, fun, args) do + GenServer.cast(agent, {:cast, {module, fun, args}}) end @doc """ - Stops the agent. + Synchronously stops the agent with the given `reason`. + + It returns `:ok` if the agent terminates with the given + reason. If the agent terminates with another reason, the call will + exit. + + This function keeps OTP semantics regarding error reporting. + If the reason is any other than `:normal`, `:shutdown` or + `{:shutdown, _}`, an error report will be logged. + + ## Examples + + iex> {:ok, pid} = Agent.start_link(fn -> 42 end) + iex> Agent.stop(pid) + :ok - Returns `:ok` if the agent is stopped within the given `timeout`. 
""" - @spec stop(agent, timeout) :: :ok - def stop(agent, timeout \\ 5000) do - GenServer.call(agent, :stop, timeout) + @spec stop(agent, reason :: term, timeout) :: :ok + def stop(agent, reason \\ :normal, timeout \\ :infinity) do + GenServer.stop(agent, reason, timeout) end end diff --git a/lib/elixir/lib/agent/server.ex b/lib/elixir/lib/agent/server.ex index 0b06e4cddbe..c432bfed8ab 100644 --- a/lib/elixir/lib/agent/server.ex +++ b/lib/elixir/lib/agent/server.ex @@ -4,50 +4,56 @@ defmodule Agent.Server do use GenServer def init(fun) do - {:ok, fun.()} + _ = initial_call(fun) + {:ok, run(fun, [])} end def handle_call({:get, fun}, _from, state) do - {:reply, fun.(state), state} + {:reply, run(fun, [state]), state} end def handle_call({:get_and_update, fun}, _from, state) do - {reply, state} = fun.(state) - {:reply, reply, state} + case run(fun, [state]) do + {reply, state} -> {:reply, reply, state} + other -> {:stop, {:bad_return_value, other}, state} + end end def handle_call({:update, fun}, _from, state) do - {:reply, :ok, fun.(state)} - end - - def handle_call(:stop, _from, state) do - {:stop, :normal, :ok, state} + {:reply, :ok, run(fun, [state])} end def handle_call(msg, from, state) do super(msg, from, state) end - def handle_cast(fun, state) when is_function(fun, 1) do - {:noreply, fun.(state)} + def handle_cast({:cast, fun}, state) do + {:noreply, run(fun, [state])} end def handle_cast(msg, state) do super(msg, state) end - def code_change(_old, state, { m, f, a }) do - {:ok, apply(m, f, [state|a])} + def code_change(_old, state, fun) do + {:ok, run(fun, [state])} end - def terminate(_reason, _state) do - # There is a race condition if the agent is - # restarted too fast and it is registered. - try do - self |> :erlang.process_info(:registered_name) |> elem(1) |> Process.unregister - rescue - _ -> :ok - end + defp initial_call(mfa) do + _ = Process.put(:"$initial_call", get_initial_call(mfa)) :ok end + + defp get_initial_call(fun) when is_function(fun, 0) do + {:module, module} = :erlang.fun_info(fun, :module) + {:name, name} = :erlang.fun_info(fun, :name) + {module, name, 0} + end + + defp get_initial_call({mod, fun, args}) do + {mod, fun, length(args)} + end + + defp run({m, f, a}, extra), do: apply(m, f, extra ++ a) + defp run(fun, extra), do: apply(fun, extra) end diff --git a/lib/elixir/lib/application.ex b/lib/elixir/lib/application.ex index 691704e0258..51b6fc5c655 100644 --- a/lib/elixir/lib/application.ex +++ b/lib/elixir/lib/application.ex @@ -4,29 +4,29 @@ defmodule Application do In Elixir (actually, in Erlang/OTP), an application is a component implementing some specific functionality, that can be started and stopped - as a unit, and which can be re-used in other systems as well. + as a unit, and which can be re-used in other systems. Applications are defined with an application file named `APP.app` where - `APP` is the APP name, usually in `underscore_case` convention. The - application file must reside in the same `ebin` directory as the - application's modules bytecode. + `APP` is the application name, usually in `underscore_case`. The application + file must reside in the same `ebin` directory as the compiled modules of the + application. In Elixir, Mix is responsible for compiling your source code and generating your application `.app` file. Furthermore, Mix is also responsible for configuring, starting and stopping your application and its dependencies. 
For this reason, this documentation will focus - on the remaining aspects of your application: the application environment, + on the remaining aspects of your application: the application environment and the application callback module. - You can learn more about Mix compilation of `.app` files by typing + You can learn more about Mix generation of `.app` files by typing `mix help compile.app`. ## Application environment Once an application is started, OTP provides an application environment - that can be used to configure applications. + that can be used to configure the application. - Assuming you are inside a Mix project, you can edit your application + Assuming you are inside a Mix project, you can edit the `application/0` function in the `mix.exs` file to the following: def application do @@ -38,18 +38,15 @@ defmodule Application do can access the default value: Application.get_env(:APP_NAME, :hello) - #=> {:ok, :hello} + #=> :world It is also possible to put and delete values from the application value, including new values that are not defined in the environment file (although - those should be avoided). - - In the future, we plan to support configuration files which allows - developers to configure the environment of their dependencies. + this should be avoided). Keep in mind that each application is responsible for its environment. - Do not use the functions in this module for directly access or modify - the environment of other application (as it may lead to inconsistent + Do not use the functions in this module for directly accessing or modifying + the environment of other applications (as it may lead to inconsistent data in the application environment). ## Application module callback @@ -64,8 +61,8 @@ defmodule Application do end Our application now requires the `MyApp` module to provide an application - callback. This can be done by invoking `use Application` in that module - and defining a `start/2` callback, for example: + callback. This can be done by invoking `use Application` in that module and + defining a `start/2` callback, for example: defmodule MyApp do use Application @@ -75,46 +72,174 @@ defmodule Application do end end - `start/2` most commonly returns `{:ok, pid}` or `{:ok, pid, state}` where - `pid` identifies the supervision tree and the state is the application state. - `args` is second element of the tuple given to the `:mod` option. + `start/2` typically returns `{:ok, pid}` or `{:ok, pid, state}` where + `pid` identifies the supervision tree and `state` is the application state. + `args` is the second element of the tuple given to the `:mod` option. - The `type` passed into `start/2` is usually `:normal` unless in a distributed - setup where applications takeover and failovers are configured. This particular - aspect of applications can be read with more detail in the OTP documentation: + The `type` argument passed to `start/2` is usually `:normal` unless in a + distributed setup where application takeovers and failovers are configured. + This particular aspect of applications is explained in more detail in the + OTP documentation: - * http://www.erlang.org/doc/man/application.html - * http://www.erlang.org/doc/design_principles/applications.html + * [`:application` module](http://www.erlang.org/doc/man/application.html) + * [Applications – OTP Design Principles](http://www.erlang.org/doc/design_principles/applications.html) A developer may also implement the `stop/1` callback (automatically defined by `use Application`) which does any application cleanup. 
It receives the - application state and can return any value. Notice that shutting down the - supervisor is automatically handled by the VM; + application state and can return any value. Note that shutting down the + supervisor is automatically handled by the VM. + + An application without a supervision tree doesn't define an application + module callback in the application definition in `mix.exs` file. Even though + there is no module with application callbacks such as `start/2` and + `stop/1`, the application can be started and stopped the same way as an + application with a supervision tree. + """ + + @doc """ + Called when an application is started. + + This function is called when an application is started using + `Application.start/2` (and functions on top of that, such as + `Application.ensure_started/2`). This function should start the top-level + process of the application (which should be the top supervisor of the + application's supervision tree if the application follows the OTP design + principles around supervision). + + `start_type` defines how the application is started: + + * `:normal` - used if the startup is a normal startup or if the application + is distributed and is started on the current node because of a failover + from another node and the application specification key `:start_phases` + is `:undefined`. + * `{:takeover, node}` - used if the application is distributed and is + started on the current node because of a failover on the node `node`. + * `{:failover, node}` - used if the application is distributed and is + started on the current node because of a failover on node `node`, and the + application specification key `:start_phases` is not `:undefined`. + + `start_args` are the arguments passed to the application in the `:mod` + specification key (e.g., `mod: {MyApp, [:my_args]}`). + + This function should either return `{:ok, pid}` or `{:ok, pid, state}` if + startup is successful. `pid` should be the PID of the top supervisor. `state` + can be an arbitrary term, and if omitted will default to `[]`; if the + application is later stopped, `state` is passed to the `stop/1` callback (see + the documentation for the `c:stop/1` callback for more information). + + `use Application` provides no default implementation for the `start/2` + callback. """ + @callback start(start_type, start_args :: term) :: + {:ok, pid} | + {:ok, pid, state} | + {:error, reason :: term} + + @doc """ + Called when an application is stopped. + + This function is called when an application has stopped, i.e., when its + supervision tree has been stopped. It should do the opposite of what the + `start/2` callback did, and should perform any necessary cleanup. The return + value of this callback is ignored. + + `state` is the return value of the `start/2` callback or the return value of + the `prep_stop/1` function if the application module defines such a function. + + `use Application` defines a default implementation of this function which does + nothing and just returns `:ok`. + """ + @callback stop(state) :: term + + @doc """ + Start an application in synchronous phases. + + This function is called after `start/2` finishes but before + `Application.start/2` returns. It will be called once for every start phase + defined in the application's (and any included applications') specification, + in the order they are listed in. 
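To make the callbacks above concrete, a minimal sketch of a callback module (the supervisor name and the `:warm_up` phase are illustrative assumptions):

    defmodule MyApp do
      use Application

      def start(_type, _args) do
        # start the top of the supervision tree
        Supervisor.start_link([], strategy: :one_for_one, name: MyApp.Supervisor)
      end

      # invoked after start/2 only if the application spec sets, for example,
      # start_phases: [warm_up: []]
      def start_phase(:warm_up, _start_type, _phase_args) do
        :ok
      end
    end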
+ """ + @callback start_phase(phase :: term, start_type, phase_args :: term) :: + :ok | + {:error, reason :: term} + + @optional_callbacks start_phase: 3 @doc false defmacro __using__(_) do quote location: :keep do - @behaviour :application + @behaviour Application @doc false def stop(_state) do :ok end - defoverridable [stop: 1] + defoverridable Application end end @type app :: atom @type key :: atom @type value :: term + @type state :: term @type start_type :: :permanent | :transient | :temporary + @application_keys [:description, :id, :vsn, :modules, :maxP, :maxT, :registered, + :included_applications, :applications, :mod, :start_phases] + + @doc """ + Returns the spec for `app`. + + The following keys are returned: + + * #{Enum.map_join @application_keys, "\n * ", &inspect/1} + + Note the environment is not returned as it can be accessed via + `fetch_env/2`. Returns `nil` if the application is not loaded. + """ + @spec spec(app) :: [{key, value}] | nil + def spec(app) do + case :application.get_all_key(app) do + {:ok, info} -> :lists.keydelete(:env, 1, info) + :undefined -> nil + end + end + + @doc """ + Returns the value for `key` in `app`'s specification. + + See `spec/1` for the supported keys. If the given + specification parameter does not exist, this function + will raise. Returns `nil` if the application is not loaded. + """ + @spec spec(app, key) :: value | nil + def spec(app, key) when key in @application_keys do + case :application.get_key(app, key) do + {:ok, value} -> value + :undefined -> nil + end + end + + @doc """ + Gets the application for the given module. + + The application is located by analyzing the spec + of all loaded applications. Returns `nil` if + the module is not listed in any application spec. + """ + @spec get_application(atom) :: atom | nil + def get_application(module) when is_atom(module) do + case :application.get_application(module) do + {:ok, app} -> app + :undefined -> nil + end + end + @doc """ Returns all key-value pairs for `app`. """ - @spec get_all_env(app) :: [{key,value}] + @spec get_all_env(app) :: [{key, value}] def get_all_env(app) do :application.get_all_env(app) end @@ -122,22 +247,18 @@ defmodule Application do @doc """ Returns the value for `key` in `app`'s environment. - If the specified application is not loaded, or the configuration parameter - does not exist, the function returns the `default` value. + If the configuration parameter does not exist, the function returns the + `default` value. """ @spec get_env(app, key, value) :: value def get_env(app, key, default \\ nil) do - case :application.get_env(app, key) do - {:ok, value} -> value - :undefined -> default - end + :application.get_env(app, key, default) end @doc """ Returns the value for `key` in `app`'s environment in a tuple. - If the specified application is not loaded, or the configuration parameter - does not exist, the function returns `:error`. + If the configuration parameter does not exist, the function returns `:error`. """ @spec fetch_env(app, key) :: {:ok, value} | :error def fetch_env(app, key) do @@ -147,19 +268,35 @@ defmodule Application do end end + @doc """ + Returns the value for `key` in `app`'s environment. + + If the configuration parameter does not exist, raises `ArgumentError`. 
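A short sketch of the environment and spec accessors (the `:my_app` name and keys are illustrative):

    Application.put_env(:my_app, :port, 4000)
    Application.fetch_env!(:my_app, :port)      #=> 4000
    Application.get_env(:my_app, :missing, 0)   #=> 0 (the default)
    Application.fetch_env(:my_app, :missing)    #=> :error
    Application.get_application(Enum)           #=> :elixir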
+ """ + @spec fetch_env!(app, key) :: value | no_return + def fetch_env!(app, key) do + case fetch_env(app, key) do + {:ok, value} -> value + :error -> + raise ArgumentError, + "application #{inspect app} is not loaded, " <> + "or the configuration parameter #{inspect key} is not set" + end + end + @doc """ Puts the `value` in `key` for the given `app`. ## Options - * `:timeout` - the timeout for the change (defaults to 5000ms) + * `:timeout` - the timeout for the change (defaults to `5_000` milliseconds) * `:persistent` - persists the given value on application load and reloads If `put_env/4` is called before the application is loaded, the application environment values specified in the `.app` file will override the ones previously set. - The persistent option can be set to true when there is a need to guarantee + The persistent option can be set to `true` when there is a need to guarantee parameters set with this function will not be overridden by the ones defined in the application resource file on load. This means persistent values will stick after the application is loaded and also on application reload. @@ -201,7 +338,7 @@ defmodule Application do `:applications` in the `.app` file in case they were not previously started. """ - @spec ensure_all_started(app, start_type) :: {:ok, [app]} | {:error, term} + @spec ensure_all_started(app, start_type) :: {:ok, [app]} | {:error, {app, term}} def ensure_all_started(app, type \\ :temporary) when is_atom(app) do :application.ensure_all_started(app, type) end @@ -217,7 +354,7 @@ defmodule Application do started before this application is. If not, `{:error, {:not_started, app}}` is returned, where `app` is the name of the missing application. - In case you want to automatically load **and start** all of `app`'s dependencies, + In case you want to automatically load **and start** all of `app`'s dependencies, see `ensure_all_started/2`. The `type` argument specifies the type of the application: @@ -302,7 +439,7 @@ defmodule Application do #=> "bar-123" For more information on code paths, check the `Code` module in - Elixir and also Erlang's `:code` module. + Elixir and also Erlang's [`:code` module](http://www.erlang.org/doc/man/code.html). """ @spec app_dir(app) :: String.t def app_dir(app) when is_atom(app) do @@ -315,22 +452,41 @@ defmodule Application do @doc """ Returns the given path inside `app_dir/1`. """ - @spec app_dir(app, String.t) :: String.t + @spec app_dir(app, String.t | [String.t]) :: String.t def app_dir(app, path) when is_binary(path) do Path.join(app_dir(app), path) end + def app_dir(app, path) when is_list(path) do + Path.join([app_dir(app) | path]) + end + + @doc """ + Returns a list with information about the applications which are currently running. + """ + @spec started_applications(timeout) :: [tuple] + def started_applications(timeout \\ 5000) do + :application.which_applications(timeout) + end + + @doc """ + Returns a list with information about the applications which have been loaded. + """ + @spec loaded_applications :: [tuple] + def loaded_applications do + :application.loaded_applications + end @doc """ Formats the error reason returned by `start/2`, - `ensure_started/2, `stop/1`, `load/1` and `unload/1`, + `ensure_started/2`, `stop/1`, `load/1` and `unload/1`, returns a string. 
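As a sketch of how `format_error/1` pairs with the start functions above (the `:some_app` name is illustrative):

    case Application.ensure_all_started(:some_app) do
      {:ok, _started} ->
        :ok
      {:error, {app, reason}} ->
        IO.puts "could not start #{app}: #{Application.format_error(reason)}"
    end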
""" @spec format_error(any) :: String.t def format_error(reason) do try do - impl_format_error(reason) + do_format_error(reason) catch - # A user could create an error that looks like a builtin one + # A user could create an error that looks like a built-in one # causing an error. :error, _ -> inspect(reason) @@ -338,68 +494,68 @@ defmodule Application do end # exit(:normal) call is special cased, undo the special case. - defp impl_format_error({{:EXIT, :normal}, {mod, :start, args}}) do + defp do_format_error({{:EXIT, :normal}, {mod, :start, args}}) do Exception.format_exit({:normal, {mod, :start, args}}) end # {:error, reason} return value - defp impl_format_error({reason, {mod, :start, args}}) do + defp do_format_error({reason, {mod, :start, args}}) do Exception.format_mfa(mod, :start, args) <> " returned an error: " <> Exception.format_exit(reason) end # error or exit(reason) call, use exit reason as reason. - defp impl_format_error({:bad_return, {{mod, :start, args}, {:EXIT, reason}}}) do + defp do_format_error({:bad_return, {{mod, :start, args}, {:EXIT, reason}}}) do Exception.format_exit({reason, {mod, :start, args}}) end # bad return value - defp impl_format_error({:bad_return, {{mod, :start, args}, return}}) do + defp do_format_error({:bad_return, {{mod, :start, args}, return}}) do Exception.format_mfa(mod, :start, args) <> " returned a bad value: " <> inspect(return) end - defp impl_format_error({:already_started, app}) when is_atom(app) do + defp do_format_error({:already_started, app}) when is_atom(app) do "already started application #{app}" end - defp impl_format_error({:not_started, app}) when is_atom(app) do + defp do_format_error({:not_started, app}) when is_atom(app) do "not started application #{app}" end - defp impl_format_error({:bad_application, app}) do + defp do_format_error({:bad_application, app}) do "bad application: #{inspect(app)}" end - defp impl_format_error({:already_loaded, app}) when is_atom(app) do + defp do_format_error({:already_loaded, app}) when is_atom(app) do "already loaded application #{app}" end - defp impl_format_error({:not_loaded, app}) when is_atom(app) do + defp do_format_error({:not_loaded, app}) when is_atom(app) do "not loaded application #{app}" end - defp impl_format_error({:invalid_restart_type, restart}) do + defp do_format_error({:invalid_restart_type, restart}) do "invalid application restart type: #{inspect(restart)}" end - defp impl_format_error({:invalid_name, name}) do + defp do_format_error({:invalid_name, name}) do "invalid application name: #{inspect(name)}" end - defp impl_format_error({:invalid_options, opts}) do + defp do_format_error({:invalid_options, opts}) do "invalid application options: #{inspect(opts)}" end - defp impl_format_error({:badstartspec, spec}) do + defp do_format_error({:badstartspec, spec}) do "bad application start specs: #{inspect(spec)}" end - defp impl_format_error({'no such file or directory', file}) do + defp do_format_error({'no such file or directory', file}) do "could not find application file: #{file}" end - defp impl_format_error(reason) do + defp do_format_error(reason) do Exception.format_exit(reason) end end diff --git a/lib/elixir/lib/atom.ex b/lib/elixir/lib/atom.ex index 36e62f9f416..82512c030a3 100644 --- a/lib/elixir/lib/atom.ex +++ b/lib/elixir/lib/atom.ex @@ -1,12 +1,20 @@ defmodule Atom do - @doc """ + @moduledoc """ Convenience functions for working with atoms. + + See also `Kernel.is_atom/1`. """ @doc """ - Converts an atom to string. + Converts an atom to a string. 
Inlined by the compiler. + + ## Examples + + iex> Atom.to_string(:foo) + "foo" + """ @spec to_string(atom) :: String.t def to_string(atom) do @@ -14,12 +22,24 @@ defmodule Atom do end @doc """ - Converts an atom to a char list. + Converts an atom to a charlist. Inlined by the compiler. + + ## Examples + + iex> Atom.to_charlist(:"An atom") + 'An atom' + """ - @spec to_char_list(atom) :: char_list - def to_char_list(atom) do + @spec to_charlist(atom) :: charlist + def to_charlist(atom) do :erlang.atom_to_list(atom) end + + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + @doc false + @spec to_char_list(atom) :: charlist + def to_char_list(atom), do: Atom.to_charlist(atom) end diff --git a/lib/elixir/lib/base.ex b/lib/elixir/lib/base.ex index ff01c991e73..6fe1fdfa4bf 100644 --- a/lib/elixir/lib/base.ex +++ b/lib/elixir/lib/base.ex @@ -3,7 +3,7 @@ defmodule Base do @moduledoc """ This module provides data encoding and decoding functions - according to [RFC 4648](http://tools.ietf.org/html/rfc4648). + according to [RFC 4648](https://tools.ietf.org/html/rfc4648). This document defines the commonly used base 16, base 32, and base 64 encoding schemes. @@ -98,54 +98,68 @@ defmodule Base do b32_alphabet = Enum.with_index 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567' b32hex_alphabet = Enum.with_index '0123456789ABCDEFGHIJKLMNOPQRSTUV' - Enum.each [ {:enc16, :dec16, b16_alphabet}, - {:enc64, :dec64, b64_alphabet}, - {:enc32, :dec32, b32_alphabet}, - {:enc64url, :dec64url, b64url_alphabet}, - {:enc32hex, :dec32hex, b32hex_alphabet} ], fn({enc, dec, alphabet}) -> + Enum.each [{:enc16, :dec16, b16_alphabet}, + {:enc32, :dec32, b32_alphabet}, + {:enc64, :dec64, b64_alphabet}, + {:enc64url, :dec64url, b64url_alphabet}, + {:enc32hex, :dec32hex, b32hex_alphabet}], fn({enc, dec, alphabet}) -> for {encoding, value} <- alphabet do defp unquote(enc)(unquote(value)), do: unquote(encoding) defp unquote(dec)(unquote(encoding)), do: unquote(value) end defp unquote(dec)(c) do - raise ArgumentError, "non-alphabet digit found: #{<>}" + raise ArgumentError, "non-alphabet digit found: #{inspect <>, binaries: :as_strings} (byte #{c})" end end - defp encode_case(:upper, func), - do: func - defp encode_case(:lower, func), - do: &to_lower(func.(&1)) - - defp decode_case(:upper, func), - do: func - defp decode_case(:lower, func), - do: &func.(from_lower(&1)) - defp decode_case(:mixed, func), - do: &func.(from_mixed(&1)) + @compile {:inline, from_upper: 1, from_lower: 1, from_mixed: 1, + to_lower: 1, to_upper: 1, enc16: 1, dec16: 1, + enc32: 1, dec32: 1, enc32hex: 1, dec32hex: 1, + enc64: 1, dec64: 1, enc64url: 1, dec64url: 1} defp to_lower(char) when char in ?A..?Z, do: char + (?a - ?A) defp to_lower(char), do: char + defp to_upper(char), do: char + + defp from_upper(char), do: char + defp from_lower(char) when char in ?a..?z, do: char - (?a - ?A) - defp from_lower(char) when not char in ?A..?Z, + defp from_lower(char) when char not in ?A..?Z, do: char defp from_lower(char), - do: raise(ArgumentError, "non-alphabet digit found: #{<>}") + do: raise(ArgumentError, "non-alphabet digit found: \"#{<>}\" (byte #{char})") defp from_mixed(char) when char in ?a..?z, do: char - (?a - ?A) defp from_mixed(char), do: char + defp maybe_pad(subject, false, _, _), + do: subject + defp maybe_pad(subject, _, group_size, pad) do + case rem(byte_size(subject), group_size) do + 0 -> subject + x -> subject <> String.duplicate(pad, group_size - x) + end + end + @doc """ Encodes a binary string into a base 16 encoded string. 
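The per-character encoding and decoding clauses above are unrolled at compile time; a minimal sketch of the same unquote-fragment pattern, with an illustrative module and alphabet:

    defmodule TinyHex do
      # one function clause per character, generated while the module compiles
      for {char, value} <- Enum.with_index '0123456789abcdef' do
        def enc(unquote(value)), do: unquote(char)
      end
    end

    TinyHex.enc(10)   #=> ?a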
- Accepts an atom `:upper` (default) for encoding to upper case characters or - `:lower` for lower case characters. + ## Options + + The accepted options are: + + * `:case` - specifies the character case to use when encoding + + The values for `:case` can be: + + * `:upper` - uses upper case characters (default) + * `:lower` - uses lower case characters ## Examples @@ -156,20 +170,26 @@ defmodule Base do "666f6f626172" """ - @spec encode16(binary) :: binary @spec encode16(binary, Keyword.t) :: binary def encode16(data, opts \\ []) when is_binary(data) do case = Keyword.get(opts, :case, :upper) - do_encode16(data, encode_case(case, &enc16/1)) + do_encode16(case, data) end - @doc """ Decodes a base 16 encoded string into a binary string. - Accepts an atom `:upper` (default) for decoding from upper case characters or - `:lower` for lower case characters. `:mixed` can be given for mixed case - characters. + ## Options + + The accepted options are: + + * `:case` - specifies the character case to accept when decoding + + The values for `:case` can be: + + * `:upper` - only allows upper case characters (default) + * `:lower` - only allows lower case characters + * `:mixed` - allows mixed case characters ## Examples @@ -183,11 +203,9 @@ defmodule Base do {:ok, "foobar"} """ - @spec decode16(binary) :: {:ok, binary} | :error @spec decode16(binary, Keyword.t) :: {:ok, binary} | :error - def decode16(string, opts \\ []) when is_binary(string) do - case = Keyword.get(opts, :case, :upper) - {:ok, do_decode16(string, decode_case(case, &dec16/1))} + def decode16(string, opts \\ []) do + {:ok, decode16!(string, opts)} rescue ArgumentError -> :error end @@ -195,9 +213,17 @@ defmodule Base do @doc """ Decodes a base 16 encoded string into a binary string. - Accepts an atom `:upper` (default) for decoding from upper case characters or - `:lower` for lower case characters. `:mixed` can be given for mixed case - characters. + ## Options + + The accepted options are: + + * `:case` - specifies the character case to accept when decoding + + The values for `:case` can be: + + * `:upper` - only allows upper case characters (default) + * `:lower` - only allows lower case characters + * `:mixed` - allows mixed case characters An `ArgumentError` exception is raised if the padding is incorrect or a non-alphabet character is present in the string. @@ -214,39 +240,69 @@ defmodule Base do "foobar" """ - @spec decode16!(binary) :: binary @spec decode16!(binary, Keyword.t) :: binary - def decode16!(string, opts \\ []) when is_binary(string) do + def decode16!(string, opts \\ []) + + def decode16!(string, opts) when is_binary(string) and rem(byte_size(string), 2) == 0 do case = Keyword.get(opts, :case, :upper) - do_decode16(string, decode_case(case, &dec16/1)) + do_decode16(case, string) + end + + def decode16!(string, _opts) when is_binary(string) do + raise ArgumentError, "odd-length string" end @doc """ Encodes a binary string into a base 64 encoded string. + Accepts `padding: false` option which will omit padding from + the output string. + ## Examples iex> Base.encode64("foobar") "Zm9vYmFy" + iex> Base.encode64("foob") + "Zm9vYg==" + + iex> Base.encode64("foob", padding: false) + "Zm9vYg" + """ - @spec encode64(binary) :: binary - def encode64(data) when is_binary(data) do - do_encode64(data, &enc64/1) + @spec encode64(binary, Keyword.t) :: binary + def encode64(data, opts \\ []) when is_binary(data) do + pad? = Keyword.get(opts, :padding, true) + do_encode64(data, pad?) 
end @doc """ Decodes a base 64 encoded string into a binary string. + Accepts `ignore: :whitespace` option which will ignore all the + whitespace characters in the input string. + + Accepts `padding: false` option which will ignore padding from + the input string. + ## Examples iex> Base.decode64("Zm9vYmFy") {:ok, "foobar"} + iex> Base.decode64("Zm9vYmFy\\n", ignore: :whitespace) + {:ok, "foobar"} + + iex> Base.decode64("Zm9vYg==") + {:ok, "foob"} + + iex> Base.decode64("Zm9vYg", padding: false) + {:ok, "foob"} + """ - @spec decode64(binary) :: {:ok, binary} | :error - def decode64(string) when is_binary(string) do - {:ok, do_decode64(string, &dec64/1)} + @spec decode64(binary, Keyword.t) :: {:ok, binary} | :error + def decode64(string, opts \\ []) when is_binary(string) do + {:ok, decode64!(string, opts)} rescue ArgumentError -> :error end @@ -254,7 +310,11 @@ defmodule Base do @doc """ Decodes a base 64 encoded string into a binary string. - The following alphabet is used both for encoding and decoding: + Accepts `ignore: :whitespace` option which will ignore all the + whitespace characters in the input string. + + Accepts `padding: false` option which will ignore padding from + the input string. An `ArgumentError` exception is raised if the padding is incorrect or a non-alphabet character is present in the string. @@ -264,40 +324,69 @@ defmodule Base do iex> Base.decode64!("Zm9vYmFy") "foobar" + iex> Base.decode64!("Zm9vYmFy\\n", ignore: :whitespace) + "foobar" + + iex> Base.decode64!("Zm9vYg==") + "foob" + + iex> Base.decode64!("Zm9vYg", padding: false) + "foob" + """ - @spec decode64!(binary) :: binary - def decode64!(string) when is_binary(string) do - do_decode64(string, &dec64/1) + @spec decode64!(binary, Keyword.t) :: binary + def decode64!(string, opts \\ []) when is_binary(string) do + pad? = Keyword.get(opts, :padding, true) + string |> remove_ignored(opts[:ignore]) |> do_decode64(pad?) end @doc """ Encodes a binary string into a base 64 encoded string with URL and filename safe alphabet. + Accepts `padding: false` option which will omit padding from + the output string. + ## Examples - iex> Base.url_encode64(<<255,127,254,252>>) + iex> Base.url_encode64(<<255, 127, 254, 252>>) "_3_-_A==" + iex> Base.url_encode64(<<255, 127, 254, 252>>, padding: false) + "_3_-_A" + """ - @spec url_encode64(binary) :: binary - def url_encode64(data) when is_binary(data) do - do_encode64(data, &enc64url/1) + @spec url_encode64(binary, Keyword.t) :: binary + def url_encode64(data, opts \\ []) when is_binary(data) do + pad? = Keyword.get(opts, :padding, true) + do_encode64url(https://github.com/data,%20pad?) end @doc """ Decodes a base 64 encoded string with URL and filename safe alphabet into a binary string. + Accepts `ignore: :whitespace` option which will ignore all the + whitespace characters in the input string. + + Accepts `padding: false` option which will ignore padding from + the input string. 
+ ## Examples iex> Base.url_decode64("_3_-_A==") - {:ok, <<255,127,254,252>>} + {:ok, <<255, 127, 254, 252>>} + + iex> Base.url_decode64("_3_-_A==\\n", ignore: :whitespace) + {:ok, <<255, 127, 254, 252>>} + + iex> Base.url_decode64("_3_-_A", padding: false) + {:ok, <<255, 127, 254, 252>>} """ - @spec url_decode64(binary) :: {:ok, binary} | :error - def url_decode64(string) when is_binary(string) do - {:ok, do_decode64(string, &dec64url/1)} + @spec url_decode64(binary, Keyword.t) :: {:ok, binary} | :error + def url_decode64(string, opts \\ []) when is_binary(string) do + {:ok, url_decode64!(string, opts)} rescue ArgumentError -> :error end @@ -306,25 +395,52 @@ defmodule Base do Decodes a base 64 encoded string with URL and filename safe alphabet into a binary string. + Accepts `ignore: :whitespace` option which will ignore all the + whitespace characters in the input string. + + Accepts `padding: false` option which will ignore padding from + the input string. + An `ArgumentError` exception is raised if the padding is incorrect or a non-alphabet character is present in the string. ## Examples iex> Base.url_decode64!("_3_-_A==") - <<255,127,254,252>> + <<255, 127, 254, 252>> + + iex> Base.url_decode64!("_3_-_A==\\n", ignore: :whitespace) + <<255, 127, 254, 252>> + + iex> Base.url_decode64!("_3_-_A", padding: false) + <<255, 127, 254, 252>> """ - @spec url_decode64!(binary) :: binary - def url_decode64!(string) when is_binary(string) do - do_decode64(string, &dec64url/1) + @spec url_decode64!(binary, Keyword.t) :: binary + def url_decode64!(string, opts \\ []) when is_binary(string) do + pad? = Keyword.get(opts, :padding, true) + string |> remove_ignored(opts[:ignore]) |> do_decode64url(https://github.com/pad?) end @doc """ Encodes a binary string into a base 32 encoded string. - Accepts an atom `:upper` (default) for encoding to upper case characters or - `:lower` for lower case characters. + ## Options + + The accepted options are: + + * `:case` - specifies the character case to use when encoding + * `:padding` - specifies whether to apply padding + + The values for `:case` can be: + + * `:upper` - uses upper case characters (default) + * `:lower` - uses lower case characters + + The values for `:padding` can be: + + * `true` - pad the output string to the nearest multiple of 8 (default) + * `false` - omit padding from the output string ## Examples @@ -334,20 +450,37 @@ defmodule Base do iex> Base.encode32("foobar", case: :lower) "mzxw6ytboi======" + iex> Base.encode32("foobar", padding: false) + "MZXW6YTBOI" + """ - @spec encode32(binary) :: binary @spec encode32(binary, Keyword.t) :: binary def encode32(data, opts \\ []) when is_binary(data) do case = Keyword.get(opts, :case, :upper) - do_encode32(data, encode_case(case, &enc32/1)) + pad? = Keyword.get(opts, :padding, true) + do_encode32(case, data, pad?) end @doc """ Decodes a base 32 encoded string into a binary string. - Accepts an atom `:upper` (default) for decoding from upper case characters or - `:lower` for lower case characters. `:mixed` can be given for mixed case - characters. 
+ ## Options + + The accepted options are: + + * `:case` - specifies the character case to accept when decoding + * `:padding` - specifies whether to require padding + + The values for `:case` can be: + + * `:upper` - only allows upper case characters (default) + * `:lower` - only allows lower case characters + * `:mixed` - allows mixed case characters + + The values for `:padding` can be: + + * `true` - requires the input string to be padded to the nearest multiple of 8 (default) + * `false` - ignores padding from the input string ## Examples @@ -360,12 +493,13 @@ defmodule Base do iex> Base.decode32("mzXW6ytBOi======", case: :mixed) {:ok, "foobar"} + iex> Base.decode32("MZXW6YTBOI", padding: false) + {:ok, "foobar"} + """ - @spec decode32(binary) :: {:ok, binary} | :error @spec decode32(binary, Keyword.t) :: {:ok, binary} | :error def decode32(string, opts \\ []) do - case = Keyword.get(opts, :case, :upper) - {:ok, do_decode32(string, decode_case(case, &dec32/1))} + {:ok, decode32!(string, opts)} rescue ArgumentError -> :error end @@ -373,13 +507,27 @@ defmodule Base do @doc """ Decodes a base 32 encoded string into a binary string. - Accepts an atom `:upper` (default) for decoding from upper case characters or - `:lower` for lower case characters. `:mixed` can be given for mixed case - characters. - An `ArgumentError` exception is raised if the padding is incorrect or a non-alphabet character is present in the string. + ## Options + + The accepted options are: + + * `:case` - specifies the character case to accept when decoding + * `:padding` - specifies whether to require padding + + The values for `:case` can be: + + * `:upper` - only allows upper case characters (default) + * `:lower` - only allows lower case characters + * `:mixed` - allows mixed case characters + + The values for `:padding` can be: + + * `true` - requires the input string to be padded to the nearest multiple of 8 (default) + * `false` - ignores padding from the input string + ## Examples iex> Base.decode32!("MZXW6YTBOI======") @@ -391,20 +539,37 @@ defmodule Base do iex> Base.decode32!("mzXW6ytBOi======", case: :mixed) "foobar" + iex> Base.decode32!("MZXW6YTBOI", padding: false) + "foobar" + """ - @spec decode32!(binary) :: binary @spec decode32!(binary, Keyword.t) :: binary - def decode32!(string, opts \\ []) do + def decode32!(string, opts \\ []) when is_binary(string) do case = Keyword.get(opts, :case, :upper) - do_decode32(string, decode_case(case, &dec32/1)) + pad? = Keyword.get(opts, :padding, true) + do_decode32(case, string, pad?) end @doc """ Encodes a binary string into a base 32 encoded string with an extended hexadecimal alphabet. - Accepts an atom `:upper` (default) for encoding to upper case characters or - `:lower` for lower case characters. 
+ ## Options + + The accepted options are: + + * `:case` - specifies the character case to use when encoding + * `:padding` - specifies whether to apply padding + + The values for `:case` can be: + + * `:upper` - uses upper case characters (default) + * `:lower` - uses lower case characters + + The values for `:padding` can be: + + * `true` - pad the output string to the nearest multiple of 8 (default) + * `false` - omit padding from the output string ## Examples @@ -414,21 +579,38 @@ defmodule Base do iex> Base.hex_encode32("foobar", case: :lower) "cpnmuoj1e8======" + iex> Base.hex_encode32("foobar", padding: false) + "CPNMUOJ1E8" + """ - @spec hex_encode32(binary) :: binary @spec hex_encode32(binary, Keyword.t) :: binary def hex_encode32(data, opts \\ []) when is_binary(data) do case = Keyword.get(opts, :case, :upper) - do_encode32(data, encode_case(case, &enc32hex/1)) + pad? = Keyword.get(opts, :padding, true) + do_hex_encode32(case, data, pad?) end @doc """ Decodes a base 32 encoded string with extended hexadecimal alphabet into a binary string. - Accepts an atom `:upper` (default) for decoding from upper case characters or - `:lower` for lower case characters. `:mixed` can be given for mixed case - characters. + ## Options + + The accepted options are: + + * `:case` - specifies the character case to accept when decoding + * `:padding` - specifies whether to require padding + + The values for `:case` can be: + + * `:upper` - only allows upper case characters (default) + * `:lower` - only allows lower case characters + * `:mixed` - allows mixed case characters + + The values for `:padding` can be: + + * `true` - requires the input string to be padded to the nearest multiple of 8 (default) + * `false` - ignores padding from the input string ## Examples @@ -441,12 +623,13 @@ defmodule Base do iex> Base.hex_decode32("cpnMuOJ1E8======", case: :mixed) {:ok, "foobar"} + iex> Base.hex_decode32("CPNMUOJ1E8", padding: false) + {:ok, "foobar"} + """ - @spec hex_decode32(binary) :: {:ok, binary} | :error @spec hex_decode32(binary, Keyword.t) :: {:ok, binary} | :error - def hex_decode32(string, opts \\ []) when is_binary(string) do - case = Keyword.get(opts, :case, :upper) - {:ok, do_decode32(string, decode_case(case, &dec32hex/1))} + def hex_decode32(string, opts \\ []) do + {:ok, hex_decode32!(string, opts)} rescue ArgumentError -> :error end @@ -455,13 +638,27 @@ defmodule Base do Decodes a base 32 encoded string with extended hexadecimal alphabet into a binary string. - Accepts an atom `:upper` (default) for decoding from upper case characters or - `:lower` for lower case characters. `:mixed` can be given for mixed case - characters. - An `ArgumentError` exception is raised if the padding is incorrect or a non-alphabet character is present in the string. 
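A sketch of the failure mode just described, reusing the unpadded value from the examples below:

    Base.hex_decode32!("CPNMUOJ1E8")                   # raises ArgumentError ("incorrect padding")
    Base.hex_decode32!("CPNMUOJ1E8", padding: false)   #=> "foobar"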
+ ## Options + + The accepted options are: + + * `:case` - specifies the character case to accept when decoding + * `:padding` - specifies whether to require padding + + The values for `:case` can be: + + * `:upper` - only allows upper case characters (default) + * `:lower` - only allows lower case characters + * `:mixed` - allows mixed case characters + + The values for `:padding` can be: + + * `true` - requires the input string to be padded to the nearest multiple of 8 (default) + * `false` - ignores padding from the input string + ## Examples iex> Base.hex_decode32!("CPNMUOJ1E8======") @@ -473,120 +670,260 @@ defmodule Base do iex> Base.hex_decode32!("cpnMuOJ1E8======", case: :mixed) "foobar" + iex> Base.hex_decode32!("CPNMUOJ1E8", padding: false) + "foobar" + """ - @spec hex_decode32!(binary) :: binary @spec hex_decode32!(binary, Keyword.t) :: binary def hex_decode32!(string, opts \\ []) when is_binary(string) do case = Keyword.get(opts, :case, :upper) - do_decode32(string, decode_case(case, &dec32hex/1)) + pad? = Keyword.get(opts, :padding, true) + do_hex_decode32(case, string, pad?) end - defp do_encode16(<<>>, _), do: <<>> - defp do_encode16(data, enc) do - for <>, into: <<>>, do: <> + defp remove_ignored(string, nil), do: string + defp remove_ignored(string, :whitespace) do + for <>, char not in '\s\t\r\n', into: <<>>, do: <> end - defp do_decode16(<<>>, _), do: <<>> - defp do_decode16(string, dec) when rem(byte_size(string), 2) == 0 do - for <>, into: <<>> do - <> + defp do_encode16(_, <<>>), do: <<>> + defp do_encode16(:upper, data) do + for <>, into: <<>>, do: <> + end + defp do_encode16(:lower, data) do + for <>, into: <<>>, do: <> + end + + defp do_decode16(_, <<>>), do: <<>> + defp do_decode16(:upper, string) when rem(byte_size(string), 2) == 0 do + for <>, into: <<>> do + <> end end - defp do_decode16(_, _) do - raise ArgumentError, "odd-length string" + defp do_decode16(:lower, string) when rem(byte_size(string), 2) == 0 do + for <>, into: <<>> do + <> + end + end + defp do_decode16(:mixed, string) when rem(byte_size(string), 2) == 0 do + for <>, into: <<>> do + <> + end end defp do_encode64(<<>>, _), do: <<>> - defp do_encode64(data, enc) do + defp do_encode64(data, pad?) do split = 3 * div(byte_size(data), 3) - <> = data - main = for <>, into: <<>>, do: <> - case rest do + <> = data + main = for <>, into: <<>>, do: <> + tail = case rest do <> -> - <> + <> <> -> - <> + <> <<>> -> - main + <<>> end + main <> maybe_pad(tail, pad?, 4, "=") end defp do_decode64(<<>>, _), do: <<>> - defp do_decode64(string, dec) when rem(byte_size(string), 4) == 0 do + defp do_decode64(string, false) do + maybe_pad(string, true, 4, "=") |> do_decode64(true) + end + defp do_decode64(string, _pad?) when rem(byte_size(string), 4) == 0 do split = byte_size(string) - 4 - <> = string - main = for <>, into: <<>>, do: <> - case rest do + <> = string + main = for <>, into: <<>>, do: <> + tail = case rest do <> -> - <> + <> <> -> - <> + <> <> -> - <> + <> <<>> -> - main + <<>> end + main <> tail end defp do_decode64(_, _) do raise ArgumentError, "incorrect padding" end - defp do_encode32(<<>>, _), do: <<>> - defp do_encode32(data, enc) do - split = 5 * div(byte_size(data), 5) - <> = data - main = for <>, into: <<>>, do: <> - case rest do - <> -> - <> - <> -> - <> - <> -> - <> - <> -> - <> + defp do_encode64url(https://github.com/%3C%3C%3E%3E,%20_), do: <<>> + defp do_encode64url(https://github.com/data,%20pad?) 
do + split = 3 * div(byte_size(data), 3) + <> = data + main = for <>, into: <<>>, do: <> + tail = case rest do + <> -> + <> + <> -> + <> <<>> -> - main + <<>> end + main <> maybe_pad(tail, pad?, 4, "=") end - defp do_decode32(<<>>, _), do: <<>> - defp do_decode32(string, dec) when rem(byte_size(string), 8) == 0 do - split = byte_size(string) - 8 - <> = string - main = for <>, into: <<>>, do: <> - case rest do - <> -> - <> - <> -> - <> - <> -> - <> - <> -> - <> - <> -> - <> + defp do_decode64url(https://github.com/%3C%3C%3E%3E,%20_), do: <<>> + defp do_decode64url(https://github.com/string,%20false) do + maybe_pad(string, true, 4, "=") |> do_decode64url(https://github.com/true) + end + defp do_decode64url(https://github.com/string,%20_pad?) when rem(byte_size(string), 4) == 0 do + split = byte_size(string) - 4 + <> = string + main = for <>, into: <<>>, do: <> + tail = case rest do + <> -> + <> + <> -> + <> + <> -> + <> <<>> -> - main + <<>> end + main <> tail end - defp do_decode32(_, _) do + defp do_decode64url(https://github.com/_,%20_) do raise ArgumentError, "incorrect padding" end + defp do_encode32(_, <<>>, _), do: <<>> + + for {case, fun} <- [upper: :to_upper, lower: :to_lower] do + defp do_encode32(unquote(case), data, pad?) do + split = 5 * div(byte_size(data), 5) + <> = data + main = for <>, into: <<>>, do: <> + tail = case rest do + <> -> + <> + <> -> + <> + <> -> + <> + <> -> + <> + <<>> -> + <<>> + end + main <> maybe_pad(tail, pad?, 8, "=") + end + end + + defp do_decode32(_, <<>>, _), do: <<>> + defp do_decode32(case, string, false), + do: do_decode32(case, maybe_pad(string, true, 8, "="), true) + + for {case, fun} <- [upper: :from_upper, lower: :from_lower, mixed: :from_mixed] do + defp do_decode32(unquote(case), string, _pad?) when rem(byte_size(string), 8) == 0 do + split = byte_size(string) - 8 + <> = string + main = for <>, into: <<>>, do: <> + tail = case rest do + <> -> + <> + <> -> + <> + <> -> + <> + <> -> + <> + <> -> + <> + <<>> -> + <<>> + end + main <> tail + end + end + + defp do_decode32(_, _, _), + do: raise ArgumentError, "incorrect padding" + + defp do_hex_encode32(_, <<>>, _), do: <<>> + + for {case, fun} <- [upper: :to_upper, lower: :to_lower] do + defp do_hex_encode32(unquote(case), data, pad?) do + split = 5 * div(byte_size(data), 5) + <> = data + main = for <>, into: <<>>, do: <> + tail = case rest do + <> -> + <> + <> -> + <> + <> -> + <> + <> -> + <> + <<>> -> + <<>> + end + main <> maybe_pad(tail, pad?, 8, "=") + end + end + + defp do_hex_decode32(_, <<>>, _), do: <<>> + defp do_hex_decode32(case, string, false), + do: do_hex_decode32(case, maybe_pad(string, true, 8, "="), true) + + for {case, fun} <- [upper: :from_upper, lower: :from_lower, mixed: :from_mixed] do + defp do_hex_decode32(unquote(case), string, _pad?) when rem(byte_size(string), 8) == 0 do + split = byte_size(string) - 8 + <> = string + main = for <>, into: <<>>, do: <> + tail = case rest do + <> -> + <> + <> -> + <> + <> -> + <> + <> -> + <> + <> -> + <> + <<>> -> + <<>> + end + main <> tail + end + end + + defp do_hex_decode32(_, _, _), + do: raise ArgumentError, "incorrect padding" end diff --git a/lib/elixir/lib/behaviour.ex b/lib/elixir/lib/behaviour.ex index 8f882a00294..3873b148ade 100644 --- a/lib/elixir/lib/behaviour.ex +++ b/lib/elixir/lib/behaviour.ex @@ -1,60 +1,27 @@ defmodule Behaviour do @moduledoc """ - Utilities for defining behaviour interfaces. + This module has been deprecated. 
- Behaviours can be referenced by other modules - to ensure they implement required callbacks. + Instead of `defcallback/1` and `defmacrocallback/1`, the `@callback` and + `@macrocallback` module attributes can be used (respectively). See the + documentation for `Module` for more information on these attributes. - For example, you can specify the `URI.Parser` - behaviour as follows: - - defmodule URI.Parser do - use Behaviour - - @doc "Parses the given URL" - defcallback parse(uri_info :: URI.t) :: URI.t - - @doc "Defines a default port" - defcallback default_port() :: integer - end - - And then a module may use it as: - - defmodule URI.HTTP do - @behaviour URI.Parser - def default_port(), do: 80 - def parse(info), do: info - end - - If the behaviour changes or `URI.HTTP` does - not implement one of the callbacks, a warning - will be raised. - - ## Implementation - - Since Erlang R15, behaviours must be defined via - `@callback` attributes. `defcallback` is a simple - mechanism that defines the `@callback` attribute - according to the given type specification. `defcallback` allows - documentation to be created for the callback and defines - a custom function signature. - - The callbacks and their documentation can be retrieved - via the `__behaviour__` callback function. + Instead of `MyModule.__behaviour__(:callbacks)`, + `MyModule.behaviour_info(:callbacks)` can be used. """ @doc """ - Define a function callback according to the given type specification. + Defines a function callback according to the given type specification. """ defmacro defcallback(spec) do - do_defcallback(split_spec(spec, quote(do: term)), __CALLER__) + do_defcallback(:def, split_spec(spec, quote(do: term))) end @doc """ - Define a macro callback according to the given type specification. + Defines a macro callback according to the given type specification. 
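A sketch of the migration described above, rewriting the `URI.Parser` example from the removed documentation with module attributes:

    defmodule URI.Parser do
      @doc "Parses the given URL"
      @callback parse(uri_info :: URI.t) :: URI.t

      @doc "Defines a default port"
      @callback default_port() :: integer
    end

    defmodule URI.HTTP do
      @behaviour URI.Parser
      def default_port(), do: 80
      def parse(info), do: info
    end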
""" defmacro defmacrocallback(spec) do - do_defmacrocallback(split_spec(spec, quote(do: Macro.t)), __CALLER__) + do_defcallback(:defmacro, split_spec(spec, quote(do: Macro.t))) end defp split_spec({:when, _, [{:::, _, [spec, return]}, guard]}, _default) do @@ -73,27 +40,17 @@ defmodule Behaviour do {spec, default, []} end - defp do_defcallback({spec, return, guards}, caller) do - case Macro.decompose_call(spec) do - {name, args} -> - do_callback(:def, name, args, name, length(args), args, return, guards, caller) - _ -> - raise ArgumentError, "invalid syntax in defcallback #{Macro.to_string(spec)}" - end - end - - defp do_defmacrocallback({spec, return, guards}, caller) do + defp do_defcallback(kind, {spec, return, guards}) do case Macro.decompose_call(spec) do {name, args} -> - do_callback(:defmacro, :"MACRO-#{name}", [quote(do: env :: Macro.Env.t)|args], - name, length(args), args, return, guards, caller) + do_callback(kind, name, args, return, guards) _ -> - raise ArgumentError, "invalid syntax in defmacrocallback #{Macro.to_string(spec)}" + raise ArgumentError, "invalid syntax in #{kind}callback #{Macro.to_string(spec)}" end end - defp do_callback(kind, name, args, docs_name, docs_arity, _docs_args, return, guards, caller) do - Enum.each args, fn + defp do_callback(kind, name, args, return, guards) do + :lists.foreach fn {:::, _, [left, right]} -> ensure_not_default(left) ensure_not_default(right) @@ -101,12 +58,16 @@ defmodule Behaviour do other -> ensure_not_default(other) other - end + end, args - quote do - @callback unquote(name)(unquote_splicing(args)) :: unquote(return) when unquote(guards) - Behaviour.store_docs(__MODULE__, unquote(caller.line), unquote(kind), - unquote(docs_name), unquote(docs_arity)) + spec = + quote do + unquote(name)(unquote_splicing(args)) :: unquote(return) when unquote(guards) + end + + case kind do + :def -> quote(do: @callback unquote(spec)) + :defmacro -> quote(do: @macrocallback unquote(spec)) end end @@ -116,37 +77,30 @@ defmodule Behaviour do defp ensure_not_default(_), do: :ok - @doc false - def store_docs(module, line, kind, name, arity) do - doc = Module.get_attribute module, :doc - Module.delete_attribute module, :doc - Module.put_attribute module, :behaviour_docs, {{name, arity}, line, kind, doc} - end - @doc false defmacro __using__(_) do quote do - Module.register_attribute(__MODULE__, :behaviour_docs, accumulate: true) - @before_compile unquote(__MODULE__) - import unquote(__MODULE__) - end - end - - @doc false - defmacro __before_compile__(env) do - docs = if Code.compiler_options[:docs] do - Enum.reverse Module.get_attribute(env.module, :behaviour_docs) - end + warning = + "the Behaviour module is deprecated. Instead of using this module, " <> + "use the @callback and @macrocallback module attributes. 
See the " <> + "documentation for Module for more information on these attributes" + IO.warn(warning) - quote do @doc false def __behaviour__(:callbacks) do __MODULE__.behaviour_info(:callbacks) end def __behaviour__(:docs) do - unquote(Macro.escape(docs)) + for {tuple, line, kind, docs} <- Code.get_docs(__MODULE__, :callback_docs) do + case kind do + :callback -> {tuple, line, :def, docs} + :macrocallback -> {tuple, line, :defmacro, docs} + end + end end + + import unquote(__MODULE__) end end end diff --git a/lib/elixir/lib/bitwise.ex b/lib/elixir/lib/bitwise.ex index 9d74a608983..84d3b4d173c 100644 --- a/lib/elixir/lib/bitwise.ex +++ b/lib/elixir/lib/bitwise.ex @@ -1,40 +1,49 @@ defmodule Bitwise do @moduledoc """ - This module provides macros and operators for bitwise operators. - These macros can be used in guards. + A set of macros that perform calculations on bits. - The easiest way to use is to simply import them into - your module: + The macros in this module come in two flavors: named or + operators. For example: iex> use Bitwise - iex> bnot 1 + iex> bnot 1 # named -2 - iex> 1 &&& 1 + iex> 1 &&& 1 # operator 1 - You can select to include only or skip operators by passing options: + If you prefer to use only operators or skip them, you can + pass the following options: + + * `:only_operators` - includes only operators + * `:skip_operators` - skips operators + + For example: iex> use Bitwise, only_operators: true iex> 1 &&& 1 1 - """ + When invoked with no options, `use Bitwise` is equivalent + to `import Bitwise`. - @doc """ - Allow a developer to use this module in their programs with - the following options: + All bitwise macros can be used in guards: - * `:only_operators` - include only operators - * `:skip_operators` - skip operators + iex> use Bitwise + iex> odd? = fn int when band(int, 1) == 1 -> true; _ -> false end + iex> odd?.(1) + true """ + + @doc false defmacro __using__(options) do except = cond do Keyword.get(options, :only_operators) -> [bnot: 1, band: 2, bor: 2, bxor: 2, bsl: 2, bsr: 2] Keyword.get(options, :skip_operators) -> [~~~: 1, &&&: 2, |||: 2, ^^^: 2, <<<: 2, >>>: 2] - true -> [] + true -> + [] end quote do @@ -43,84 +52,160 @@ defmodule Bitwise do end @doc """ - Bitwise not. + Calculates the bitwise NOT of its argument. + + iex> bnot(2) + -3 + iex> bnot(2) &&& 3 + 1 + """ defmacro bnot(expr) do quote do: :erlang.bnot(unquote(expr)) end @doc """ - Bitwise not as operator. + Prefix (unary) operator; calculates the bitwise NOT of its argument. + + iex> ~~~2 + -3 + iex> ~~~2 &&& 3 + 1 + """ defmacro ~~~expr do quote do: :erlang.bnot(unquote(expr)) end @doc """ - Bitwise and. + Calculates the bitwise AND of its arguments. + + iex> band(9, 3) + 1 + """ defmacro band(left, right) do quote do: :erlang.band(unquote(left), unquote(right)) end @doc """ - Bitwise and as operator. + Infix operator; calculates the bitwise AND of its arguments. + + iex> 9 &&& 3 + 1 + """ defmacro left &&& right do quote do: :erlang.band(unquote(left), unquote(right)) end @doc """ - Bitwise or. + Calculates the bitwise OR of its arguments. + + iex> bor(9, 3) + 11 + """ defmacro bor(left, right) do quote do: :erlang.bor(unquote(left), unquote(right)) end @doc """ - Bitwise or as operator. + Infix operator; calculates the bitwise OR of its arguments. + + iex> 9 ||| 3 + 11 + """ defmacro left ||| right do quote do: :erlang.bor(unquote(left), unquote(right)) end @doc """ - Bitwise xor. + Calculates the bitwise XOR of its arguments. 
+ + iex> bxor(9, 3) + 10 + """ defmacro bxor(left, right) do quote do: :erlang.bxor(unquote(left), unquote(right)) end @doc """ - Bitwise xor as operator. + Infix operator; calculates the bitwise XOR of its arguments. + + iex> 9 ^^^ 3 + 10 + """ defmacro left ^^^ right do quote do: :erlang.bxor(unquote(left), unquote(right)) end @doc """ - Arithmetic bitshift left. + Calculates the result of an arithmetic left bitshift. + + iex> bsl(1, 2) + 4 + iex> bsl(1, -2) + 0 + iex> bsl(-1, 2) + -4 + iex> bsl(-1, -2) + -1 + """ defmacro bsl(left, right) do quote do: :erlang.bsl(unquote(left), unquote(right)) end @doc """ - Arithmetic bitshift left as operator. + Infix operator; calculates the result of an arithmetic left bitshift. + + iex> 1 <<< 2 + 4 + iex> 1 <<< -2 + 0 + iex> -1 <<< 2 + -4 + iex> -1 <<< -2 + -1 + """ defmacro left <<< right do quote do: :erlang.bsl(unquote(left), unquote(right)) end @doc """ - Arithmetic bitshift right. + Calculates the result of an arithmetic right bitshift. + + iex> bsr(1, 2) + 0 + iex> bsr(1, -2) + 4 + iex> bsr(-1, 2) + -1 + iex> bsr(-1, -2) + -4 + """ defmacro bsr(left, right) do quote do: :erlang.bsr(unquote(left), unquote(right)) end @doc """ - Arithmetic bitshift right as operator. + Infix operator; calculates the result of an arithmetic right bitshift. + + iex> 1 >>> 2 + 0 + iex> 1 >>> -2 + 4 + iex> -1 >>> 2 + -1 + iex> -1 >>> -2 + -4 + """ defmacro left >>> right do quote do: :erlang.bsr(unquote(left), unquote(right)) diff --git a/lib/elixir/lib/calendar.ex b/lib/elixir/lib/calendar.ex new file mode 100644 index 00000000000..99d3cd6e157 --- /dev/null +++ b/lib/elixir/lib/calendar.ex @@ -0,0 +1,197 @@ +defmodule Calendar do + @moduledoc """ + This module defines the responsibilities for working with + calendars, dates, times and datetimes in Elixir. + + Currently it defines types and the minimal implementation + for a calendar behaviour in Elixir. The goal of the Calendar + features in Elixir is to provide a base for interoperability + instead of full-featured datetime API. + + For the actual date, time and datetime structures, see `Date`, + `Time`, `NaiveDateTime` and `DateTime`. + + Note the year, month, day, etc. designations are overspecified + (i.e. an integer instead of `1..12` for months) because different + calendars may have a different number of days per month, months per year and so on. + """ + + @type year :: integer + @type month :: integer + @type day :: integer + @type hour :: integer + @type minute :: integer + @type second :: integer + + @typedoc """ + The internal time format is used when converting between calendars. + + It represents time as a fraction of a day (starting from midnight). + `parts_in_day` specifies how much of the day is already passed, + while `parts_per_day` signifies how many parts there fit in a day. + """ + @type day_fraction :: {parts_in_day :: non_neg_integer, parts_per_day :: pos_integer} + + @typedoc """ + The internal date format that is used when converting between calendars. + + This is the amount of days including the fractional part that has passed of the last day, + since midnight 1 January AD 1 of the Proleptic Gregorian Calendar + (0000-01-01+00:00T00:00.00000 in ISO 8601 notation). + + The `parts_per_day` represent how many subparts the current day is subdivided in + (for different calendars, picking a different `parts_per_day` might make sense). + The `parts_in_day` represents how many of these `parts_per_day` have passed in the last day. 
+ + Thus, a Rata Die like `{1234, {1, 2}}` should be read as `1234½`. + """ + @type rata_die :: {days :: integer, day_fraction} + + @typedoc """ + Microseconds with stored precision. + + The precision represents the number of digits that must be used when + representing the microseconds to external format. If the precision is 0, + it means microseconds must be skipped. + """ + @type microsecond :: {0..999_999, 0..6} + + @typedoc "A calendar implementation" + @type calendar :: module + + @typedoc "The time zone ID according to the IANA tz database (e.g. Europe/Zurich)" + @type time_zone :: String.t + + @typedoc "The time zone abbreviation (e.g. CET or CEST or BST etc.)" + @type zone_abbr :: String.t + + @typedoc "The time zone UTC offset in seconds" + @type utc_offset :: integer + + @typedoc "The time zone standard offset in seconds (not zero in summer times)" + @type std_offset :: integer + + @typedoc "Any map/struct that contains the date fields" + @type date :: %{optional(any) => any, calendar: calendar, year: year, month: month, day: day} + + @typedoc "Any map/struct that contains the time fields" + @type time :: %{optional(any) => any, hour: hour, minute: minute, second: second, microsecond: microsecond} + + @typedoc "Any map/struct that contains the naive_datetime fields" + @type naive_datetime :: %{optional(any) => any, calendar: calendar, year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond} + + @typedoc "Any map/struct that contains the datetime fields" + @type datetime :: %{optional(any) => any, calendar: calendar, year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond, + time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset} + + @doc """ + Returns how many days there are in the given year-month. + """ + @callback days_in_month(year, month) :: day + + @doc """ + Returns true if the given year is a leap year. + + A leap year is a year of a longer length than normal. The exact meaning + is up to the calendar. A calendar must return `false` if it does not support + the concept of leap years. + """ + @callback leap_year?(year) :: boolean + + @doc """ + Calculates the day of the week from the given `year`, `month`, and `day`. + """ + @callback day_of_week(year, month, day) :: non_neg_integer() + + @doc """ + Converts the date into a string according to the calendar. + """ + @callback date_to_string(year, month, day) :: String.t + + @doc """ + Converts the datetime (without time zone) into a string according to the calendar. + """ + @callback naive_datetime_to_string(year, month, day, hour, minute, second, microsecond) :: String.t + + @doc """ + Converts the datetime (with time zone) into a string according to the calendar. + """ + @callback datetime_to_string(year, month, day, hour, minute, second, microsecond, + time_zone, zone_abbr, utc_offset, std_offset) :: String.t + + @doc """ + Converts the time into a string according to the calendar. + """ + @callback time_to_string(hour, minute, second, microsecond) :: String.t + + @doc """ + Converts the given datetime (with time zone) into the `t:rata_die` format. + """ + @callback naive_datetime_to_rata_die(year, month, day, hour, minute, second, microsecond) :: rata_die + + @doc """ + Converts `t:rata_die` to the Calendar's datetime format. 
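For illustration, a round trip through the Rata Die format as implemented by `Calendar.ISO` (values taken from the `Calendar.ISO` doctests later in this patch):

    Calendar.ISO.naive_datetime_to_rata_die(2000, 1, 1, 12, 0, 0, {0, 6})
    #=> {730120, {43200000000, 86400000000}}

    Calendar.ISO.naive_datetime_from_rata_die({730120, {43200, 86400}})
    #=> {2000, 1, 1, 12, 0, 0, {0, 6}}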
+ """ + @callback naive_datetime_from_rata_die(rata_die) :: {year, month, day, hour, minute, second, microsecond} + + @doc """ + Converts the given time to the `t:day_fraction` format. + """ + @callback time_to_day_fraction(hour, minute, second, microsecond) :: day_fraction + + @doc """ + Converts `t:day_fraction` to the Calendar's time format. + """ + @callback time_from_day_fraction(day_fraction) :: {hour, minute, second, microsecond} + + @doc """ + Define the rollover moment for the given calendar. + + This is the moment, in your calendar, when the current day ends + and the next day starts. + + The result of this function is used to check if two calendars rollover at + the same time of day. If they do not, we can only convert datetimes and times + between them. If they do, this means that we can also convert dates as well + as naive datetimes between them. + + This day fraction should be in its most simplified form possible, to make comparisons fast. + + ## Examples + + * If, in your Calendar, a new day starts at midnight, return {0, 1}. + * If, in your Calendar, a new day starts at sunrise, return {1, 4}. + * If, in your Calendar, a new day starts at noon, return {1, 2}. + * If, in your Calendar, a new day starts at sunset, return {3, 4}. + + """ + @callback day_rollover_relative_to_midnight_utc() :: day_fraction + + @doc """ + Should return `true` if the given date describes a proper date in the calendar. + """ + @callback valid_date?(year, month, day) :: boolean + + @doc """ + Should return `true` if the given time describes a proper time in the calendar. + """ + @callback valid_time?(hour, minute, second, microsecond) :: boolean + + # General Helpers + + @doc """ + Returns `true` if two calendars have the same moment of starting a new day, + `false` otherwise. + + If two calendars are not compatible, we can only convert datetimes and times + between them. If they are compatible, this means that we can also convert + dates as well as naive datetimes between them. + """ + @spec compatible_calendars?(Calendar.calendar, Calendar.calendar) :: boolean + def compatible_calendars?(calendar, calendar), do: true + def compatible_calendars?(calendar1, calendar2) do + calendar1.day_rollover_relative_to_midnight_utc() == calendar2.day_rollover_relative_to_midnight_utc() + end +end diff --git a/lib/elixir/lib/calendar/date.ex b/lib/elixir/lib/calendar/date.ex new file mode 100644 index 00000000000..926adaadab7 --- /dev/null +++ b/lib/elixir/lib/calendar/date.ex @@ -0,0 +1,487 @@ +defmodule Date do + @moduledoc """ + A Date struct and functions. + + The Date struct contains the fields year, month, day and calendar. + New dates can be built with the `new/3` function or using the `~D` + sigil: + + iex> ~D[2000-01-01] + ~D[2000-01-01] + + Both `new/3` and sigil return a struct where the date fields can + be accessed directly: + + iex> date = ~D[2000-01-01] + iex> date.year + 2000 + iex> date.month + 1 + + The functions on this module work with the `Date` struct as well + as any struct that contains the same fields as the `Date` struct, + such as `NaiveDateTime` and `DateTime`. Such functions expect + `t:Calendar.date/0` in their typespecs (instead of `t:t/0`). + + Remember, comparisons in Elixir using `==`, `>`, `<` and friends + are structural and based on the Date struct fields. For proper + comparison between dates, use the `compare/2` function. 
+ + Developers should avoid creating the Date struct directly and + instead rely on the functions provided by this module as well as + the ones in 3rd party calendar libraries. + """ + + @enforce_keys [:year, :month, :day] + defstruct [:year, :month, :day, calendar: Calendar.ISO] + + @type t :: %Date{year: Calendar.year, month: Calendar.month, + day: Calendar.day, calendar: Calendar.calendar} + + @doc """ + Returns the current date in UTC. + + ## Examples + + iex> date = Date.utc_today() + iex> date.year >= 2016 + true + + """ + @spec utc_today(Calendar.calendar) :: t + def utc_today(calendar \\ Calendar.ISO) + + def utc_today(Calendar.ISO) do + {:ok, {year, month, day}, _, _} = Calendar.ISO.from_unix(System.os_time, :native) + %Date{year: year, month: month, day: day} + end + + def utc_today(calendar) do + calendar + |> DateTime.utc_now + |> DateTime.to_date + end + + @doc """ + Returns true if the year in `date` is a leap year. + + ## Examples + + iex> Date.leap_year?(~D[2000-01-01]) + true + iex> Date.leap_year?(~D[2001-01-01]) + false + iex> Date.leap_year?(~D[2004-01-01]) + true + iex> Date.leap_year?(~D[1900-01-01]) + false + iex> Date.leap_year?(~N[2004-01-01 01:23:45]) + true + + """ + @spec leap_year?(Calendar.date) :: boolean() + def leap_year?(date) + + def leap_year?(%{calendar: calendar, year: year}) do + calendar.leap_year?(year) + end + + @doc """ + Returns the number of days in the given date month. + + ## Examples + + iex> Date.days_in_month(~D[1900-01-13]) + 31 + iex> Date.days_in_month(~D[1900-02-09]) + 28 + iex> Date.days_in_month(~N[2000-02-20 01:23:45]) + 29 + + """ + @spec days_in_month(Calendar.date) :: Calendar.day + def days_in_month(date) + + def days_in_month(%{calendar: calendar, year: year, month: month}) do + calendar.days_in_month(year, month) + end + + @doc """ + Builds a new ISO date. + + Expects all values to be integers. Returns `{:ok, date}` if each + entry fits its appropriate range, returns `{:error, reason}` otherwise. + + ## Examples + + iex> Date.new(2000, 1, 1) + {:ok, ~D[2000-01-01]} + iex> Date.new(2000, 13, 1) + {:error, :invalid_date} + iex> Date.new(2000, 2, 29) + {:ok, ~D[2000-02-29]} + + iex> Date.new(2000, 2, 30) + {:error, :invalid_date} + iex> Date.new(2001, 2, 29) + {:error, :invalid_date} + + """ + @spec new(Calendar.year, Calendar.month, Calendar.day) :: {:ok, t} | {:error, atom} + def new(year, month, day, calendar \\ Calendar.ISO) do + if calendar.valid_date?(year, month, day) do + {:ok, %Date{year: year, month: month, day: day, calendar: calendar}} + else + {:error, :invalid_date} + end + end + + @doc """ + Converts the given date to a string according to its calendar. + + ### Examples + + iex> Date.to_string(~D[2000-02-28]) + "2000-02-28" + iex> Date.to_string(~N[2000-02-28 01:23:45]) + "2000-02-28" + + """ + @spec to_string(Calendar.date) :: String.t + def to_string(date) + + def to_string(%{calendar: calendar, year: year, month: month, day: day}) do + calendar.date_to_string(year, month, day) + end + + @doc """ + Parses the extended "Date and time of day" format described by + [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). + + Timezone offset may be included in the string but they will be + simply discarded as such information is not included in naive date + times. + + Time representations with reduced accuracy are not supported. 
+ + ## Examples + + iex> Date.from_iso8601("2015-01-23") + {:ok, ~D[2015-01-23]} + + iex> Date.from_iso8601("2015:01:23") + {:error, :invalid_format} + + iex> Date.from_iso8601("2015-01-32") + {:error, :invalid_date} + + """ + @spec from_iso8601(String.t) :: {:ok, t} | {:error, atom} + def from_iso8601(string, calendar \\ Calendar.ISO) + + def from_iso8601(<>, calendar) do + with {year, ""} <- Integer.parse(year), + {month, ""} <- Integer.parse(month), + {day, ""} <- Integer.parse(day) do + with {:ok, date} <- new(year, month, day, Calendar.ISO), + do: convert(date, calendar) + else + _ -> {:error, :invalid_format} + end + end + + def from_iso8601(<<_::binary>>, _calendar) do + {:error, :invalid_format} + end + + @doc """ + Parses the extended "Date and time of day" format described by + [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). + + Raises if the format is invalid. + + ## Examples + + iex> Date.from_iso8601!("2015-01-23") + ~D[2015-01-23] + iex> Date.from_iso8601!("2015:01:23") + ** (ArgumentError) cannot parse "2015:01:23" as date, reason: :invalid_format + """ + @spec from_iso8601!(String.t) :: t | no_return + def from_iso8601!(string, calendar \\ Calendar.ISO) do + case from_iso8601(string, calendar) do + {:ok, value} -> + value + {:error, reason} -> + raise ArgumentError, "cannot parse #{inspect string} as date, reason: #{inspect reason}" + end + end + + @doc """ + Converts the given `date` to + [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). + + By default, `Date.to_iso8601/2` returns dates formatted in the "extended" + format, for human readability. It also supports the "basic" format through passing the `:basic` option. + + Only supports converting dates which are in the ISO calendar, + or other calendars in which the days also start at midnight. + Attempting to convert dates from other calendars will raise an `ArgumentError`. + + ### Examples + + iex> Date.to_iso8601(~D[2000-02-28]) + "2000-02-28" + + iex> Date.to_iso8601(~D[2000-02-28], :basic) + "20000228" + + """ + @spec to_iso8601(Date.t, :extended | :basic) :: String.t + def to_iso8601(date, format \\ :extended) + + def to_iso8601(%Date{} = date, format) when format in [:basic, :extended] do + %{year: year, month: month, day: day} = convert!(date, Calendar.ISO) + Calendar.ISO.date_to_iso8601(year, month, day, format) + end + + # TODO: Remove on 2.0 + def to_iso8601(%{calendar: Calendar.ISO, year: year, month: month, day: day}, format) when format in [:basic, :extended] do + IO.warn "calling Date.to_iso8601/1 with a DateTime or NaiveDateTime structs is deprecated, explicitly convert them into a Date first by using DateTime.to_date/1 or NaiveDateTime.to_date/1 respectively" + Calendar.ISO.date_to_iso8601(year, month, day, format) + end + + def to_iso8601(_date, format) do + raise ArgumentError, "Date.to_iso8601/2 expects format to be :extended or :basic, got: #{inspect format}" + end + + @doc """ + Converts a `Date` struct to an Erlang date tuple. + + Only supports converting dates which are in the ISO calendar, + or other calendars in which the days also start at midnight. + Attempting to convert dates from other calendars will raise an `ArgumentError`. 
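As a hedged sketch, a round trip through Erlang date tuples combining `to_erl/1` with `from_erl/1` (the results follow from the doctests below):

    ~D[2000-01-01] |> Date.to_erl() |> Date.from_erl()
    #=> {:ok, ~D[2000-01-01]}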
+ + ## Examples + + iex> Date.to_erl(~D[2000-01-01]) + {2000, 1, 1} + + """ + @spec to_erl(Date.t) :: :calendar.date + def to_erl(%Date{} = date) do + %{year: year, month: month, day: day} = convert!(date, Calendar.ISO) + {year, month, day} + end + + # TODO: Remove on 2.0 + def to_erl(%{calendar: Calendar.ISO, year: year, month: month, day: day}) do + IO.warn "calling Date.to_erl/1 with a DateTime or NaiveDateTime structs is deprecated, explicitly convert them into a Date first by using DateTime.to_date/1 or NaiveDateTime.to_date/1 respectively" + {year, month, day} + end + + @doc """ + Converts an Erlang date tuple to a `Date` struct. + + Only supports converting dates which are in the ISO calendar, + or other calendars in which the days also start at midnight. + Attempting to convert dates from other calendars will return an error tuple. + + ## Examples + + iex> Date.from_erl({2000, 1, 1}) + {:ok, ~D[2000-01-01]} + iex> Date.from_erl({2000, 13, 1}) + {:error, :invalid_date} + + """ + @spec from_erl(:calendar.date) :: {:ok, t} | {:error, atom} + def from_erl(tuple, calendar \\ Calendar.ISO) + + def from_erl({year, month, day}, calendar) do + with {:ok, date} <- new(year, month, day, Calendar.ISO), + do: convert(date, calendar) + end + + @doc """ + Converts an Erlang date tuple but raises for invalid dates. + + ## Examples + + iex> Date.from_erl!({2000, 1, 1}) + ~D[2000-01-01] + iex> Date.from_erl!({2000, 13, 1}) + ** (ArgumentError) cannot convert {2000, 13, 1} to date, reason: :invalid_date + + """ + @spec from_erl!(:calendar.date) :: t | no_return + def from_erl!(tuple) do + case from_erl(tuple) do + {:ok, value} -> + value + {:error, reason} -> + raise ArgumentError, "cannot convert #{inspect tuple} to date, reason: #{inspect reason}" + end + end + + @doc """ + Compares two `Date` structs. + + Returns `:gt` if first date is later than the second + and `:lt` for vice versa. If the two dates are equal + `:eq` is returned. + + ## Examples + + iex> Date.compare(~D[2016-04-16], ~D[2016-04-28]) + :lt + + This function can also be used to compare across more + complex calendar types by considering only the date fields: + + iex> Date.compare(~D[2016-04-16], ~N[2016-04-28 01:23:45]) + :lt + iex> Date.compare(~D[2016-04-16], ~N[2016-04-16 01:23:45]) + :eq + iex> Date.compare(~N[2016-04-16 12:34:56], ~N[2016-04-16 01:23:45]) + :eq + + """ + @spec compare(Calendar.date, Calendar.date) :: :lt | :eq | :gt + def compare(date1, date2) do + if Calendar.compatible_calendars?(date1.calendar, date2.calendar) do + case {to_rata_die(date1), to_rata_die(date2)} do + {first, second} when first > second -> :gt + {first, second} when first < second -> :lt + _ -> :eq + end + else + raise ArgumentError, """ + cannot compare #{inspect date1} with #{inspect date2}. + + This comparison would be ambiguous as their calendars have incompatible day rollover moments. + Specify an exact time of day (using `DateTime`s) to resolve this ambiguity + """ + end + end + + @doc """ + Converts a date from one calendar to another. + + Returns `{:ok, date}` if the calendars are compatible, + or `{:error, :incompatible_calendars}` if they are not. + + See also `Calendar.compatible_calendars?/2`. 
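For instance, converting a date to the calendar it already uses simply returns the date, per the first clause of `convert/2` below:

    Date.convert(~D[2000-01-01], Calendar.ISO)
    #=> {:ok, ~D[2000-01-01]}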
+ """ + @spec convert(Date.t, Calendar.calendar) :: {:ok, Date.t} | {:error, :incompatible_calendars} + def convert(%Date{calendar: calendar} = date, calendar), do: {:ok, date} + def convert(%Date{} = date, target_calendar) do + if Calendar.compatible_calendars?(date.calendar, target_calendar) do + result_date = + date + |> to_rata_die() + |> from_rata_die(target_calendar) + {:ok, result_date} + else + {:error, :incompatible_calendars} + end + end + + @doc """ + Similar to `Date.convert/2`, but raises an `ArgumentError` + if the conversion between the two calendars is not possible. + """ + @spec convert!(Date.t, Calendar.calendar) :: Date.t + def convert!(date, calendar) do + case convert(date, calendar) do + {:ok, value} -> + value + {:error, reason} -> + raise ArgumentError, "cannot convert #{inspect date} to target calendar #{inspect calendar}, reason: #{inspect reason}" + end + end + + @doc """ + Calculates the difference between two dates, in a full number of days. + + Note that only `Date` structs that follow the same or compatible + calendars can be compared this way. If two calendars are not compatible, + it will raise. + + ## Examples + + iex> Date.diff(~D[2000-01-03], ~D[2000-01-01]) + 2 + iex> Date.diff(~D[2000-01-01], ~D[2000-01-03]) + -2 + + """ + @spec diff(Date.t, Date.t) :: integer + def diff(%Date{} = date1, %Date{} = date2) do + if Calendar.compatible_calendars?(date1.calendar, date2.calendar) do + {days1, _} = to_rata_die(date1) + {days2, _} = to_rata_die(date2) + days1 - days2 + else + raise ArgumentError, "cannot calculate the difference between #{inspect date1} and #{inspect date2} because their calendars are not compatible and thus the result would be ambiguous" + end + end + + defp to_rata_die(%{calendar: Calendar.ISO, year: year, month: month, day: day}) do + {Calendar.ISO.date_to_rata_die_days(year, month, day), {0, 86400000000}} + end + defp to_rata_die(%{calendar: calendar, year: year, month: month, day: day}) do + calendar.naive_datetime_to_rata_die(year, month, day, 0, 0, 0, {0, 0}) + end + + defp from_rata_die({days, _}, Calendar.ISO) do + {year, month, day} = Calendar.ISO.date_from_rata_die_days(days) + %Date{year: year, month: month, day: day, calendar: Calendar.ISO} + end + defp from_rata_die(rata_die, target_calendar) do + {year, month, day, _, _, _, _} = target_calendar.naive_datetime_from_rata_die(rata_die) + %Date{year: year, month: month, day: day, calendar: target_calendar} + end + + @doc """ + Calculates the day of the week of a given `Date` struct. + + Returns the day of the week as an integer. For the ISO 8601 + calendar (the default), it is an integer from 1 to 7, where + 1 is Monday and 7 is Sunday. 
+ + ## Examples + + iex> Date.day_of_week(~D[2016-10-31]) + 1 + iex> Date.day_of_week(~D[2016-11-01]) + 2 + iex> Date.day_of_week(~N[2016-11-01 01:23:45]) + 2 + + """ + @spec day_of_week(Calendar.date) :: non_neg_integer() + def day_of_week(date) + + def day_of_week(%{calendar: calendar, year: year, month: month, day: day}) do + calendar.day_of_week(year, month, day) + end + + ## Helpers + + defimpl String.Chars do + def to_string(%{calendar: calendar, year: year, month: month, day: day}) do + calendar.date_to_string(year, month, day) + end + end + + defimpl Inspect do + def inspect(%{calendar: Calendar.ISO, year: year, month: month, day: day}, _) do + "~D[" <> Calendar.ISO.date_to_string(year, month, day) <> "]" + end + + def inspect(date, opts) do + Inspect.Any.inspect(date, opts) + end + end +end diff --git a/lib/elixir/lib/calendar/datetime.ex b/lib/elixir/lib/calendar/datetime.ex new file mode 100644 index 00000000000..c539eb29f94 --- /dev/null +++ b/lib/elixir/lib/calendar/datetime.ex @@ -0,0 +1,628 @@ +defmodule DateTime do + @moduledoc """ + A datetime implementation with a time zone. + + This datetime can be seen as an ephemeral snapshot + of a datetime at a given time zone. For such purposes, + it also includes both UTC and Standard offsets, as + well as the zone abbreviation field used exclusively + for formatting purposes. + + Remember, comparisons in Elixir using `==`, `>`, `<` and friends + are structural and based on the DateTime struct fields. For proper + comparison between datetimes, use the `compare/2` function. + + Developers should avoid creating the DateTime struct directly + and instead rely on the functions provided by this module as + well as the ones in 3rd party calendar libraries. + + ## Where are my functions? + + You will notice this module only contains conversion + functions as well as functions that work on UTC. This + is because a proper DateTime implementation requires a + TimeZone database which currently is not provided as part + of Elixir. + + Such may be addressed in upcoming versions, meanwhile, + use 3rd party packages to provide DateTime building and + similar functionality with time zone backing. + """ + + @enforce_keys [:year, :month, :day, :hour, :minute, :second, + :time_zone, :zone_abbr, :utc_offset, :std_offset] + defstruct [:year, :month, :day, :hour, :minute, :second, :time_zone, + :zone_abbr, :utc_offset, :std_offset, microsecond: {0, 0}, calendar: Calendar.ISO] + + @type t :: %__MODULE__{year: Calendar.year, month: Calendar.month, day: Calendar.day, + calendar: Calendar.calendar, hour: Calendar.hour, minute: Calendar.minute, + second: Calendar.second, microsecond: Calendar.microsecond, + time_zone: Calendar.time_zone, zone_abbr: Calendar.zone_abbr, + utc_offset: Calendar.utc_offset, std_offset: Calendar.std_offset} + + @unix_days :calendar.date_to_gregorian_days({1970, 1, 1}) - 365 + + @doc """ + Returns the current datetime in UTC. + + ## Examples + + iex> datetime = DateTime.utc_now() + iex> datetime.time_zone + "Etc/UTC" + + """ + @spec utc_now(Calendar.calendar) :: DateTime.t + def utc_now(calendar \\ Calendar.ISO) do + System.os_time |> from_unix!(:native, calendar) + end + + @doc """ + Converts the given Unix time to DateTime. + + The integer can be given in different unit + according to `System.convert_time_unit/3` and it will + be converted to microseconds internally. + + Unix times are always in UTC and therefore the DateTime + will be returned in UTC. 
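For instance, the same instant may be given in milliseconds (an illustrative call; the expected year follows from the first example below):

    {:ok, datetime} = DateTime.from_unix(1_464_096_368_000, :millisecond)
    datetime.year
    #=> 2016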
+ + ## Examples + + iex> DateTime.from_unix(1464096368) + {:ok, %DateTime{calendar: Calendar.ISO, day: 24, hour: 13, microsecond: {0, 0}, minute: 26, + month: 5, second: 8, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0, + year: 2016, zone_abbr: "UTC"}} + + iex> DateTime.from_unix(1432560368868569, :microsecond) + {:ok, %DateTime{calendar: Calendar.ISO, day: 25, hour: 13, microsecond: {868569, 6}, minute: 26, + month: 5, second: 8, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0, + year: 2015, zone_abbr: "UTC"}} + + The unit can also be an integer as in `t:System.time_unit/0`: + + iex> DateTime.from_unix(143256036886856, 1024) + {:ok, %DateTime{calendar: Calendar.ISO, day: 17, hour: 7, microsecond: {320312, 3}, + minute: 5, month: 3, second: 22, std_offset: 0, time_zone: "Etc/UTC", + utc_offset: 0, year: 6403, zone_abbr: "UTC"}} + + Negative Unix times are supported, up to -62167219200 seconds, + which is equivalent to "0000-01-01T00:00:00Z" or 0 Gregorian seconds. + """ + @spec from_unix(integer, :native | System.time_unit, Calendar.calendar) :: {:ok, DateTime.t} | {:error, atom} + def from_unix(integer, unit \\ :second, calendar \\ Calendar.ISO) when is_integer(integer) do + case Calendar.ISO.from_unix(integer, unit) do + {:ok, {year, month, day}, {hour, minute, second}, microsecond} -> + iso_datetime = %DateTime{year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond, + std_offset: 0, utc_offset: 0, zone_abbr: "UTC", time_zone: "Etc/UTC"} + convert(iso_datetime, calendar) + {:error, _} = error -> + error + end + end + + @doc """ + Converts the given Unix time to DateTime. + + The integer can be given in different unit + according to `System.convert_time_unit/3` and it will + be converted to microseconds internally. + + Unix times are always in UTC and therefore the DateTime + will be returned in UTC. + + ## Examples + + iex> DateTime.from_unix!(1464096368) + %DateTime{calendar: Calendar.ISO, day: 24, hour: 13, microsecond: {0, 0}, minute: 26, + month: 5, second: 8, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0, + year: 2016, zone_abbr: "UTC"} + + iex> DateTime.from_unix!(1432560368868569, :microsecond) + %DateTime{calendar: Calendar.ISO, day: 25, hour: 13, microsecond: {868569, 6}, minute: 26, + month: 5, second: 8, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0, + year: 2015, zone_abbr: "UTC"} + + """ + @spec from_unix!(integer, :native | System.time_unit, Calendar.calendar) :: DateTime.t + def from_unix!(integer, unit \\ :second, calendar \\ Calendar.ISO) when is_atom(unit) do + case from_unix(integer, unit, calendar) do + {:ok, datetime} -> + datetime + {:error, :invalid_unix_time} -> + raise ArgumentError, "invalid Unix time #{integer}" + end + end + + @doc """ + Converts the given NaiveDateTime to DateTime. + + It expects a time zone to put the NaiveDateTime in. + Currently it only supports "Etc/UTC" as time zone. 
+ + ## Examples + + iex> DateTime.from_naive(~N[2016-05-24 13:26:08.003], "Etc/UTC") + {:ok, %DateTime{calendar: Calendar.ISO, day: 24, hour: 13, microsecond: {3000, 3}, minute: 26, + month: 5, second: 8, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0, + year: 2016, zone_abbr: "UTC"}} + """ + @spec from_naive(NaiveDateTime.t, Calendar.time_zone) :: {:ok, DateTime.t} + def from_naive(naive_datetime, time_zone) + + def from_naive(%NaiveDateTime{calendar: calendar, + hour: hour, minute: minute, second: second, microsecond: microsecond, + year: year, month: month, day: day}, "Etc/UTC") do + {:ok, %DateTime{calendar: calendar, year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond, + std_offset: 0, utc_offset: 0, zone_abbr: "UTC", time_zone: "Etc/UTC"}} + end + + @doc """ + Converts the given NaiveDateTime to DateTime. + + It expects a time zone to put the NaiveDateTime in. + Currently it only supports "Etc/UTC" as time zone. + + ## Examples + + iex> DateTime.from_naive!(~N[2016-05-24 13:26:08.003], "Etc/UTC") + %DateTime{calendar: Calendar.ISO, day: 24, hour: 13, microsecond: {3000, 3}, minute: 26, + month: 5, second: 8, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0, + year: 2016, zone_abbr: "UTC"} + + """ + @spec from_naive!(non_neg_integer, :native | System.time_unit) :: DateTime.t + def from_naive!(naive_datetime, time_zone) do + case from_naive(naive_datetime, time_zone) do + {:ok, datetime} -> + datetime + {:error, reason} -> + raise ArgumentError, "cannot parse #{inspect naive_datetime} to datetime, reason: #{inspect reason}" + end + end + + @doc """ + Converts the given DateTime to Unix time. + + The DateTime is expected to be using the ISO calendar + with a year greater than or equal to 0. + + It will return the integer with the given unit, + according to `System.convert_time_unit/3`. + + ## Examples + + iex> 1464096368 |> DateTime.from_unix!() |> DateTime.to_unix() + 1464096368 + + iex> dt = %DateTime{calendar: Calendar.ISO, day: 20, hour: 18, microsecond: {273806, 6}, + ...> minute: 58, month: 11, second: 19, time_zone: "America/Montevideo", + ...> utc_offset: -10800, std_offset: 3600, year: 2014, zone_abbr: "UYST"} + iex> DateTime.to_unix(dt) + 1416517099 + + iex> flamel = %DateTime{calendar: Calendar.ISO, day: 22, hour: 8, microsecond: {527771, 6}, + ...> minute: 2, month: 3, second: 25, std_offset: 0, time_zone: "Etc/UTC", + ...> utc_offset: 0, year: 1418, zone_abbr: "UTC"} + iex> DateTime.to_unix(flamel) + -17412508655 + + """ + @spec to_unix(DateTime.t, System.time_unit) :: non_neg_integer + def to_unix(datetime, unit \\ :second) + + def to_unix(%DateTime{utc_offset: utc_offset, std_offset: std_offset} = datetime, unit) do + {days, fraction} = to_rata_die(datetime) + unix_units = Calendar.ISO.rata_die_to_unit({days - @unix_days, fraction}, unit) + offset_units = System.convert_time_unit(utc_offset + std_offset, :second, unit) + unix_units - offset_units + end + + @doc """ + Converts a `DateTime` into a `NaiveDateTime`. + + Because `NaiveDateTime` does not hold time zone information, + any time zone related data will be lost during the conversion. 
+ + ## Examples + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 1}, + ...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"} + iex> DateTime.to_naive(dt) + ~N[2000-02-29 23:00:07.0] + + """ + def to_naive(%DateTime{year: year, month: month, day: day, calendar: calendar, + hour: hour, minute: minute, second: second, microsecond: microsecond}) do + %NaiveDateTime{year: year, month: month, day: day, calendar: calendar, + hour: hour, minute: minute, second: second, microsecond: microsecond} + end + + @doc """ + Converts a `DateTime` into a `Date`. + + Because `Date` does not hold time nor time zone information, + data will be lost during the conversion. + + ## Examples + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"} + iex> DateTime.to_date(dt) + ~D[2000-02-29] + + """ + def to_date(%DateTime{year: year, month: month, day: day, calendar: calendar}) do + %Date{year: year, month: month, day: day, calendar: calendar} + end + + @doc """ + Converts a `DateTime` into `Time`. + + Because `Time` does not hold date nor time zone information, + data will be lost during the conversion. + + ## Examples + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 1}, + ...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"} + iex> DateTime.to_time(dt) + ~T[23:00:07.0] + + """ + def to_time(%DateTime{hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: calendar}) do + %Time{hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: calendar} + end + + @doc """ + Converts the given datetime to + [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601) format. + + By default, `DateTime.to_iso8601/2` returns datetimes formatted in the "extended" + format, for human readability. It also supports the "basic" format through passing the `:basic` option. + + Only supports converting datetimes which are in the ISO calendar, + attempting to convert datetimes from other calendars will raise. + + WARNING: the ISO 8601 datetime format does not contain the time zone nor + its abbreviation, which means information is lost when converting to such + format. 
+ + ### Examples + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"} + iex> DateTime.to_iso8601(dt) + "2000-02-29T23:00:07+01:00" + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "UTC", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"} + iex> DateTime.to_iso8601(dt) + "2000-02-29T23:00:07Z" + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"} + iex> DateTime.to_iso8601(dt, :extended) + "2000-02-29T23:00:07-04:00" + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"} + iex> DateTime.to_iso8601(dt, :basic) + "20000229T230007-0400" + + """ + @spec to_iso8601(Calendar.datetime, :extended | :basic ) :: String.t + def to_iso8601(datetime, format \\ :extended) + + def to_iso8601(%{calendar: Calendar.ISO, year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond, + time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset}, format) when format in [:extended, :basic] do + Calendar.ISO.datetime_to_iso8601(year, month, day, hour, minute, second, microsecond, + time_zone, zone_abbr, utc_offset, std_offset, format) + end + + def to_iso8601(%{calendar: _, year: _, month: _, day: _, + hour: _, minute: _, second: _, microsecond: _, + time_zone: _, zone_abbr: _, utc_offset: _, std_offset: _} = datetime, format) when format in [:extended, :basic] do + datetime + |> convert!(Calendar.ISO) + |> to_iso8601(format) + end + + def to_iso8601(_, format) do + raise ArgumentError, "DateTime.to_iso8601/2 expects format to be :extended or :basic, got: #{inspect format}" + end + + @doc """ + Parses the extended "Date and time of day" format described by + [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). + + Since ISO8601 does not include the proper time zone, the given + string will be converted to UTC and its offset in seconds will be + returned as part of this function. Therefore offset information + must be present in the string. + + As specified in the standard, the separator "T" may be omitted if + desired as there is no ambiguity within this function. + + Time representations with reduced accuracy are not supported. + + Note that while ISO8601 allows datetimes to specify 24:00:00 as the + zero hour of the next day, this notation is not supported by Elixir. 
+ + ## Examples + + iex> DateTime.from_iso8601("2015-01-23T23:50:07Z") + {:ok, %DateTime{calendar: Calendar.ISO, day: 23, hour: 23, microsecond: {0, 0}, minute: 50, month: 1, second: 7, std_offset: 0, + time_zone: "Etc/UTC", utc_offset: 0, year: 2015, zone_abbr: "UTC"}, 0} + iex> DateTime.from_iso8601("2015-01-23T23:50:07.123+02:30") + {:ok, %DateTime{calendar: Calendar.ISO, day: 23, hour: 21, microsecond: {123000, 3}, minute: 20, month: 1, second: 7, std_offset: 0, + time_zone: "Etc/UTC", utc_offset: 0, year: 2015, zone_abbr: "UTC"}, 9000} + iex> DateTime.from_iso8601("2015-01-23T23:50:07,123+02:30") + {:ok, %DateTime{calendar: Calendar.ISO, day: 23, hour: 21, microsecond: {123000, 3}, minute: 20, month: 1, second: 7, std_offset: 0, + time_zone: "Etc/UTC", utc_offset: 0, year: 2015, zone_abbr: "UTC"}, 9000} + + iex> DateTime.from_iso8601("2015-01-23P23:50:07") + {:error, :invalid_format} + iex> DateTime.from_iso8601("2015-01-23 23:50:07A") + {:error, :invalid_format} + iex> DateTime.from_iso8601("2015-01-23T23:50:07") + {:error, :missing_offset} + iex> DateTime.from_iso8601("2015-01-23 23:50:61") + {:error, :invalid_time} + iex> DateTime.from_iso8601("2015-01-32 23:50:07") + {:error, :invalid_date} + + iex> DateTime.from_iso8601("2015-01-23T23:50:07.123-00:00") + {:error, :invalid_format} + iex> DateTime.from_iso8601("2015-01-23T23:50:07.123-00:60") + {:error, :invalid_format} + + """ + @spec from_iso8601(String.t, Calendar.calendar) :: {:ok, t, Calendar.utc_offset} | {:error, atom} + def from_iso8601(string, calendar \\ Calendar.ISO) + + def from_iso8601(<>, calendar) when sep in [?\s, ?T] do + with {year, ""} <- Integer.parse(year), + {month, ""} <- Integer.parse(month), + {day, ""} <- Integer.parse(day), + {hour, ""} <- Integer.parse(hour), + {minute, ""} <- Integer.parse(min), + {second, ""} <- Integer.parse(sec), + {microsecond, rest} <- Calendar.ISO.parse_microsecond(rest), + {:ok, date} <- Date.new(year, month, day), + {:ok, time} <- Time.new(hour, minute, second, microsecond), + {:ok, offset} <- parse_offset(rest) do + %{year: year, month: month, day: day} = date + %{hour: hour, minute: minute, second: second, microsecond: microsecond} = time + + datetime = + Calendar.ISO.naive_datetime_to_rata_die(year, month, day, hour, minute, second, microsecond) + |> apply_tz_offset(offset) + |> from_rata_die("Etc/UTC", "UTC", 0, 0, calendar) + + {:ok, %{datetime | microsecond: microsecond}, offset} + else + {:error, reason} -> {:error, reason} + _ -> {:error, :invalid_format} + end + end + + def from_iso8601(_, _) do + {:error, :invalid_format} + end + + defp parse_offset(rest) do + case Calendar.ISO.parse_offset(rest) do + {offset, ""} when is_integer(offset) -> {:ok, offset} + {nil, ""} -> {:error, :missing_offset} + _ -> {:error, :invalid_format} + end + end + + @doc """ + Converts the given datetime to a string according to its calendar. 
+ + ### Examples + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"} + iex> DateTime.to_string(dt) + "2000-02-29 23:00:07+01:00 CET Europe/Warsaw" + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "UTC", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"} + iex> DateTime.to_string(dt) + "2000-02-29 23:00:07Z" + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"} + iex> DateTime.to_string(dt) + "2000-02-29 23:00:07-04:00 AMT America/Manaus" + + """ + @spec to_string(Calendar.datetime) :: String.t + def to_string(datetime) + + def to_string(%{calendar: calendar, year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond, + time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset}) do + calendar.datetime_to_string(year, month, day, hour, minute, second, microsecond, + time_zone, zone_abbr, utc_offset, std_offset) + end + + defimpl String.Chars do + def to_string(%{calendar: calendar, year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond, + time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset}) do + calendar.datetime_to_string(year, month, day, hour, minute, second, microsecond, + time_zone, zone_abbr, utc_offset, std_offset) + end + end + + @doc """ + Compares two `DateTime` structs. + + Returns `:gt` if first datetime is later than the second + and `:lt` for vice versa. If the two datetimes are equal + `:eq` is returned. + + Note that both utc and stc offsets will be taken into + account when comparison is done. + + ## Examples + + iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"} + iex> dt2 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"} + iex> DateTime.compare(dt1, dt2) + :gt + + """ + @spec compare(DateTime.t, DateTime.t) :: :lt | :eq | :gt + def compare(%DateTime{utc_offset: utc_offset1, std_offset: std_offset1} = datetime1, + %DateTime{utc_offset: utc_offset2, std_offset: std_offset2} = datetime2) do + {days1, {parts1, ppd1}} = + datetime1 + |> to_rata_die() + |> apply_tz_offset(utc_offset1 + std_offset1) + + {days2, {parts2, ppd2}} = + datetime2 + |> to_rata_die() + |> apply_tz_offset(utc_offset2 + std_offset2) + + # Ensure fraction tuples have same denominator. + rata_die1 = {days1, parts1 * ppd2} + rata_die2 = {days2, parts2 * ppd1} + + case {rata_die1, rata_die2} do + {first, second} when first > second -> :gt + {first, second} when first < second -> :lt + _ -> :eq + end + end + + @doc """ + Subtracts `datetime2` from `datetime1`. + + The answer can be returned in any `unit` available from `t:System.time_unit/0`. + + This function returns the difference in seconds where seconds are measured + according to `Calendar.ISO`. 
+ + ## Examples + + iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"} + iex> dt2 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"} + iex> DateTime.diff(dt1, dt2) + 18000 + + """ + @spec diff(DateTime.t, DateTime.t) :: integer() + def diff(%DateTime{utc_offset: utc_offset1, std_offset: std_offset1} = datetime1, + %DateTime{utc_offset: utc_offset2, std_offset: std_offset2} = datetime2, unit \\ :second) do + naive_diff = + (datetime1 |> to_rata_die() |> Calendar.ISO.rata_die_to_unit(unit)) - + (datetime2 |> to_rata_die() |> Calendar.ISO.rata_die_to_unit(unit)) + offset_diff = + (utc_offset2 + std_offset2) - (utc_offset1 + std_offset1) + naive_diff + System.convert_time_unit(offset_diff, :second, unit) + end + + @doc """ + Converts a DateTime from one calendar to another. + + If this conversion fails for some reason, an `{:error, reason}` tuple is returned. + + ## Examples + + iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"} + iex> DateTime.convert(dt1, Calendar.ISO) + {:ok, %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT", + hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}} + + """ + @spec convert(DateTime.t, Calendar.calendar) :: {:ok, DateTime.t} | {:error, atom} + def convert(%DateTime{calendar: calendar} = datetime, calendar) do + {:ok, datetime} + end + + def convert(%DateTime{} = datetime, calendar) do + result_datetime = + datetime + |> to_rata_die + |> from_rata_die(datetime, calendar) + {:ok, result_datetime} + end + + @doc """ + Converts a `DateTime` struct from one calendar to another. + + If this conversion fails for some reason, an `ArgumentError` is raised. 
+ + ## Examples + + iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"} + iex> DateTime.convert!(dt1, Calendar.ISO) + %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT", + hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"} + + """ + @spec convert!(DateTime.t, Calendar.calendar) :: DateTime.t + def convert!(datetime, calendar) do + case convert(datetime, calendar) do + {:ok, value} -> + value + {:error, reason} -> + raise ArgumentError, "cannot convert #{inspect datetime} to target calendar #{inspect calendar}, reason: #{inspect reason}" + end + end + + defp to_rata_die(%DateTime{calendar: calendar,year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond}) do + calendar.naive_datetime_to_rata_die(year, month, day, hour, minute, second, microsecond) + end + + defp from_rata_die(rata_die, datetime, calendar) do + %{time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset} = datetime + from_rata_die(rata_die, time_zone, zone_abbr, utc_offset, std_offset, calendar) + end + + defp from_rata_die(rata_die, time_zone, zone_abbr, utc_offset, std_offset, calendar) do + {year, month, day, hour, minute, second, microsecond} = calendar.naive_datetime_from_rata_die(rata_die) + %DateTime{year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond, + time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset} + end + + defp apply_tz_offset(rata_die, offset) do + Calendar.ISO.add_day_fraction_to_rata_die(rata_die, -offset, 86400) + end +end diff --git a/lib/elixir/lib/calendar/iso.ex b/lib/elixir/lib/calendar/iso.ex new file mode 100644 index 00000000000..bfb9d5ae5bd --- /dev/null +++ b/lib/elixir/lib/calendar/iso.ex @@ -0,0 +1,432 @@ +defmodule Calendar.ISO do + @moduledoc """ + A calendar implementation that follows to ISO8601. + + This calendar implements the proleptic Gregorian calendar and + is therefore compatible with the calendar used in most countries + today. The proleptic means the Gregorian rules for leap years are + applied for all time, consequently the dates give different results + before the year 1583 from when the Gregorian calendar was adopted. + + Note that while ISO8601 allows times and datetimes to specify + 24:00:00 as the zero hour of the next day, this notation is not + supported by Elixir. + """ + + @behaviour Calendar + + @unix_epoch :calendar.datetime_to_gregorian_seconds {{1970, 1, 1}, {0, 0, 0}} + @unix_start 1_000_000 * -@unix_epoch + @unix_end 1_000_000 * (-@unix_epoch + :calendar.datetime_to_gregorian_seconds({{9999, 12, 31}, {23, 59, 59}})) + @unix_range_microseconds @unix_start..@unix_end + + @type year :: 0..9999 + @type month :: 1..12 + @type day :: 1..31 + + @seconds_per_minute 60 + @seconds_per_hour 60 * 60 + @seconds_per_day 24 * 60 * 60 # Note that this does _not_ handle leap seconds. + @microseconds_per_second 1_000_000 + + @doc """ + Returns the normalized Rata Die representation of the specified date. 
+ + ## Examples + + iex> Calendar.ISO.naive_datetime_to_rata_die(1, 1, 1, 0, 0, 0, {0, 6}) + {1, {0, 86400000000}} + iex> Calendar.ISO.naive_datetime_to_rata_die(2000, 1, 1, 12, 0, 0, {0, 6}) + {730120, {43200000000, 86400000000}} + iex> Calendar.ISO.naive_datetime_to_rata_die(2000, 1, 1, 13, 0, 0, {0, 6}) + {730120, {46800000000, 86400000000}} + + """ + @spec naive_datetime_to_rata_die(Calendar.year, Calendar.month, Calendar.day, + Calendar.hour, Calendar.minute, Calendar.second, + Calendar.microsecond) :: Calendar.rata_die + def naive_datetime_to_rata_die(year, month, day, hour, minute, second, microsecond) do + {date_to_rata_die_days(year, month, day), + time_to_day_fraction(hour, minute, second, microsecond)} + end + + @doc """ + Converts a Rata Die to the datetime format specified by this calendar. + + ## Examples + + iex> Calendar.ISO.naive_datetime_from_rata_die({1, {0, 86400}}) + {1, 1, 1, 0, 0, 0, {0, 6}} + + iex> Calendar.ISO.naive_datetime_from_rata_die({730120, {0, 86400}}) + {2000, 1, 1, 0, 0, 0, {0, 6}} + + iex> Calendar.ISO.naive_datetime_from_rata_die({730120, {43200, 86400}}) + {2000, 1, 1, 12, 0, 0, {0, 6}} + + """ + @spec naive_datetime_from_rata_die(Calendar.rata_die) :: + {Calendar.year, Calendar.month, Calendar.day, + Calendar.hour, Calendar.minute, Calendar.second, Calendar.microsecond} + def naive_datetime_from_rata_die({days, day_fraction}) do + {year, month, day} = date_from_rata_die_days(days) + {hour, minute, second, microsecond} = time_from_day_fraction(day_fraction) + {year, month, day, hour, minute, second, microsecond} + end + + @doc """ + Returns the normalized day fraction of the specified time. + + ## Examples + + iex> Calendar.ISO.time_to_day_fraction(0, 0, 0, {0, 6}) + {0, 86400000000} + iex> Calendar.ISO.time_to_day_fraction(12, 34, 56, {123, 6}) + {45296000123, 86400000000} + + """ + @spec time_to_day_fraction(Calendar.hour, Calendar.minute, + Calendar.second, Calendar.microsecond) :: Calendar.day_fraction + def time_to_day_fraction(hour, minute, second, {microsecond, _}) do + combined_seconds = hour * @seconds_per_hour + minute * @seconds_per_minute + second + {combined_seconds * @microseconds_per_second + microsecond, @seconds_per_day * @microseconds_per_second} + end + + @doc """ + Converts a day fraction to this Calendar's representation of time. + + ## Examples + + iex> Calendar.ISO.time_from_day_fraction({1,2}) + {12, 0, 0, {0, 6}} + iex> Calendar.ISO.time_from_day_fraction({13,24}) + {13, 0, 0, {0, 6}} + + """ + @spec time_from_day_fraction(Calendar.day_fraction) :: + {Calendar.hour, Calendar.minute, Calendar.second, Calendar.microsecond} + def time_from_day_fraction({parts_in_day, parts_per_day}) do + total_microseconds = div(parts_in_day * @seconds_per_day * @microseconds_per_second, parts_per_day) + {hours, rest_microseconds1} = div_mod(total_microseconds, @seconds_per_hour * @microseconds_per_second) + {minutes, rest_microseconds2} = div_mod(rest_microseconds1, @seconds_per_minute * @microseconds_per_second) + {seconds, microseconds} = div_mod(rest_microseconds2, @microseconds_per_second) + {hours, minutes, seconds, {microseconds, 6}} + end + + # Converts a year, month, day in only a count of days since the Rata Die epoch. + @doc false + def date_to_rata_die_days(year, month, day) do + # Rata Die starts at year 1, rather than at year 0. + :calendar.date_to_gregorian_days(year, month, day) - 365 + end + + # Calculates {year, month, day} from the count of days since the Rata Die epoch. 
+ @doc false + def date_from_rata_die_days(days) do + :calendar.gregorian_days_to_date(days + 365) + end + + defp div_mod(int1, int2) do + div = div(int1, int2) + mod = int1 - (div * int2) + {div, mod} + end + + @doc """ + Returns how many days there are in the given year-month. + + ## Examples + + iex> Calendar.ISO.days_in_month(1900, 1) + 31 + iex> Calendar.ISO.days_in_month(1900, 2) + 28 + iex> Calendar.ISO.days_in_month(2000, 2) + 29 + iex> Calendar.ISO.days_in_month(2001, 2) + 28 + iex> Calendar.ISO.days_in_month(2004, 2) + 29 + iex> Calendar.ISO.days_in_month(2004, 4) + 30 + + """ + @spec days_in_month(year, month) :: 28..31 + def days_in_month(year, month) + + def days_in_month(year, 2) do + if leap_year?(year), do: 29, else: 28 + end + def days_in_month(_, month) when month in [4, 6, 9, 11], do: 30 + def days_in_month(_, month) when month in 1..12, do: 31 + + @doc """ + Returns if the given year is a leap year. + + ## Examples + + iex> Calendar.ISO.leap_year?(2000) + true + iex> Calendar.ISO.leap_year?(2001) + false + iex> Calendar.ISO.leap_year?(2004) + true + iex> Calendar.ISO.leap_year?(1900) + false + + """ + @spec leap_year?(year) :: boolean() + def leap_year?(year) when is_integer(year) and year >= 0 do + rem(year, 4) === 0 and (rem(year, 100) > 0 or rem(year, 400) === 0) + end + + @doc """ + Calculates the day of the week from the given `year`, `month`, and `day`. + + It is an integer from 1 to 7, where 1 is Monday and 7 is Sunday. + + ## Examples + + iex> Calendar.ISO.day_of_week(2016, 10, 31) + 1 + iex> Calendar.ISO.day_of_week(2016, 11, 01) + 2 + iex> Calendar.ISO.day_of_week(2016, 11, 02) + 3 + iex> Calendar.ISO.day_of_week(2016, 11, 03) + 4 + iex> Calendar.ISO.day_of_week(2016, 11, 04) + 5 + iex> Calendar.ISO.day_of_week(2016, 11, 05) + 6 + iex> Calendar.ISO.day_of_week(2016, 11, 06) + 7 + """ + @spec day_of_week(year, month, day) :: 1..7 + def day_of_week(year, month, day) + when is_integer(year) and is_integer(month) and is_integer(day) do + :calendar.day_of_the_week(year, month, day) + end + + @doc """ + Converts the given time into a string. + """ + def time_to_string(hour, minute, second, microsecond, format \\ :extended) + + def time_to_string(hour, minute, second, {_, 0}, format) do + time_to_string_format(hour, minute, second, format) + end + + def time_to_string(hour, minute, second, {microsecond, precision}, format) do + time_to_string_format(hour, minute, second, format) <> + "." <> (microsecond |> zero_pad(6) |> binary_part(0, precision)) + end + + defp time_to_string_format(hour, minute, second, :extended) do + zero_pad(hour, 2) <> ":" <> zero_pad(minute, 2) <> ":" <> zero_pad(second, 2) + end + + defp time_to_string_format(hour, minute, second, :basic) do + zero_pad(hour, 2) <> zero_pad(minute, 2) <> zero_pad(second, 2) + end + + @doc """ + Converts the given date into a string. + """ + def date_to_string(year, month, day) do + zero_pad(year, 4) <> "-" <> zero_pad(month, 2) <> "-" <> zero_pad(day, 2) + end + + defp date_to_string(year, month, day, :extended), do: date_to_string(year, month, day) + defp date_to_string(year, month, day, :basic) do + zero_pad(year, 4) <> zero_pad(month, 2) <> zero_pad(day, 2) + end + + @doc """ + Converts the datetime (without time zone) into a string. + """ + def naive_datetime_to_string(year, month, day, hour, minute, second, microsecond) do + date_to_string(year, month, day) <> " " <> time_to_string(hour, minute, second, microsecond) + end + + @doc """ + Convers the datetime (with time zone) into a string. 
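For example (the values mirror the `DateTime.to_string/1` doctests earlier in this patch):

    Calendar.ISO.datetime_to_string(2000, 2, 29, 23, 0, 7, {0, 0},
                                    "Europe/Warsaw", "CET", 3600, 0)
    #=> "2000-02-29 23:00:07+01:00 CET Europe/Warsaw"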
+ """ + def datetime_to_string(year, month, day, hour, minute, second, microsecond, + time_zone, zone_abbr, utc_offset, std_offset) do + date_to_string(year, month, day) <> " " <> + time_to_string(hour, minute, second, microsecond) <> + offset_to_string(utc_offset, std_offset, time_zone) <> + zone_to_string(utc_offset, std_offset, zone_abbr, time_zone) + end + + def valid_date?(year, month, day) do + :calendar.valid_date(year, month, day) and year <= 9999 + end + + def valid_time?(hour, minute, second, {microsecond, _}) do + hour in 0..23 and minute in 0..59 and second in 0..60 and microsecond in 0..999_999 + end + + def day_rollover_relative_to_midnight_utc() do + {0, 1} + end + + defp offset_to_string(utc, std, zone, format \\ :extended) + defp offset_to_string(0, 0, "Etc/UTC", _format), do: "Z" + defp offset_to_string(utc, std, _zone, format) do + total = utc + std + second = abs(total) + minute = second |> rem(3600) |> div(60) + hour = div(second, 3600) + format_offset(total, hour, minute, format) + end + + defp format_offset(total, hour, minute, :extended) do + sign(total) <> zero_pad(hour, 2) <> ":" <> zero_pad(minute, 2) + end + + defp format_offset(total, hour, minute, :basic) do + sign(total) <> zero_pad(hour, 2) <> zero_pad(minute, 2) + end + + defp zone_to_string(0, 0, _abbr, "Etc/UTC"), do: "" + defp zone_to_string(_, _, abbr, zone), do: " " <> abbr <> " " <> zone + + defp sign(total) when total < 0, do: "-" + defp sign(_), do: "+" + + defp zero_pad(val, count) do + num = Integer.to_string(val) + :binary.copy("0", count - byte_size(num)) <> num + end + + ## Helpers + + @doc false + def from_unix(integer, unit) when is_integer(integer) do + total = System.convert_time_unit(integer, unit, :microsecond) + if total in @unix_range_microseconds do + microsecond = rem(total, 1_000_000) + precision = precision_for_unit(unit) + {date, time} = :calendar.gregorian_seconds_to_datetime(@unix_epoch + div(total, 1_000_000)) + {:ok, date, time, {microsecond, precision}} + else + {:error, :invalid_unix_time} + end + end + + defp precision_for_unit(unit) do + subsecond = div System.convert_time_unit(1, :second, unit), 10 + precision_for_unit(subsecond, 0) + end + + defp precision_for_unit(0, precision), + do: precision + defp precision_for_unit(_, 6), + do: 6 + defp precision_for_unit(number, precision), + do: precision_for_unit(div(number, 10), precision + 1) + + @doc false + def date_to_iso8601(year, month, day, format \\ :extended) do + date_to_string(year, month, day, format) + end + + @doc false + def time_to_iso8601(hour, minute, second, microsecond, format \\ :extended) do + time_to_string(hour, minute, second, microsecond, format) + end + + @doc false + def naive_datetime_to_iso8601(year, month, day, hour, minute, second, microsecond, format \\ :extended) do + date_to_string(year, month, day, format) <> "T" <> time_to_string(hour, minute, second, microsecond, format) + end + + @doc false + def datetime_to_iso8601(year, month, day, hour, minute, second, microsecond, + time_zone, _zone_abbr, utc_offset, std_offset, format \\ :extended) do + date_to_string(year, month, day, format) <> "T" <> + time_to_string(hour, minute, second, microsecond, format) <> + offset_to_string(utc_offset, std_offset, time_zone, format) + end + + @doc false + def parse_microsecond("." 
<> rest) do + case parse_microsecond(rest, 0, "") do + {"", 0, _} -> + :error + {microsecond, precision, rest} when precision in 1..6 -> + pad = String.duplicate("0", 6 - byte_size(microsecond)) + {{String.to_integer(microsecond <> pad), precision}, rest} + {microsecond, _precision, rest} -> + {{String.to_integer(binary_part(microsecond, 0, 6)), 6}, rest} + end + end + + def parse_microsecond("," <> rest) do + parse_microsecond("." <> rest) + end + + def parse_microsecond(rest) do + {{0, 0}, rest} + end + + defp parse_microsecond(<>, precision, acc) when head in ?0..?9, + do: parse_microsecond(tail, precision + 1, <>) + defp parse_microsecond(rest, precision, acc), + do: {acc, precision, rest} + + @doc false + def parse_offset(""), + do: {nil, ""} + def parse_offset("Z"), + do: {0, ""} + def parse_offset("-00:00"), + do: :error + def parse_offset(<>), + do: parse_offset(1, hour, min, rest) + def parse_offset(<>), + do: parse_offset(-1, hour, min, rest) + def parse_offset(_), + do: :error + + defp parse_offset(sign, hour, min, rest) do + with {hour, ""} when hour < 24 <- Integer.parse(hour), + {min, ""} when min < 60 <- Integer.parse(min) do + {((hour * 60) + min) * 60 * sign, rest} + else + _ -> :error + end + end + + @doc false + def rata_die_to_unit({days, {parts, ppd}}, unit) do + day_microseconds = days * @seconds_per_day * @microseconds_per_second + microseconds = div(parts * @seconds_per_day * @microseconds_per_second, ppd) + System.convert_time_unit(day_microseconds + microseconds, :microsecond, unit) + end + + @doc false + def add_day_fraction_to_rata_die({days, {parts, ppd}}, add, ppd) do + normalize_rata_die(days, parts + add, ppd) + end + def add_day_fraction_to_rata_die({days, {parts, ppd}}, add, add_ppd) do + parts = parts * add_ppd + add = add * ppd + gcd = Integer.gcd(ppd, add_ppd) + result_parts = div(parts + add, gcd) + result_ppd = div(ppd * add_ppd, gcd) + normalize_rata_die(days, result_parts, result_ppd) + end + + defp normalize_rata_die(days, parts, ppd) do + days_offset = div(parts, ppd) + parts = rem(parts, ppd) + if parts < 0 do + {days + days_offset - 1, {parts + ppd, ppd}} + else + {days + days_offset, {parts, ppd}} + end + end +end diff --git a/lib/elixir/lib/calendar/naive_datetime.ex b/lib/elixir/lib/calendar/naive_datetime.ex new file mode 100644 index 00000000000..07378ef488f --- /dev/null +++ b/lib/elixir/lib/calendar/naive_datetime.ex @@ -0,0 +1,656 @@ +defmodule NaiveDateTime do + @moduledoc """ + A NaiveDateTime struct (without a time zone) and functions. + + The NaiveDateTime struct contains the fields year, month, day, hour, + minute, second, microsecond and calendar. New naive datetimes can be + built with the `new/7` function or using the `~N` sigil: + + iex> ~N[2000-01-01 23:00:07] + ~N[2000-01-01 23:00:07] + + Both `new/7` and sigil return a struct where the date fields can + be accessed directly: + + iex> naive = ~N[2000-01-01 23:00:07] + iex> naive.year + 2000 + iex> naive.second + 7 + + The naive bit implies this datetime representation does + not have a time zone. This means the datetime may not + actually exist in certain areas in the world even though + it is valid. + + For example, when daylight saving changes are applied + by a region, the clock typically moves forward or backward + by one hour. This means certain datetimes never occur or + may occur more than once. Since `NaiveDateTime` is not + validated against a time zone, such errors would go unnoticed. 
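For example, if a region moves its clocks forward from 02:00 to 03:00 on a given night, a naive datetime such as the (illustrative) one below never occurs on local clocks there, yet it is accepted without complaint:

    ~N[2018-03-25 02:30:00]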
+ + Remember, comparisons in Elixir using `==`, `>`, `<` and friends + are structural and based on the NaiveDateTime struct fields. For + proper comparison between naive datetimes, use the `compare/2` + function. + + Developers should avoid creating the NaiveDateTime struct directly + and instead rely on the functions provided by this module as well + as the ones in 3rd party calendar libraries. + """ + + @enforce_keys [:year, :month, :day, :hour, :minute, :second] + defstruct [:year, :month, :day, :hour, :minute, :second, microsecond: {0, 0}, calendar: Calendar.ISO] + + @type t :: %NaiveDateTime{year: Calendar.year, month: Calendar.month, day: Calendar.day, + calendar: Calendar.calendar, hour: Calendar.hour, minute: Calendar.minute, + second: Calendar.second, microsecond: Calendar.microsecond} + + @doc """ + Returns the current naive datetime in UTC. + + Prefer using `DateTime.utc_now/0` when possible as, opposite + to `NaiveDateTime`, it will keep the time zone information. + + ## Examples + + iex> naive_datetime = NaiveDateTime.utc_now() + iex> naive_datetime.year >= 2016 + true + + """ + @spec utc_now(Calendar.calendar) :: t + def utc_now(calendar \\ Calendar.ISO) + + def utc_now(Calendar.ISO) do + {:ok, {year, month, day}, {hour, minute, second}, microsecond} = + Calendar.ISO.from_unix(:os.system_time, :native) + %NaiveDateTime{year: year, month: month, day: day, + hour: hour, minute: minute, second: second, + microsecond: microsecond, calendar: Calendar.ISO} + end + + def utc_now(calendar) do + calendar + |> DateTime.utc_now + |> DateTime.to_naive + end + + @doc """ + Builds a new ISO naive datetime. + + Expects all values to be integers. Returns `{:ok, naive_datetime}` + if each entry fits its appropriate range, returns `{:error, reason}` + otherwise. + + ## Examples + + iex> NaiveDateTime.new(2000, 1, 1, 0, 0, 0) + {:ok, ~N[2000-01-01 00:00:00]} + iex> NaiveDateTime.new(2000, 13, 1, 0, 0, 0) + {:error, :invalid_date} + iex> NaiveDateTime.new(2000, 2, 29, 0, 0, 0) + {:ok, ~N[2000-02-29 00:00:00]} + iex> NaiveDateTime.new(2000, 2, 30, 0, 0, 0) + {:error, :invalid_date} + iex> NaiveDateTime.new(2001, 2, 29, 0, 0, 0) + {:error, :invalid_date} + + iex> NaiveDateTime.new(2000, 1, 1, 23, 59, 59, {0, 1}) + {:ok, ~N[2000-01-01 23:59:59.0]} + iex> NaiveDateTime.new(2000, 1, 1, 23, 59, 59, 999_999) + {:ok, ~N[2000-01-01 23:59:59.999999]} + iex> NaiveDateTime.new(2000, 1, 1, 23, 59, 60, 999_999) + {:ok, ~N[2000-01-01 23:59:60.999999]} + iex> NaiveDateTime.new(2000, 1, 1, 24, 59, 59, 999_999) + {:error, :invalid_time} + iex> NaiveDateTime.new(2000, 1, 1, 23, 60, 59, 999_999) + {:error, :invalid_time} + iex> NaiveDateTime.new(2000, 1, 1, 23, 59, 61, 999_999) + {:error, :invalid_time} + iex> NaiveDateTime.new(2000, 1, 1, 23, 59, 59, 1_000_000) + {:error, :invalid_time} + + """ + @spec new(Calendar.year, Calendar.month, Calendar.day, + Calendar.hour, Calendar.minute, Calendar.second, Calendar.microsecond, Calendar.calendar) :: + {:ok, t} | {:error, atom} + def new(year, month, day, hour, minute, second, microsecond \\ {0, 0}, calendar \\ Calendar.ISO) do + with {:ok, date} <- Date.new(year, month, day, calendar), + {:ok, time} <- Time.new(hour, minute, second, microsecond, calendar), + do: new(date, time) + end + + @doc """ + Builds a naive datetime from date and time structs. 
+ + ## Examples + + iex> NaiveDateTime.new(~D[2010-01-13], ~T[23:00:07.005]) + {:ok, ~N[2010-01-13 23:00:07.005]} + + """ + @spec new(Date.t, Time.t) :: {:ok, t} + def new(date, time) + + def new(%Date{calendar: calendar, year: year, month: month, day: day}, + %Time{hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: calendar}) do + {:ok, %NaiveDateTime{calendar: calendar, year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond}} + end + + @doc """ + Adds a specified amount of time to a `NaiveDateTime`. + + Accepts an `integer` in any `unit` available from `t:System.time_unit/0`. + Negative values will be move backwards in time. + + This operation is only possible if both calendars are convertible to `Calendar.ISO`. + + ## Examples + + # adds seconds by default + iex> NaiveDateTime.add(~N[2014-10-02 00:29:10], 2) + ~N[2014-10-02 00:29:12] + + # accepts negative offsets + iex> NaiveDateTime.add(~N[2014-10-02 00:29:10], -2) + ~N[2014-10-02 00:29:08] + + # can work with other units + iex> NaiveDateTime.add(~N[2014-10-02 00:29:10], 2_000, :millisecond) + ~N[2014-10-02 00:29:12] + + # keeps the same precision + iex> NaiveDateTime.add(~N[2014-10-02 00:29:10.021], 21, :second) + ~N[2014-10-02 00:29:31.021] + + # changes below the precision will not be visible + iex> hidden = NaiveDateTime.add(~N[2014-10-02 00:29:10], 21, :millisecond) + iex> hidden.microsecond # ~N[2014-10-02 00:29:10] + {21000, 0} + + # from Gregorian seconds + iex> NaiveDateTime.add(~N[0000-01-01 00:00:00], 63579428950) + ~N[2014-10-02 00:29:10] + + """ + @spec add(t, integer, System.time_unit) :: t + def add(%NaiveDateTime{microsecond: {_, precision}, calendar: calendar} = naive_datetime, + integer, unit \\ :second) when is_integer(integer) do + ppd = System.convert_time_unit(86400, :second, unit) + naive_datetime + |> to_rata_die() + |> Calendar.ISO.add_day_fraction_to_rata_die(integer, ppd) + |> from_rata_die(calendar, precision) + end + + @doc """ + Subtracts `naive_datetime2` from `naive_datetime1`. + + The answer can be returned in any `unit` available from `t:System.time_unit/0`. + + This function returns the difference in seconds where seconds are measured + according to `Calendar.ISO`. + + ## Examples + + iex> NaiveDateTime.diff(~N[2014-10-02 00:29:12], ~N[2014-10-02 00:29:10]) + 2 + iex> NaiveDateTime.diff(~N[2014-10-02 00:29:12], ~N[2014-10-02 00:29:10], :microsecond) + 2_000_000 + iex> NaiveDateTime.diff(~N[2014-10-02 00:29:10.042], ~N[2014-10-02 00:29:10.021], :millisecond) + 21 + + # to Gregorian seconds + iex> NaiveDateTime.diff(~N[2014-10-02 00:29:10], ~N[0000-01-01 00:00:00]) + 63579428950 + + """ + @spec diff(t, t, System.time_unit) :: integer + def diff(%NaiveDateTime{} = naive_datetime1, + %NaiveDateTime{} = naive_datetime2, + unit \\ :second) do + if not Calendar.compatible_calendars?(naive_datetime1.calendar, naive_datetime2.calendar) do + raise ArgumentError, "cannot calculate the difference between #{inspect naive_datetime1} and #{inspect naive_datetime2} because their calendars are not compatible and thus the result would be ambiguous" + end + + units1 = naive_datetime1 |> to_rata_die() |> Calendar.ISO.rata_die_to_unit(unit) + units2 = naive_datetime2 |> to_rata_die() |> Calendar.ISO.rata_die_to_unit(unit) + units1 - units2 + end + + @doc """ + Converts a `NaiveDateTime` into a `Date`. + + Because `Date` does not hold time information, + data will be lost during the conversion. 
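A quick sanity check on `add/3` and `diff/3` above: for whole units the two are inverses of each other (a small sketch, second precision assumed):

    iex> later = NaiveDateTime.add(~N[2014-10-02 00:29:10], 90)
    iex> NaiveDateTime.diff(later, ~N[2014-10-02 00:29:10])
    90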
+ + ## Examples + + iex> NaiveDateTime.to_date(~N[2002-01-13 23:00:07]) + ~D[2002-01-13] + + """ + @spec to_date(t) :: Date.t + def to_date(%NaiveDateTime{year: year, month: month, day: day, calendar: calendar}) do + %Date{year: year, month: month, day: day, calendar: calendar} + end + + @doc """ + Converts a `NaiveDateTime` into `Time`. + + Because `Time` does not hold date information, + data will be lost during the conversion. + + ## Examples + + iex> NaiveDateTime.to_time(~N[2002-01-13 23:00:07]) + ~T[23:00:07] + + """ + @spec to_time(t) :: Time.t + def to_time(%NaiveDateTime{hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: calendar}) do + %Time{hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: calendar} + end + + @doc """ + Converts the given naive datetime to a string according to its calendar. + + ### Examples + + iex> NaiveDateTime.to_string(~N[2000-02-28 23:00:13]) + "2000-02-28 23:00:13" + iex> NaiveDateTime.to_string(~N[2000-02-28 23:00:13.001]) + "2000-02-28 23:00:13.001" + + This function can also be used to convert a DateTime to a string without + the time zone information: + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"} + iex> NaiveDateTime.to_string(dt) + "2000-02-29 23:00:07" + + """ + @spec to_string(Calendar.naive_datetime) :: String.t + def to_string(naive_datetime) + + def to_string(%{calendar: calendar, year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond}) do + calendar.naive_datetime_to_string(year, month, day, hour, minute, second, microsecond) + end + + @doc """ + Parses the extended "Date and time of day" format described by + [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). + + Timezone offset may be included in the string but they will be + simply discarded as such information is not included in naive date + times. + + As specified in the standard, the separator "T" may be omitted if + desired as there is no ambiguity within this function. + + Time representations with reduced accuracy are not supported. + + Note that while ISO8601 allows datetimes to specify 24:00:00 as the + zero hour of the next day, this notation is not supported by Elixir. 
+ + ## Examples + + iex> NaiveDateTime.from_iso8601("2015-01-23 23:50:07") + {:ok, ~N[2015-01-23 23:50:07]} + iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07") + {:ok, ~N[2015-01-23 23:50:07]} + iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07Z") + {:ok, ~N[2015-01-23 23:50:07]} + + iex> NaiveDateTime.from_iso8601("2015-01-23 23:50:07.0") + {:ok, ~N[2015-01-23 23:50:07.0]} + iex> NaiveDateTime.from_iso8601("2015-01-23 23:50:07,0123456") + {:ok, ~N[2015-01-23 23:50:07.012345]} + iex> NaiveDateTime.from_iso8601("2015-01-23 23:50:07.0123456") + {:ok, ~N[2015-01-23 23:50:07.012345]} + iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123Z") + {:ok, ~N[2015-01-23 23:50:07.123]} + + iex> NaiveDateTime.from_iso8601("2015-01-23P23:50:07") + {:error, :invalid_format} + iex> NaiveDateTime.from_iso8601("2015:01:23 23-50-07") + {:error, :invalid_format} + iex> NaiveDateTime.from_iso8601("2015-01-23 23:50:07A") + {:error, :invalid_format} + iex> NaiveDateTime.from_iso8601("2015-01-23 23:50:61") + {:error, :invalid_time} + iex> NaiveDateTime.from_iso8601("2015-01-32 23:50:07") + {:error, :invalid_date} + + iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123+02:30") + {:ok, ~N[2015-01-23 23:50:07.123]} + iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123+00:00") + {:ok, ~N[2015-01-23 23:50:07.123]} + iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123-02:30") + {:ok, ~N[2015-01-23 23:50:07.123]} + iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123-00:00") + {:error, :invalid_format} + iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123-00:60") + {:error, :invalid_format} + iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123-24:00") + {:error, :invalid_format} + + """ + @spec from_iso8601(String.t, Calendar.calendar) :: {:ok, t} | {:error, atom} + def from_iso8601(string, calendar \\ Calendar.ISO) + + def from_iso8601(<>, calendar) when sep in [?\s, ?T] do + with {year, ""} <- Integer.parse(year), + {month, ""} <- Integer.parse(month), + {day, ""} <- Integer.parse(day), + {hour, ""} <- Integer.parse(hour), + {min, ""} <- Integer.parse(min), + {sec, ""} <- Integer.parse(sec), + {microsec, rest} <- Calendar.ISO.parse_microsecond(rest), + {_offset, ""} <- Calendar.ISO.parse_offset(rest) do + with {:ok, utc_date} <- new(year, month, day, hour, min, sec, microsec, Calendar.ISO), + do: convert(utc_date, calendar) + else + _ -> {:error, :invalid_format} + end + end + + def from_iso8601(<<_::binary>>, _calendar) do + {:error, :invalid_format} + end + + @doc """ + Parses the extended "Date and time of day" format described by + [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). + + Raises if the format is invalid. + + ## Examples + + iex> NaiveDateTime.from_iso8601!("2015-01-23T23:50:07.123Z") + ~N[2015-01-23 23:50:07.123] + iex> NaiveDateTime.from_iso8601!("2015-01-23T23:50:07,123Z") + ~N[2015-01-23 23:50:07.123] + iex> NaiveDateTime.from_iso8601!("2015-01-23P23:50:07") + ** (ArgumentError) cannot parse "2015-01-23P23:50:07" as naive datetime, reason: :invalid_format + + """ + @spec from_iso8601!(String.t, Calendar.calendar) :: t | no_return + def from_iso8601!(string, calendar \\ Calendar.ISO) do + case from_iso8601(string, calendar) do + {:ok, value} -> + value + {:error, reason} -> + raise ArgumentError, "cannot parse #{inspect string} as naive datetime, reason: #{inspect reason}" + end + end + + @doc """ + Converts the given naive datetime to + [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). 
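The offset portion of the input is handled by the `Calendar.ISO.parse_offset/1` helper added earlier in this diff (a `@doc false` internal, so not a stable API). Based on that implementation it behaves roughly as:

    iex> Calendar.ISO.parse_offset("+02:30")
    {9000, ""}
    iex> Calendar.ISO.parse_offset("-00:00")
    :error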
+ + By default, `NaiveDateTime.to_iso8601/2` returns naive datetimes formatted in the "extended" + format, for human readability. It also supports the "basic" format through passing the `:basic` option. + + Only supports converting naive datetimes which are in the ISO calendar, + attempting to convert naive datetimes from other calendars will raise. + + ### Examples + + iex> NaiveDateTime.to_iso8601(~N[2000-02-28 23:00:13]) + "2000-02-28T23:00:13" + + iex> NaiveDateTime.to_iso8601(~N[2000-02-28 23:00:13.001]) + "2000-02-28T23:00:13.001" + + iex> NaiveDateTime.to_iso8601(~N[2000-02-28 23:00:13.001], :basic) + "20000228T230013.001" + + This function can also be used to convert a DateTime to ISO8601 without + the time zone information: + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"} + iex> NaiveDateTime.to_iso8601(dt) + "2000-02-29T23:00:07" + + """ + @spec to_iso8601(Calendar.naive_datetime, :basic | :extended) :: String.t + def to_iso8601(naive_datetime, format \\ :extended) + + def to_iso8601(%{year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: + Calendar.ISO}, format) when format in [:basic, :extended] do + Calendar.ISO.naive_datetime_to_iso8601(year, month, day, hour, minute, second, microsecond, format) + end + + def to_iso8601(%{year: _, month: _, day: _, + hour: _, minute: _, second: _, microsecond: _, calendar: _} = naive_datetime, format) when format in [:basic, :extended] do + naive_datetime + |> convert!(Calendar.ISO) + |> to_iso8601(format) + end + + def to_iso8601(_date, format) do + raise ArgumentError, "NaiveDateTime.to_iso8601/2 expects format to be :extended or :basic, got: #{inspect format}" + end + + @doc """ + Converts a `NaiveDateTime` struct to an Erlang datetime tuple. + + Only supports converting naive datetimes which are in the ISO calendar, + attempting to convert naive datetimes from other calendars will raise. + + WARNING: Loss of precision may occur, as Erlang time tuples only store + hour/minute/second. + + ## Examples + + iex> NaiveDateTime.to_erl(~N[2000-01-01 13:30:15]) + {{2000, 1, 1}, {13, 30, 15}} + + This function can also be used to convert a DateTime to a erl format + without the time zone information: + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"} + iex> NaiveDateTime.to_erl(dt) + {{2000, 2, 29}, {23, 00, 07}} + + """ + @spec to_erl(t) :: :calendar.datetime + def to_erl(naive_datetime) + + @spec to_erl(Calendar.time) :: :calendar.time + def to_erl(%{calendar: _, year: _, month: _, day: _, + hour: _, minute: _, second: _} = naive_datetime) do + %{year: year, month: month, day: day, + hour: hour, minute: minute, second: second} = convert!(naive_datetime, Calendar.ISO) + {{year, month, day}, {hour, minute, second}} + end + + @doc """ + Converts an Erlang datetime tuple to a `NaiveDateTime` struct. + + Attempting to convert an invalid ISO calendar date will produce an error tuple. 
+ + ## Examples + + iex> NaiveDateTime.from_erl({{2000, 1, 1}, {13, 30, 15}}) + {:ok, ~N[2000-01-01 13:30:15]} + iex> NaiveDateTime.from_erl({{2000, 1, 1}, {13, 30, 15}}, {5000, 3}) + {:ok, ~N[2000-01-01 13:30:15.005]} + iex> NaiveDateTime.from_erl({{2000, 13, 1}, {13, 30, 15}}) + {:error, :invalid_date} + iex> NaiveDateTime.from_erl({{2000, 13, 1},{13, 30, 15}}) + {:error, :invalid_date} + """ + @spec from_erl(:calendar.datetime, Calendar.microsecond) :: {:ok, t} | {:error, atom} + def from_erl(tuple, microsecond \\ {0, 0}, calendar \\ Calendar.ISO) + + def from_erl({{year, month, day}, {hour, minute, second}}, microsecond, calendar) do + with {:ok, utc_date} <- new(year, month, day, hour, minute, second, microsecond), + do: convert(utc_date, calendar) + end + + @doc """ + Converts an Erlang datetime tuple to a `NaiveDateTime` struct. + + Raises if the datetime is invalid. + Attempting to convert an invalid ISO calendar date will produce an error tuple. + + ## Examples + + iex> NaiveDateTime.from_erl!({{2000, 1, 1}, {13, 30, 15}}) + ~N[2000-01-01 13:30:15] + iex> NaiveDateTime.from_erl!({{2000, 1, 1}, {13, 30, 15}}, {5000, 3}) + ~N[2000-01-01 13:30:15.005] + iex> NaiveDateTime.from_erl!({{2000, 13, 1}, {13, 30, 15}}) + ** (ArgumentError) cannot convert {{2000, 13, 1}, {13, 30, 15}} to naive datetime, reason: :invalid_date + """ + @spec from_erl!(:calendar.datetime, Calendar.microsecond) :: t | no_return + def from_erl!(tuple, microsecond \\ {0, 0}) do + case from_erl(tuple, microsecond) do + {:ok, value} -> + value + {:error, reason} -> + raise ArgumentError, "cannot convert #{inspect tuple} to naive datetime, reason: #{inspect reason}" + end + end + + @doc """ + Compares two `NaiveDateTime` structs. + + Returns `:gt` if first is later than the second + and `:lt` for vice versa. If the two NaiveDateTime + are equal `:eq` is returned. + + ## Examples + + iex> NaiveDateTime.compare(~N[2016-04-16 13:30:15], ~N[2016-04-28 16:19:25]) + :lt + iex> NaiveDateTime.compare(~N[2016-04-16 13:30:15.1], ~N[2016-04-16 13:30:15.01]) + :gt + + This function can also be used to compare a DateTime without + the time zone information: + + iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET", + ...> hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + ...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"} + iex> NaiveDateTime.compare(dt, ~N[2000-02-29 23:00:07]) + :eq + iex> NaiveDateTime.compare(dt, ~N[2000-01-29 23:00:07]) + :gt + iex> NaiveDateTime.compare(dt, ~N[2000-03-29 23:00:07]) + :lt + + """ + @spec compare(Calendar.naive_datetime, Calendar.naive_datetime) :: :lt | :eq | :gt + def compare(%{calendar: calendar1} = naive_datetime1, %{calendar: calendar2} = naive_datetime2) do + if Calendar.compatible_calendars?(calendar1, calendar2) do + case {to_rata_die(naive_datetime1), to_rata_die(naive_datetime2)} do + {first, second} when first > second -> :gt + {first, second} when first < second -> :lt + _ -> :eq + end + else + raise ArgumentError, """ + cannot compare #{inspect naive_datetime1} with #{inspect naive_datetime2}. + + This comparison would be ambiguous as their calendars have incompatible day rollover moments. + Specify an exact time of day (using `DateTime`s) to resolve this ambiguity + """ + end + end + + @doc """ + Converts a `NaiveDateTime` struct from one calendar to another. + + If it is not possible to convert unambiguously between the calendars + (see `Calendar.compatible_calendars?/2`), an `{:error, :incompatible_calendars}` tuple + is returned. 
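This is the pitfall the module documentation warns about: `==` compares struct fields (so the stored precision matters), while `compare/2` above compares the instants themselves. A small sketch:

    iex> ~N[2000-01-01 12:00:00.00] == ~N[2000-01-01 12:00:00]
    false
    iex> NaiveDateTime.compare(~N[2000-01-01 12:00:00.00], ~N[2000-01-01 12:00:00])
    :eq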
+ """ + @spec convert(NaiveDateTime.t, Calendar.calendar) :: {:ok, NaiveDateTime.t} | {:error, :incompatible_calendars} + def convert(%{calendar: calendar} = naive_datetime, calendar) do + {:ok, naive_datetime} + end + + def convert(%{calendar: ndt_calendar} = naive_datetime, calendar) do + if Calendar.compatible_calendars?(ndt_calendar, calendar) do + result_naive_datetime = + naive_datetime + |> to_rata_die + |> from_rata_die(calendar) + {:ok, result_naive_datetime} + else + {:error, :incompatible_calendars} + end + end + + @doc """ + Converts a NaiveDateTime from one calendar to another. + + If it is not possible to convert unambiguously between the calendars + (see `Calendar.compatible_calendars?/2`), an ArgumentError is raised. + """ + @spec convert!(NaiveDateTime.t, Calendar.calendar) :: NaiveDateTime.t + def convert!(naive_datetime, calendar) do + case convert(naive_datetime, calendar) do + {:ok, value} -> + value + {:error, :incompatible_calendars} -> + raise ArgumentError, "cannot convert #{inspect naive_datetime} to target calendar #{inspect calendar}, reason: #{inspect naive_datetime.calendar} and #{inspect calendar} have different day rollover moments, making this conversion ambiguous" + {:error, reason} -> + raise ArgumentError, "cannot convert #{inspect naive_datetime} to target calendar #{inspect calendar}, reason: #{inspect reason}" + end + end + + ## Helpers + + defp to_rata_die(%{calendar: calendar, year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond}) do + calendar.naive_datetime_to_rata_die(year, month, day, hour, minute, second, microsecond) + end + + defp from_rata_die(rata_die, calendar) do + {year, month, day, hour, minute, second, microsecond} = + calendar.naive_datetime_from_rata_die(rata_die) + %NaiveDateTime{year: year, month: month, day: day, hour: hour, minute: minute, second: second, + microsecond: microsecond, calendar: calendar} + end + + defp from_rata_die(rata_die, calendar, precision) do + {year, month, day, hour, minute, second, {microsecond, _}} = + calendar.naive_datetime_from_rata_die(rata_die) + %NaiveDateTime{year: year, month: month, day: day, hour: hour, minute: minute, second: second, + microsecond: {microsecond, precision}, calendar: calendar} + end + + defimpl String.Chars do + def to_string(%{calendar: calendar, year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond}) do + calendar.naive_datetime_to_string(year, month, day, hour, minute, second, microsecond) + end + end + + defimpl Inspect do + def inspect(%{calendar: Calendar.ISO, year: year, month: month, day: day, + hour: hour, minute: minute, second: second, microsecond: microsecond}, _) do + formatted = Calendar.ISO.naive_datetime_to_string(year, month, day, hour, minute, second, microsecond) + "~N[" <> formatted <> "]" + end + + def inspect(naive, opts) do + Inspect.Any.inspect(naive, opts) + end + end +end diff --git a/lib/elixir/lib/calendar/time.ex b/lib/elixir/lib/calendar/time.ex new file mode 100644 index 00000000000..d81249e9d04 --- /dev/null +++ b/lib/elixir/lib/calendar/time.ex @@ -0,0 +1,433 @@ +defmodule Time do + @moduledoc """ + A Time struct and functions. + + The Time struct contains the fields hour, minute, second and microseconds. 
+ New times can be built with the `new/4` function or using the `~T` + sigil: + + iex> ~T[23:00:07.001] + ~T[23:00:07.001] + + Both `new/4` and sigil return a struct where the time fields can + be accessed directly: + + iex> time = ~T[23:00:07.001] + iex> time.hour + 23 + iex> time.microsecond + {1000, 3} + + The functions on this module work with the `Time` struct as well + as any struct that contains the same fields as the `Time` struct, + such as `NaiveDateTime` and `DateTime`. Such functions expect + `t:Calendar.time/0` in their typespecs (instead of `t:t/0`). + + Remember, comparisons in Elixir using `==`, `>`, `<` and friends + are structural and based on the Time struct fields. For proper + comparison between times, use the `compare/2` function. + + Developers should avoid creating the Time struct directly and + instead rely on the functions provided by this module as well as + the ones in 3rd party calendar libraries. + """ + + @enforce_keys [:hour, :minute, :second] + defstruct [:hour, :minute, :second, microsecond: {0, 0}, calendar: Calendar.ISO] + + @type t :: %Time{hour: Calendar.hour, minute: Calendar.minute, + second: Calendar.second, microsecond: Calendar.microsecond, calendar: Calendar.calendar} + + @doc """ + Returns the current time in UTC. + + ## Examples + + iex> time = Time.utc_now() + iex> time.hour >= 0 + true + + """ + @spec utc_now(Calendar.calendar) :: t + def utc_now(calendar \\ Calendar.ISO) do + {:ok, _, {hour, minute, second}, microsecond} = Calendar.ISO.from_unix(:os.system_time, :native) + iso_time = %Time{hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: Calendar.ISO} + convert!(iso_time, calendar) + end + + @doc """ + Builds a new time. + + Expects all values to be integers. Returns `{:ok, time}` if each + entry fits its appropriate range, returns `{:error, reason}` otherwise. + + Note a time may have 60 seconds in case of leap seconds. + + ## Examples + + iex> Time.new(0, 0, 0, 0) + {:ok, ~T[00:00:00.000000]} + iex> Time.new(23, 59, 59, 999_999) + {:ok, ~T[23:59:59.999999]} + iex> Time.new(23, 59, 60, 999_999) + {:ok, ~T[23:59:60.999999]} + + # Time with microseconds and their precision + iex> Time.new(23, 59, 60, {10_000, 2}) + {:ok, ~T[23:59:60.01]} + + iex> Time.new(24, 59, 59, 999_999) + {:error, :invalid_time} + iex> Time.new(23, 60, 59, 999_999) + {:error, :invalid_time} + iex> Time.new(23, 59, 61, 999_999) + {:error, :invalid_time} + iex> Time.new(23, 59, 59, 1_000_000) + {:error, :invalid_time} + + """ + @spec new(Calendar.hour, Calendar.minute, Calendar.second, Calendar.microsecond, Calendar.calendar) :: + {:ok, Time.t} | {:error, atom} + def new(hour, minute, second, microsecond \\ {0, 0}, calendar \\ Calendar.ISO) + + def new(hour, minute, second, microsecond, calendar) when is_integer(microsecond) do + new(hour, minute, second, {microsecond, 6}, calendar) + end + + def new(hour, minute, second, {microsecond, precision}, calendar) + when is_integer(hour) and is_integer(minute) and is_integer(second) and + is_integer(microsecond) and is_integer(precision) do + case calendar.valid_time?(hour, minute, second, {microsecond, precision}) do + true -> + {:ok, %Time{hour: hour, minute: minute, second: second, microsecond: {microsecond, precision}, calendar: calendar}} + false -> + {:error, :invalid_time} + end + end + + @doc """ + Converts the given time to a string. 
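Note the first `new/5` clause above: a plain integer microsecond is promoted to full six-digit precision, so for example (sketch):

    iex> Time.new(12, 0, 0, 7)
    {:ok, ~T[12:00:00.000007]}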
+ + ### Examples + + iex> Time.to_string(~T[23:00:00]) + "23:00:00" + iex> Time.to_string(~T[23:00:00.001]) + "23:00:00.001" + iex> Time.to_string(~T[23:00:00.123456]) + "23:00:00.123456" + + iex> Time.to_string(~N[2015-01-01 23:00:00.001]) + "23:00:00.001" + iex> Time.to_string(~N[2015-01-01 23:00:00.123456]) + "23:00:00.123456" + + """ + @spec to_string(Calendar.time) :: String.t + def to_string(time) + + def to_string(%{hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: calendar}) do + calendar.time_to_string(hour, minute, second, microsecond) + end + + @doc """ + Parses the extended "Local time" format described by + [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). + + Timezone offset may be included in the string but they will be + simply discarded as such information is not included in times. + + As specified in the standard, the separator "T" may be omitted if + desired as there is no ambiguity within this function. + + Time representations with reduced accuracy are not supported. + + Note that while ISO8601 allows times to specify 24:00:00 as the + zero hour of the next day, this notation is not supported by Elixir. + + ## Examples + + iex> Time.from_iso8601("23:50:07") + {:ok, ~T[23:50:07]} + iex> Time.from_iso8601("23:50:07Z") + {:ok, ~T[23:50:07]} + iex> Time.from_iso8601("T23:50:07Z") + {:ok, ~T[23:50:07]} + + iex> Time.from_iso8601("23:50:07,0123456") + {:ok, ~T[23:50:07.012345]} + iex> Time.from_iso8601("23:50:07.0123456") + {:ok, ~T[23:50:07.012345]} + iex> Time.from_iso8601("23:50:07.123Z") + {:ok, ~T[23:50:07.123]} + + iex> Time.from_iso8601("2015:01:23 23-50-07") + {:error, :invalid_format} + iex> Time.from_iso8601("23:50:07A") + {:error, :invalid_format} + iex> Time.from_iso8601("23:50:07.") + {:error, :invalid_format} + iex> Time.from_iso8601("23:50:61") + {:error, :invalid_time} + + """ + @spec from_iso8601(String.t) :: {:ok, t} | {:error, atom} + def from_iso8601(string, calendar \\ Calendar.ISO) + + def from_iso8601(<>, calendar) when h in ?0..?9 do + from_iso8601(<>, calendar) + end + + def from_iso8601(<>, calendar) do + with {hour, ""} <- Integer.parse(hour), + {min, ""} <- Integer.parse(min), + {sec, ""} <- Integer.parse(sec), + {microsec, rest} <- Calendar.ISO.parse_microsecond(rest), + {_offset, ""} <- Calendar.ISO.parse_offset(rest) do + with {:ok, utc_time} <- new(hour, min, sec, microsec, Calendar.ISO), + do: convert(utc_time, calendar) + else + _ -> {:error, :invalid_format} + end + end + + def from_iso8601(<<_::binary>>, _calendar) do + {:error, :invalid_format} + end + + @doc """ + Parses the extended "Local time" format described by + [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). + + Raises if the format is invalid. + + ## Examples + + iex> Time.from_iso8601!("23:50:07,123Z") + ~T[23:50:07.123] + iex> Time.from_iso8601!("23:50:07.123Z") + ~T[23:50:07.123] + iex> Time.from_iso8601!("2015:01:23 23-50-07") + ** (ArgumentError) cannot parse "2015:01:23 23-50-07" as time, reason: :invalid_format + """ + @spec from_iso8601!(String.t) :: t | no_return + def from_iso8601!(string) do + case from_iso8601(string) do + {:ok, value} -> + value + {:error, reason} -> + raise ArgumentError, "cannot parse #{inspect string} as time, reason: #{inspect reason}" + end + end + + @doc """ + Converts the given time to + [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601). + + By default, `Time.to_iso8601/2` returns times formatted in the "extended" + format, for human readability. 
It also supports the "basic" format through passing the `:basic` option. + + ### Examples + + iex> Time.to_iso8601(~T[23:00:13]) + "23:00:13" + + iex> Time.to_iso8601(~T[23:00:13.001]) + "23:00:13.001" + + iex> Time.to_iso8601(~T[23:00:13.001], :basic) + "230013.001" + + """ + @spec to_iso8601(Time.t, :extended | :basic) :: String.t + def to_iso8601(time, format \\ :extended) + + def to_iso8601(%Time{} = time, format) when format in [:extended, :basic] do + %{hour: hour, minute: minute, second: second, microsecond: microsecond} = convert!(time, Calendar.ISO) + Calendar.ISO.time_to_iso8601(hour, minute, second, microsecond, format) + end + + def to_iso8601(%{hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: + Calendar.ISO}, format) when format in [:extended, :basic] do + IO.warn "calling Time.to_erl/1 with a DateTime or NaiveDateTime structs is deprecated, explicitly convert them into a Time first by using DateTime.to_time/1 or NaiveDateTime.to_time/1 respectively" + Calendar.ISO.time_to_iso8601(hour, minute, second, microsecond, format) + end + + def to_iso8601(_date, format) do + raise ArgumentError, "Time.to_iso8601/2 expects format to be :extended or :basic, got: #{inspect format}" + end + + @doc """ + Converts a `Time` struct to an Erlang time tuple. + + WARNING: Loss of precision may occur, as Erlang time tuples + only contain hours/minutes/seconds. + + ## Examples + + iex> Time.to_erl(~T[23:30:15.999]) + {23, 30, 15} + + """ + @spec to_erl(Time.t) :: :calendar.time + def to_erl(%Time{} = time) do + %{hour: hour, minute: minute, second: second} = convert!(time, Calendar.ISO) + {hour, minute, second} + end + + def to_erl(%{calendar: Calendar.ISO, hour: hour, minute: minute, second: second}) do + IO.warn "calling Time.to_erl/1 with a DateTime or NaiveDateTime structs is deprecated, explicitly convert them into a Time first by using DateTime.to_time/1 or NaiveDateTime.to_time/1 respectively" + {hour, minute, second} + end + + @doc """ + Converts an Erlang time tuple to a `Time` struct. + + ## Examples + + iex> Time.from_erl({23, 30, 15}, {5000, 3}) + {:ok, ~T[23:30:15.005]} + iex> Time.from_erl({24, 30, 15}) + {:error, :invalid_time} + + """ + @spec from_erl(:calendar.time, Calendar.microsecond, Calendar.calendar) :: {:ok, t} | {:error, atom} + def from_erl(tuple, microsecond \\ {0, 0}, calendar \\ Calendar.ISO) + + def from_erl({hour, minute, second}, microsecond, calendar) do + with {:ok, time} <- new(hour, minute, second, microsecond, Calendar.ISO), + do: convert(time, calendar) + end + + @doc """ + Converts an Erlang time tuple to a `Time` struct. + + ## Examples + + iex> Time.from_erl!({23, 30, 15}) + ~T[23:30:15] + iex> Time.from_erl!({23, 30, 15}, {5000, 3}) + ~T[23:30:15.005] + iex> Time.from_erl!({24, 30, 15}) + ** (ArgumentError) cannot convert {24, 30, 15} to time, reason: :invalid_time + + """ + @spec from_erl!(:calendar.time, Calendar.microsecond, Calendar.calendar) :: t | no_return + def from_erl!(tuple, microsecond \\ {0, 0}, calendar \\ Calendar.ISO) do + case from_erl(tuple, microsecond, calendar) do + {:ok, value} -> + value + {:error, reason} -> + raise ArgumentError, "cannot convert #{inspect tuple} to time, reason: #{inspect reason}" + end + end + + @doc """ + Compares two `Time` structs. + + Returns `:gt` if first time is later than the second + and `:lt` for vice versa. If the two times are equal + `:eq` is returned. 
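The precision warning on `to_erl/1` above can be seen by round-tripping through the Erlang tuple (sketch):

    iex> erl = Time.to_erl(~T[23:30:15.999])
    {23, 30, 15}
    iex> Time.from_erl!(erl)
    ~T[23:30:15]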
+ + ## Examples + + iex> Time.compare(~T[16:04:16], ~T[16:04:28]) + :lt + iex> Time.compare(~T[16:04:16.01], ~T[16:04:16.001]) + :gt + + This function can also be used to compare across more + complex calendar types by considering only the time fields: + + iex> Time.compare(~N[2015-01-01 16:04:16], ~N[2015-01-01 16:04:28]) + :lt + iex> Time.compare(~N[2015-01-01 16:04:16.01], ~N[2000-01-01 16:04:16.001]) + :gt + + """ + @spec compare(Calendar.time, Calendar.time) :: :lt | :eq | :gt + def compare(time1, time2) do + {parts1, ppd1} = to_day_fraction(time1) + {parts2, ppd2} = to_day_fraction(time2) + + case {parts1 * ppd2, parts2 * ppd1} do + {first, second} when first > second -> :gt + {first, second} when first < second -> :lt + _ -> :eq + end + end + + @doc """ + Converts the `Time` struct to a different calendar. + + Returns `{:ok, time}` if the conversion was successful, + or `{:error, reason}` if it was not, for some reason. + """ + @spec convert(Time.t, Calendar.calendar) :: {:ok, Time.t} | {:error, atom} + def convert(%Time{calendar: calendar} = time, calendar) do + {:ok, time} + end + + def convert(%Time{} = time, calendar) do + result_time = + time + |> to_day_fraction() + |> calendar.time_from_day_fraction + {:ok, result_time} + end + + @doc """ + Similar to `Time.convert/2`, but raises an `ArgumentError` + if the conversion between the two calendars is not possible. + """ + @spec convert!(Time.t, Calendar.calendar) :: Time.t + def convert!(time, calendar) do + case convert(time, calendar) do + {:ok, value} -> + value + {:error, reason} -> + raise ArgumentError, "cannot convert #{inspect time} to target calendar #{inspect calendar}, reason: #{inspect reason}" + end + end + + @doc """ + Returns the difference between two `Time` structs. + + The answer can be returned in any `unit` available from `t:System.time_unit/0`. + + This function returns the difference in seconds where seconds are measured + according to `Calendar.ISO`. + """ + @spec diff(Time.t, Time.t, System.time_unit) :: integer + def diff(%Time{} = time1, %Time{} = time2, unit \\ :second) do + fraction1 = to_day_fraction(time1) + fraction2 = to_day_fraction(time2) + Calendar.ISO.rata_die_to_unit({0, fraction1}, unit) - Calendar.ISO.rata_die_to_unit({0, fraction2}, unit) + end + + ## Helpers + + defp to_day_fraction(%{hour: hour, minute: minute, second: second, microsecond: {_, _} = microsecond, calendar: calendar}) do + calendar.time_to_day_fraction(hour, minute, second, microsecond) + end + + defp to_day_fraction(%{hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: calendar}) do + calendar.time_to_day_fraction(hour, minute, second, {microsecond, 0}) + end + + defimpl String.Chars do + def to_string(%{hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: calendar}) do + calendar.time_to_string(hour, minute, second, microsecond) + end + end + + defimpl Inspect do + def inspect(%{hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: Calendar.ISO}, _) do + "~T[" <> Calendar.ISO.time_to_string(hour, minute, second, microsecond) <> "]" + end + + def inspect(time, opts) do + Inspect.Any.inspect(time, opts) + end + end +end diff --git a/lib/elixir/lib/code.ex b/lib/elixir/lib/code.ex index 21ec4ac78d6..3d0e19c0b5c 100644 --- a/lib/elixir/lib/code.ex +++ b/lib/elixir/lib/code.ex @@ -2,67 +2,120 @@ defmodule Code do @moduledoc """ Utilities for managing code compilation, code evaluation and code loading. 
- This module complements [Erlang's code module](http://www.erlang.org/doc/man/code.html) - to add behaviour which is specific to Elixir. + This module complements Erlang's [`:code` module](http://www.erlang.org/doc/man/code.html) + to add behaviour which is specific to Elixir. Almost all of the functions in this module + have global side effects on the behaviour of Elixir. """ @doc """ - List all loaded files. + Lists all loaded files. + + ## Examples + + Code.require_file("../eex/test/eex_test.exs") + List.first(Code.loaded_files) =~ "eex_test.exs" #=> true + """ def loaded_files do :elixir_code_server.call :loaded end @doc """ - Remove files from the loaded files list. + Removes files from the loaded files list. The modules defined in the file are not removed; calling this function only removes them from the list, allowing them to be required again. + + ## Examples + + # Load EEx test code, unload file, check for functions still available + Code.load_file("../eex/test/eex_test.exs") + Code.unload_files(Code.loaded_files) + function_exported?(EExTest.Compiled, :before_compile, 0) #=> true + """ def unload_files(files) do :elixir_code_server.cast {:unload_files, files} end @doc """ - Append a path to the Erlang VM code path. + Appends a path to the end of the Erlang VM code path list. + + This is the list of directories the Erlang VM uses for + finding module code. The path is expanded with `Path.expand/1` before being appended. + If this path does not exist, an error is returned. + + ## Examples + + Code.append_path(".") #=> true + + Code.append_path("/does_not_exist") #=> {:error, :bad_directory} + """ def append_path(path) do - :code.add_pathz(to_char_list(Path.expand path)) + :code.add_pathz(to_charlist(Path.expand path)) end @doc """ - Prepend a path to the Erlang VM code path. + Prepends a path to the beginning of the Erlang VM code path list. + + This is the list of directories the Erlang VM uses for finding + module code. The path is expanded with `Path.expand/1` before being prepended. + If this path does not exist, an error is returned. + + ## Examples + + Code.prepend_path(".") #=> true + + Code.prepend_path("/does_not_exist") #=> {:error, :bad_directory} + """ def prepend_path(path) do - :code.add_patha(to_char_list(Path.expand path)) + :code.add_patha(to_charlist(Path.expand path)) end @doc """ - Delete a path from the Erlang VM code path. + Deletes a path from the Erlang VM code path list. This is the list of + directories the Erlang VM uses for finding module code. + + The path is expanded with `Path.expand/1` before being deleted. If the + path does not exist it returns `false`. + + ## Examples + + Code.prepend_path(".") + Code.delete_path(".") #=> true + + Code.delete_path("/does_not_exist") #=> false - The path is expanded with `Path.expand/1` before being deleted. """ def delete_path(path) do - :code.del_path(to_char_list(Path.expand path)) + :code.del_path(to_charlist(Path.expand path)) end @doc """ - Evaluate the contents given by `string`. + Evaluates the contents given by `string`. The `binding` argument is a keyword list of variable bindings. The `opts` argument is a keyword list of environment options. - Those options can be: + **Warning**: `string` can be any Elixir code and will be executed with + the same privileges as the Erlang VM: this means that such code could + compromise the machine (for example by executing system commands). + Don't use `eval_string/3` with untrusted input (such as strings coming + from the network). 
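To make the warning concrete: whatever is in the string runs with the full privileges of the VM, side effects included, so untrusted data must never be interpolated into it. A harmless, hypothetical sketch:

    Code.eval_string(~s[IO.puts("this runs with full access to the system")])
    #=> prints "this runs with full access to the system" and returns {:ok, []}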
+ + ## Options - * `:file` - the file to be considered in the evaluation - * `:line` - the line on which the script starts - * `:delegate_locals_to` - delegate local calls to the given module, - the default is to not delegate + Options can be: + + * `:file` - the file to be considered in the evaluation + * `:line` - the line on which the script starts Additionally, the following scope values can be configured: @@ -81,7 +134,7 @@ defmodule Code do Notice that setting any of the values above overrides Elixir's default values. For example, setting `:requires` to `[]`, will no longer automatically require the `Kernel` module; in the same way setting - `:macros` will no longer auto-import `Kernel` macros like `if`, `case`, + `:macros` will no longer auto-import `Kernel` macros like `if/2`, `case/2`, etc. Returns a tuple of the form `{value, binding}`, @@ -103,7 +156,7 @@ defmodule Code do iex> Code.eval_string("a = a + b", [a: 1, b: 2]) {3, [a: 3, b: 2]} - For convenience, you can pass `__ENV__` as the `opts` argument and + For convenience, you can pass `__ENV__/0` as the `opts` argument and all imports, requires and aliases defined in the current environment will be automatically carried over: @@ -114,20 +167,25 @@ defmodule Code do def eval_string(string, binding \\ [], opts \\ []) def eval_string(string, binding, %Macro.Env{} = env) do - {value, binding, _env, _scope} = :elixir.eval to_char_list(string), binding, Map.to_list(env) + {value, binding, _env, _scope} = :elixir.eval to_charlist(string), binding, Map.to_list(env) {value, binding} end def eval_string(string, binding, opts) when is_list(opts) do validate_eval_opts(opts) - {value, binding, _env, _scope} = :elixir.eval to_char_list(string), binding, opts + {value, binding, _env, _scope} = :elixir.eval to_charlist(string), binding, opts {value, binding} end @doc """ - Evaluate the quoted contents. + Evaluates the quoted contents. + + **Warning**: Calling this function inside a macro is considered bad + practice as it will attempt to evaluate runtime values at compile time. + Macro arguments are typically transformed by unquoting them into the + returned quoted expressions (instead of evaluated). - See `eval_string/3` for a description of arguments and return values. + See `eval_string/3` for a description of bindings and options. ## Examples @@ -135,7 +193,7 @@ defmodule Code do iex> Code.eval_quoted(contents, [a: 1, b: 2], file: __ENV__.file, line: __ENV__.line) {3, [a: 1, b: 2]} - For convenience, you can pass `__ENV__` as the `opts` argument and + For convenience, you can pass `__ENV__/0` as the `opts` argument and all options will be automatically extracted from the current environment: iex> contents = quote(do: var!(a) + var!(b)) @@ -194,7 +252,7 @@ defmodule Code do end @doc """ - Convert the given string to its quoted form. + Converts the given string to its quoted form. Returns `{:ok, quoted_form}` if it succeeds, `{:error, {line, error, token}}` otherwise. 
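For reference, the success case returns the quoted Elixir AST (sketch):

    iex> Code.string_to_quoted("1 + 2")
    {:ok, {:+, [line: 1], [1, 2]}}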
@@ -202,9 +260,9 @@ defmodule Code do ## Options * `:file` - the filename to be used in stacktraces - and the file reported in the `__ENV__` variable + and the file reported in the `__ENV__/0` macro - * `:line` - the line reported in the `__ENV__` variable + * `:line` - the line reported in the `__ENV__/0` macro * `:existing_atoms_only` - when `true`, raises an error when non-existing atoms are found by the tokenizer @@ -218,11 +276,11 @@ defmodule Code do def string_to_quoted(string, opts \\ []) when is_list(opts) do file = Keyword.get opts, :file, "nofile" line = Keyword.get opts, :line, 1 - :elixir.string_to_quoted(to_char_list(string), line, file, opts) + :elixir.string_to_quoted(to_charlist(string), line, file, opts) end @doc """ - Convert the given string to its quoted form. + Converts the given string to its quoted form. It returns the ast if it succeeds, raises an exception otherwise. The exception is a `TokenMissingError` @@ -234,7 +292,7 @@ defmodule Code do def string_to_quoted!(string, opts \\ []) when is_list(opts) do file = Keyword.get opts, :file, "nofile" line = Keyword.get opts, :line, 1 - :elixir.string_to_quoted!(to_char_list(string), line, file, opts) + :elixir.string_to_quoted!(to_charlist(string), line, file, opts) end @doc """ @@ -243,7 +301,7 @@ defmodule Code do Accepts `relative_to` as an argument to tell where the file is located. While `load_file` loads a file and returns the loaded modules and their - byte code, `eval_file` simply evalutes the file contents and returns the + byte code, `eval_file` simply evaluates the file contents and returns the evaluation result and its bindings. """ def eval_file(file, relative_to \\ nil) do @@ -252,7 +310,7 @@ defmodule Code do end @doc """ - Load the given file. + Loads the given file. Accepts `relative_to` as an argument to tell where the file is located. If the file was already required/loaded, loads it again. @@ -263,6 +321,12 @@ defmodule Code do Notice that if `load_file` is invoked by different processes concurrently, the target file will be loaded concurrently many times. Check `require_file/2` if you don't want a file to be loaded concurrently. + + ## Examples + + Code.load_file("eex_test.exs", "../eex/test") |> List.first + #=> {EExTest.Compiled, <<70, 79, 82, 49, ...>>} + """ def load_file(file, relative_to \\ nil) when is_binary(file) do file = find_file(file, relative_to) @@ -285,7 +349,20 @@ defmodule Code do N times with a given file, it will be loaded only once. The first process to call `require_file` will get the list of loaded modules, others will get `nil`. - Check `load_file/2` if you want a file to be loaded multiple times. + Check `load_file/2` if you want a file to be loaded multiple times. See also + `unload_files/1` + + ## Examples + + If the code is already loaded, it returns `nil`: + + Code.require_file("eex_test.exs", "../eex/test") #=> nil + + If the code is not loaded yet, it returns the same as `load_file/2`: + + Code.require_file("eex_test.exs", "../eex/test") |> List.first + #=> {EExTest.Compiled, <<70, 79, 82, 49, ...>>} + """ def require_file(file, relative_to \\ nil) when is_binary(file) do file = find_file(file, relative_to) @@ -306,18 +383,31 @@ defmodule Code do Gets the compilation options from the code server. Check `compiler_options/1` for more information. 
+ + ## Examples + + Code.compiler_options + #=> %{debug_info: true, docs: true, + warnings_as_errors: false, ignore_module_conflict: false} + """ def compiler_options do - :elixir_code_server.call :compiler_options + :elixir_config.get :compiler_options end @doc """ Returns a list with the available compiler options. See `Code.compiler_options/1` for more info. + + ## Examples + + iex> Code.available_compiler_options + [:docs, :debug_info, :ignore_module_conflict, :relative_paths, :warnings_as_errors] + """ def available_compiler_options do - [:docs, :debug_info, :ignore_module_conflict, :warnings_as_errors] + [:docs, :debug_info, :ignore_module_conflict, :relative_paths, :warnings_as_errors] end @doc """ @@ -337,17 +427,37 @@ defmodule Code do * `:ignore_module_conflict` - when `true`, override modules that were already defined without raising errors, `false` by default - * `:warnings_as_errors` - cause compilation to fail when warnings are + * `:relative_paths` - when `true`, use relative paths in quoted nodes, + warnings and errors generated by the compiler, `true` by default. + Note disabling this option won't affect runtime warnings and errors. + + * `:warnings_as_errors` - causes compilation to fail when warnings are generated + It returns the new list of compiler options. + + ## Examples + + Code.compiler_options(debug_info: true) + #=> %{debug_info: true, docs: true, + warnings_as_errors: false, ignore_module_conflict: false} + """ def compiler_options(opts) do - {opts, bad} = Keyword.split(opts, available_compiler_options) - if bad != [] do - bad = bad |> Keyword.keys |> Enum.join(", ") - raise ArgumentError, message: "unknown compiler options: #{bad}" - end - :elixir_code_server.cast {:compiler_options, opts} + available = available_compiler_options() + + Enum.each(opts, fn({key, value}) -> + cond do + key not in available -> + raise "unknown compiler option: #{inspect(key)}" + not is_boolean(value) -> + raise "compiler option #{inspect(key)} should be a boolean, got: #{inspect(value)}" + true -> + :ok + end + end) + + :elixir_config.update :compiler_options, &Enum.into(opts, &1) end @doc """ @@ -359,7 +469,7 @@ defmodule Code do For compiling many files at once, check `Kernel.ParallelCompiler.files/2`. """ def compile_string(string, file \\ "nofile") when is_binary(file) do - :elixir_compiler.string to_char_list(string), file + :elixir_compiler.string to_charlist(string), file end @doc """ @@ -394,7 +504,7 @@ defmodule Code do module uses this function to check if a specific parser exists for a given URI scheme. - ## `Code.ensure_compiled/1` + ## `ensure_compiled/1` Elixir also contains an `ensure_compiled/1` function that is a superset of `ensure_loaded/1`. @@ -403,13 +513,29 @@ defmodule Code do you may need to use a module that was not yet compiled, therefore it can't even be loaded. - `ensure_compiled/1` halts the current process until the - module we are depending on is available. + When invoked, `ensure_compiled/1` halts the compilation of the caller + until the module given to `ensure_compiled/1` becomes available or + all files for the current project have been compiled. If compilation + finishes and the module is not available, an error tuple is returned. + + `ensure_compiled/1` does not apply to dependencies, as dependencies + must be compiled upfront. In most cases, `ensure_loaded/1` is enough. `ensure_compiled/1` must be used in rare cases, usually involving macros that need to invoke a module for callback information. 
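Because `compiler_options/1` above now validates every key and value, passing a non-boolean raises instead of being silently accepted (a sketch based on the validation code in this diff):

    Code.compiler_options(debug_info: "yes")
    #=> ** (RuntimeError) compiler option :debug_info should be a boolean, got: "yes"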
+ + ## Examples + + iex> Code.ensure_loaded(Atom) + {:module, Atom} + + iex> Code.ensure_loaded(DoesNotExist) + {:error, :nofile} + """ + @spec ensure_loaded(module) :: + {:module, module} | {:error, :embedded | :badfile | :nofile | :on_load_failure} def ensure_loaded(module) when is_atom(module) do :code.ensure_loaded(module) end @@ -420,8 +546,14 @@ defmodule Code do Similar to `ensure_loaded/1`, but returns `true` if the module is already loaded or was successfully loaded. Returns `false` otherwise. + + ## Examples + + iex> Code.ensure_loaded?(Atom) + true + """ - def ensure_loaded?(module) do + def ensure_loaded?(module) when is_atom(module) do match?({:module, ^module}, ensure_loaded(module)) end @@ -438,18 +570,16 @@ defmodule Code do Check `ensure_loaded/1` for more information on module loading and when to use `ensure_loaded/1` or `ensure_compiled/1`. """ + @spec ensure_compiled(module) :: + {:module, module} | {:error, :embedded | :badfile | :nofile | :on_load_failure} def ensure_compiled(module) when is_atom(module) do case :code.ensure_loaded(module) do {:error, :nofile} = error -> - case :erlang.get(:elixir_ensure_compiled) do - :undefined -> error - _ -> - try do - module.__info__(:module) - {:module, module} - rescue - UndefinedFunctionError -> error - end + if is_pid(:erlang.get(:elixir_compiler_pid)) and + Kernel.ErrorHandler.ensure_compiled(module, :module) do + {:module, module} + else + error end other -> other end @@ -462,11 +592,12 @@ defmodule Code do is already loaded or was successfully loaded and compiled. Returns `false` otherwise. """ - def ensure_compiled?(module) do + @spec ensure_compiled?(module) :: boolean + def ensure_compiled?(module) when is_atom(module) do match?({:module, ^module}, ensure_compiled(module)) end - @doc """ + @doc ~S""" Returns the docs for the given module. When given a module name, it finds its BEAM code and reads the docs from it. @@ -483,10 +614,32 @@ defmodule Code do which module definition starts and `doc` is the string attached to the module using the `@moduledoc` attribute - * `:all` - a keyword list with both `:docs` and `:moduledoc` + * `:callback_docs` - list of all docstrings attached to + `@callbacks` using the `@doc` attribute + + * `:type_docs` - list of all docstrings attached to + `@type` callbacks using the `@typedoc` attribute + + * `:all` - a keyword list with `:docs` and `:moduledoc`, `:callback_docs`, + and `:type_docs`. + + If the module cannot be found, it returns `nil`. + + ## Examples + + # Get the module documentation + iex> {_line, text} = Code.get_docs(Atom, :moduledoc) + iex> String.split(text, "\n") |> Enum.at(0) + "Convenience functions for working with atoms." 
+ + # Module doesn't exist + iex> Code.get_docs(ModuleNotGood, :all) + nil """ - def get_docs(module, kind) when is_atom(module) do + @doc_kinds [:docs, :moduledoc, :callback_docs, :type_docs, :all] + + def get_docs(module, kind) when is_atom(module) and kind in @doc_kinds do case :code.get_object_code(module) do {_module, bin, _beam_path} -> do_get_docs(bin, kind) @@ -495,8 +648,8 @@ defmodule Code do end end - def get_docs(binpath, kind) when is_binary(binpath) do - do_get_docs(String.to_char_list(binpath), kind) + def get_docs(binpath, kind) when is_binary(binpath) and kind in @doc_kinds do + do_get_docs(String.to_charlist(binpath), kind) end @docs_chunk 'ExDc' @@ -517,7 +670,7 @@ defmodule Code do defp lookup_docs(_, _), do: nil defp do_lookup_docs(docs, :all), do: docs - defp do_lookup_docs(docs, kind) when kind in [:docs, :moduledoc], + defp do_lookup_docs(docs, kind), do: Keyword.get(docs, kind) ## Helpers diff --git a/lib/elixir/lib/collectable.ex b/lib/elixir/lib/collectable.ex index 426f290a4e7..541ad7a72f1 100644 --- a/lib/elixir/lib/collectable.ex +++ b/lib/elixir/lib/collectable.ex @@ -8,18 +8,12 @@ defprotocol Collectable do iex> Enum.into([a: 1, b: 2], %{}) %{a: 1, b: 2} - If a collection implements both `Enumerable` and `Collectable`, both - operations can be combined with `Enum.traverse/2`: - - iex> Enum.traverse(%{a: 1, b: 2}, fn {k, v} -> {k, v * 2} end) - %{a: 2, b: 4} - ## Why Collectable? The `Enumerable` protocol is useful to take values out of a collection. In order to support a wide range of values, the functions provided by the `Enumerable` protocol do not keep shape. For example, passing a - dictionary to `Enum.map/2` always returns a list. + map to `Enum.map/2` always returns a list. This design is intentional. `Enumerable` was designed to support infinite collections, resources and other structures with fixed shape. For example, @@ -27,51 +21,66 @@ defprotocol Collectable do shape where just the range limits are stored. The `Collectable` module was designed to fill the gap left by the - `Enumerable` protocol. It provides two functions: `into/1` and `empty/1`. + `Enumerable` protocol. `into/1` can be seen as the opposite of + `Enumerable.reduce/3`. If `Enumerable` is about taking values out, + `Collectable.into/1` is about collecting those values into a structure. + + ## Examples + + To show how to manually use the `Collectable` protocol, let's play with its + implementation for `MapSet`. + + iex> {initial_acc, collector_fun} = Collectable.into(MapSet.new()) + iex> updated_acc = Enum.reduce([1, 2, 3], initial_acc, fn elem, acc -> + ...> collector_fun.(acc, {:cont, elem}) + ...> end) + iex> collector_fun.(updated_acc, :done) + #MapSet<[1, 2, 3]> + + To show how the protocol can be implemented, we can take again a look at the + implementation for `MapSet`. In this implementation "collecting" elements + simply means inserting them in the set through `MapSet.put/2`. + + defimpl Collectable do + def into(original) do + collector_fun = fn + set, {:cont, elem} -> MapSet.put(set, elem) + set, :done -> set + _set, :halt -> :ok + end + + {original, collector_fun} + end + end - `into/1` can be seen as the opposite of `Enumerable.reduce/3`. If - `Enumerable` is about taking values out, `Collectable.into/1` is about - collecting those values into a structure. - - `empty/1` receives a collectable and returns an empty version of the - same collectable. 
By combining the enumerable functionality with `into/1` - and `empty/1`, one can, for example, implement a traversal mechanism. """ @type command :: {:cont, term} | :done | :halt @doc """ - Receives a collectable structure and returns an empty one. - """ - @spec empty(t) :: t - def empty(collectable) + Returns an initial accumulator and a "collector" function. - @doc """ - Returns a function that collects values alongside - the initial accumulation value. + The returned function receives a term and a command and injects the term into + the collectable on every `{:cont, term}` command. - The returned function receives a collectable and injects a given - value into it for every `{:cont, term}` instruction. + `:done` is passed as a command when no further values will be injected. This + is useful when there's a need to close resources or normalizing values. A + collectable must be returned when the command is `:done`. - `:done` is passed when no further values will be injected, useful - for closing resources and normalizing values. A collectable must - be returned on `:done`. + If injection is suddenly interrupted, `:halt` is passed and the function + can return any value as it won't be used. - If injection is suddenly interrupted, `:halt` is passed and it can - return any value, as it won't be used. + For examples on how to use the `Collectable` protocol and `into/1` see the + module documentation. """ @spec into(t) :: {term, (term, command -> t | term)} def into(collectable) end defimpl Collectable, for: List do - def empty(_list) do - [] - end - def into(original) do {[], fn - list, {:cont, x} -> [x|list] + list, {:cont, x} -> [x | list] list, :done -> original ++ :lists.reverse(list) _, :halt -> :ok end} @@ -79,34 +88,16 @@ defimpl Collectable, for: List do end defimpl Collectable, for: BitString do - def empty(_bitstring) do - "" - end - def into(original) do {original, fn - bitstring, {:cont, x} -> <> - bitstring, :done -> bitstring + acc, {:cont, x} when is_bitstring(x) -> [acc | x] + acc, :done -> IO.iodata_to_binary(acc) _, :halt -> :ok end} end end -defimpl Collectable, for: Function do - def empty(function) do - function - end - - def into(function) do - {function, function} - end -end - defimpl Collectable, for: Map do - def empty(_map) do - %{} - end - def into(original) do {original, fn map, {:cont, {k, v}} -> :maps.put(k, v, map) diff --git a/lib/elixir/lib/dict.ex b/lib/elixir/lib/dict.ex index 3f3b5d3e444..43ff8818cc5 100644 --- a/lib/elixir/lib/dict.ex +++ b/lib/elixir/lib/dict.ex @@ -1,139 +1,26 @@ defmodule Dict do @moduledoc ~S""" - This module specifies the Dict API expected to be - implemented by different dictionaries. It also provides - functions that redirect to the underlying Dict, allowing - a developer to work with different Dict implementations - using one API. + WARNING: this module is deprecated. - To create a new dict, use the `new` functions defined - by each dict type: - - HashDict.new #=> creates an empty HashDict - - In the examples below, `dict_impl` means a specific - `Dict` implementation, for example `HashDict` or `Map`. - - ## Protocols - - Besides implementing the functions in this module, all - dictionaries are required to implement the `Access` - protocol: - - iex> dict = dict_impl.new - iex> dict = Dict.put(dict, :hello, :world) - iex> dict[:hello] - :world - - As well as the `Enumerable` and `Collectable` protocols. - - ## Match - - Dictionaries are required to implement all operations - using the match (`===`) operator. 
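The reworked `BitString` implementation above accumulates iodata and only builds the final binary on `:done` (its guard also requires every element to be a bitstring); from the caller's side the usual `Enum.into/2` behaviour is unchanged (sketch):

    iex> Enum.into(["ab", "cd", "ef"], "")
    "abcdef"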
- - ## Default implementation - - Default implementations for some functions in the `Dict` module - are provided via `use Dict`. - - For example: - - defmodule MyDict do - use Dict - - # implement required functions (see below) - # override default implementations if optimization - # is needed - end - - The client module must contain the following functions: - - * `delete/2` - * `fetch/2` - * `put/3` - * `reduce/3` - * `size/1` - - All functions, except `reduce/3`, are required by the Dict behaviour. - `reduce/3` must be implemtented as per the Enumerable protocol. - - Based on these functions, `Dict` generates default implementations - for the following functions: - - * `drop/2` - * `equal?/2` - * `fetch!/2` - * `get/2` - * `get/3` - * `has_key?/2` - * `keys/1` - * `merge/2` - * `merge/3` - * `pop/2` - * `pop/3` - * `put_new/3` - * `split/2` - * `take/2` - * `to_list/1` - * `update/4` - * `update!/3` - * `values/1` - - All of these functions are defined as overridable, so you can provide - your own implementation if needed. - - Note you can also test your custom module via `Dict`'s doctests: - - defmodule MyDict do - # ... - end - - defmodule MyTests do - use ExUnit.Case - doctest Dict - defp dict_impl, do: MyDict - end + If you need a general dictionary, use the `Map` module. + If you need to manipulate keyword lists, use `Keyword`. + To convert maps into keywords and vice-versa, use the + `new` function in the respective modules. """ - use Behaviour - @type key :: any @type value :: any @type t :: list | map - defcallback new :: t - defcallback delete(t, key) :: t - defcallback drop(t, Enum.t) :: t - defcallback equal?(t, t) :: boolean - defcallback get(t, key) :: value - defcallback get(t, key, value) :: value - defcallback fetch(t, key) :: {:ok, value} | :error - defcallback fetch!(t, key) :: value | no_return - defcallback has_key?(t, key) :: boolean - defcallback keys(t) :: [key] - defcallback merge(t, t) :: t - defcallback merge(t, t, (key, value, value -> value)) :: t - defcallback pop(t, key) :: {value, t} - defcallback pop(t, key, value) :: {value, t} - defcallback put(t, key, value) :: t - defcallback put_new(t, key, value) :: t - defcallback size(t) :: non_neg_integer() - defcallback split(t, Enum.t) :: {t, t} - defcallback take(t, Enum.t) :: t - defcallback to_list(t) :: list() - defcallback update(t, key, value, (value -> value)) :: t - defcallback update!(t, key, (value -> value)) :: t | no_return - defcallback values(t) :: list(value) + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) defmacro __using__(_) do # Use this import to guarantee proper code expansion import Kernel, except: [size: 1] quote do - @behaviour Dict - def get(dict, key, default \\ nil) do case fetch(dict, key) do {:ok, value} -> value @@ -141,6 +28,19 @@ defmodule Dict do end end + def get_lazy(dict, key, fun) when is_function(fun, 0) do + case fetch(dict, key) do + {:ok, value} -> value + :error -> fun.() + end + end + + def get_and_update(dict, key, fun) do + current_value = get(dict, key) + {get, new_value} = fun.(current_value) + {get, put(dict, key, new_value)} + end + def fetch!(dict, key) do case fetch(dict, key) do {:ok, value} -> value @@ -159,12 +59,19 @@ defmodule Dict do end end + def put_new_lazy(dict, key, fun) when is_function(fun, 0) do + case has_key?(dict, key) do + true -> dict + false -> put(dict, key, fun.()) + end + end + def drop(dict, keys) do Enum.reduce(keys, dict, &delete(&2, &1)) end def take(dict, keys) do - Enum.reduce(keys, new, fn key, acc -> + 
Enum.reduce(keys, new(), fn key, acc -> case fetch(dict, key) do {:ok, value} -> put(acc, key, value) :error -> acc @@ -174,19 +81,19 @@ defmodule Dict do def to_list(dict) do reduce(dict, {:cont, []}, fn - kv, acc -> {:cont, [kv|acc]} + kv, acc -> {:cont, [kv | acc]} end) |> elem(1) |> :lists.reverse end def keys(dict) do reduce(dict, {:cont, []}, fn - {k, _}, acc -> {:cont, [k|acc]} + {k, _}, acc -> {:cont, [k | acc]} end) |> elem(1) |> :lists.reverse end def values(dict) do reduce(dict, {:cont, []}, fn - {_, v}, acc -> {:cont, [v|acc]} + {_, v}, acc -> {:cont, [v | acc]} end) |> elem(1) |> :lists.reverse end @@ -248,8 +155,17 @@ defmodule Dict do end end + def pop_lazy(dict, key, fun) when is_function(fun, 0) do + case fetch(dict, key) do + {:ok, value} -> + {value, delete(dict, key)} + :error -> + {fun.(), dict} + end + end + def split(dict, keys) do - Enum.reduce(keys, {new, dict}, fn key, {inc, exc} = acc -> + Enum.reduce(keys, {new(), dict}, fn key, {inc, exc} = acc -> case fetch(exc, key) do {:ok, value} -> {put(inc, key, value), delete(exc, key)} @@ -262,11 +178,11 @@ defmodule Dict do defoverridable merge: 2, merge: 3, equal?: 2, to_list: 1, keys: 1, values: 1, take: 2, drop: 2, get: 2, get: 3, fetch!: 2, has_key?: 2, put_new: 3, pop: 2, pop: 3, split: 2, - update: 4, update!: 3 + update: 4, update!: 3, get_and_update: 3, get_lazy: 3, + pop_lazy: 3, put_new_lazy: 3 end end - defmacrop target(dict) do quote do case unquote(dict) do @@ -282,380 +198,141 @@ defmodule Dict do end end - @doc """ - Returns a list of all keys in `dict`. - The keys are not guaranteed to be in any order. - - ## Examples - - iex> d = Enum.into([a: 1, b: 2], dict_impl.new) - iex> Enum.sort(Dict.keys(d)) - [:a,:b] - - """ @spec keys(t) :: [key] def keys(dict) do target(dict).keys(dict) end - @doc """ - Returns a list of all values in `dict`. - The values are not guaranteed to be in any order. - - ## Examples - - iex> d = Enum.into([a: 1, b: 2], dict_impl.new) - iex> Enum.sort(Dict.values(d)) - [1,2] - - """ @spec values(t) :: [value] def values(dict) do target(dict).values(dict) end - @doc """ - Returns the number of elements in `dict`. - - ## Examples - - iex> d = Enum.into([a: 1, b: 2], dict_impl.new) - iex> Dict.size(d) - 2 - - """ @spec size(t) :: non_neg_integer def size(dict) do target(dict).size(dict) end - @doc """ - Returns whether the given `key` exists in the given `dict`. - - ## Examples - - iex> d = Enum.into([a: 1], dict_impl.new) - iex> Dict.has_key?(d, :a) - true - iex> Dict.has_key?(d, :b) - false - - """ @spec has_key?(t, key) :: boolean def has_key?(dict, key) do target(dict).has_key?(dict, key) end - @doc """ - Returns the value associated with `key` in `dict`. If `dict` does not - contain `key`, returns `default` (or `nil` if not provided). - - ## Examples - - iex> d = Enum.into([a: 1], dict_impl.new) - iex> Dict.get(d, :a) - 1 - iex> Dict.get(d, :b) - nil - iex> Dict.get(d, :b, 3) - 3 - """ @spec get(t, key, value) :: value def get(dict, key, default \\ nil) do target(dict).get(dict, key, default) end - @doc """ - Returns `{:ok, value}` associated with `key` in `dict`. - If `dict` does not contain `key`, returns `:error`. 
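Since `Dict` is deprecated in this release, the same `fetch` contract is available directly on the concrete modules; for example, with `Map` (illustrative, not part of the patch):

    iex> Map.fetch(%{a: 1}, :a)
    {:ok, 1}
    iex> Map.fetch(%{a: 1}, :b)
    :error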
- - ## Examples + @spec get_lazy(t, key, (() -> value)) :: value + def get_lazy(dict, key, fun) do + target(dict).get_lazy(dict, key, fun) + end - iex> d = Enum.into([a: 1], dict_impl.new) - iex> Dict.fetch(d, :a) - {:ok, 1} - iex> Dict.fetch(d, :b) - :error + @spec get_and_update(t, key, (value -> {value, value})) :: {value, t} + def get_and_update(dict, key, fun) do + target(dict).get_and_update(dict, key, fun) + end - """ @spec fetch(t, key) :: value def fetch(dict, key) do target(dict).fetch(dict, key) end - @doc """ - Returns the value associated with `key` in `dict`. If `dict` does not - contain `key`, it raises `KeyError`. - - ## Examples - - iex> d = Enum.into([a: 1], dict_impl.new) - iex> Dict.fetch!(d, :a) - 1 - - """ @spec fetch!(t, key) :: value | no_return def fetch!(dict, key) do target(dict).fetch!(dict, key) end - @doc """ - Stores the given `value` under `key` in `dict`. - If `dict` already has `key`, the stored value is replaced by the new one. - - ## Examples - - iex> d = Enum.into([a: 1, b: 2], dict_impl.new) - iex> d = Dict.put(d, :a, 3) - iex> Dict.get(d, :a) - 3 - - """ @spec put(t, key, value) :: t def put(dict, key, val) do target(dict).put(dict, key, val) end - @doc """ - Puts the given `value` under `key` in `dict` unless `key` already exists. - - ## Examples - - iex> d = Enum.into([a: 1, b: 2], dict_impl.new) - iex> d = Dict.put_new(d, :a, 3) - iex> Dict.get(d, :a) - 1 - - """ @spec put_new(t, key, value) :: t def put_new(dict, key, val) do target(dict).put_new(dict, key, val) end - @doc """ - Removes the entry stored under the given `key` from `dict`. - If `dict` does not contain `key`, returns the dictionary unchanged. - - ## Examples - - iex> d = Enum.into([a: 1, b: 2], dict_impl.new) - iex> d = Dict.delete(d, :a) - iex> Dict.get(d, :a) - nil - - iex> d = Enum.into([b: 2], dict_impl.new) - iex> Dict.delete(d, :a) == d - true + @spec put_new_lazy(t, key, (() -> value)) :: t + def put_new_lazy(dict, key, fun) do + target(dict).put_new_lazy(dict, key, fun) + end - """ @spec delete(t, key) :: t def delete(dict, key) do target(dict).delete(dict, key) end - @doc """ - Merges the dict `b` into dict `a`. - - If one of the dict `b` entries already exists in the `dict`, - the functions in entries in `b` have higher precedence unless a - function is given to resolve conflicts. - - Notice this function is polymorphic as it merges dicts of any - type. Each dict implementation also provides a `merge` function, - but they can only merge dicts of the same type. 
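With `Dict` deprecated, the polymorphic merge described above is no longer needed for the common case; `Map.merge/3` covers merging maps, including a conflict-resolution function (illustrative, not part of the patch):

    iex> Map.merge(%{a: 1, b: 2}, %{a: 3, d: 4}, fn _key, v1, v2 -> v1 + v2 end)
    %{a: 4, b: 2, d: 4}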
- - ## Examples - - iex> d1 = Enum.into([a: 1, b: 2], dict_impl.new) - iex> d2 = Enum.into([a: 3, d: 4], dict_impl.new) - iex> d = Dict.merge(d1, d2) - iex> [a: Dict.get(d, :a), b: Dict.get(d, :b), d: Dict.get(d, :d)] - [a: 3, b: 2, d: 4] + @spec merge(t, t) :: t + def merge(dict1, dict2) do + target1 = target(dict1) + target2 = target(dict2) - iex> d1 = Enum.into([a: 1, b: 2], dict_impl.new) - iex> d2 = Enum.into([a: 3, d: 4], dict_impl.new) - iex> d = Dict.merge(d1, d2, fn(_k, v1, v2) -> - ...> v1 + v2 - ...> end) - iex> [a: Dict.get(d, :a), b: Dict.get(d, :b), d: Dict.get(d, :d)] - [a: 4, b: 2, d: 4] + if target1 == target2 do + target1.merge(dict1, dict2) + else + do_merge(target1, dict1, dict2, fn(_k, _v1, v2) -> v2 end) + end + end - """ @spec merge(t, t, (key, value, value -> value)) :: t - def merge(dict1, dict2, fun \\ fn(_k, _v1, v2) -> v2 end) do + def merge(dict1, dict2, fun) do target1 = target(dict1) target2 = target(dict2) if target1 == target2 do target1.merge(dict1, dict2, fun) else - Enumerable.reduce(dict2, {:cont, dict1}, fn({k, v}, acc) -> - {:cont, target1.update(acc, k, v, fn(other) -> fun.(k, other, v) end)} - end) |> elem(1) + do_merge(target1, dict1, dict2, fun) end end - @doc """ - Returns the value associated with `key` in `dict` as - well as the `dict` without `key`. - - ## Examples - - iex> dict = Enum.into([a: 1], dict_impl.new) - iex> {v, d} = Dict.pop dict, :a - iex> {v, Enum.sort(d)} - {1,[]} - - iex> dict = Enum.into([a: 1], dict_impl.new) - iex> {v, d} = Dict.pop dict, :b - iex> {v, Enum.sort(d)} - {nil,[a: 1]} - - iex> dict = Enum.into([a: 1], dict_impl.new) - iex> {v, d} = Dict.pop dict, :b, 3 - iex> {v, Enum.sort(d)} - {3,[a: 1]} + defp do_merge(target1, dict1, dict2, fun) do + Enumerable.reduce(dict2, {:cont, dict1}, fn({k, v}, acc) -> + {:cont, target1.update(acc, k, v, fn(other) -> fun.(k, other, v) end)} + end) |> elem(1) + end - """ @spec pop(t, key, value) :: {value, t} def pop(dict, key, default \\ nil) do target(dict).pop(dict, key, default) end - @doc """ - Update a value in `dict` by calling `fun` on the value to get a new - value. An exception is generated if `key` is not present in the dict. - - ## Examples - - iex> d = Enum.into([a: 1, b: 2], dict_impl.new) - iex> d = Dict.update!(d, :a, fn(val) -> -val end) - iex> Dict.get(d, :a) - -1 + @spec pop_lazy(t, key, (() -> value)) :: {value, t} + def pop_lazy(dict, key, fun) do + target(dict).pop_lazy(dict, key, fun) + end - """ @spec update!(t, key, (value -> value)) :: t def update!(dict, key, fun) do target(dict).update!(dict, key, fun) end - @doc """ - Update a value in `dict` by calling `fun` on the value to get a new value. If - `key` is not present in `dict` then `initial` will be stored as the first - value. - - ## Examples - - iex> d = Enum.into([a: 1, b: 2], dict_impl.new) - iex> d = Dict.update(d, :c, 3, fn(val) -> -val end) - iex> Dict.get(d, :c) - 3 - - """ @spec update(t, key, value, (value -> value)) :: t def update(dict, key, initial, fun) do target(dict).update(dict, key, initial, fun) end - @doc """ - Returns a tuple of two dicts, where the first dict contains only - entries from `dict` with keys in `keys`, and the second dict - contains only entries from `dict` with keys not in `keys` - - Any non-member keys are ignored. 
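For the `split` behaviour described above, `Map.split/2` is the direct replacement once `Dict` goes away (illustrative, not part of the patch):

    iex> Map.split(%{a: 1, b: 2, c: 3}, [:a, :c, :e])
    {%{a: 1, c: 3}, %{b: 2}}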
- - ## Examples - - iex> d = Enum.into([a: 1, b: 2, c: 3, d: 4], dict_impl.new) - iex> {d1, d2} = Dict.split(d, [:a, :c, :e]) - iex> {Dict.to_list(d1) |> Enum.sort, Dict.to_list(d2) |> Enum.sort} - {[a: 1, c: 3], [b: 2, d: 4]} - - iex> d = Enum.into([], dict_impl.new) - iex> {d1, d2} = Dict.split(d, [:a, :c]) - iex> {Dict.to_list(d1), Dict.to_list(d2)} - {[], []} - - iex> d = Enum.into([a: 1, b: 2], dict_impl.new) - iex> {d1, d2} = Dict.split(d, [:a, :b, :c]) - iex> {Dict.to_list(d1) |> Enum.sort, Dict.to_list(d2)} - {[a: 1, b: 2], []} - - """ @spec split(t, [key]) :: {t, t} def split(dict, keys) do target(dict).split(dict, keys) end - @doc """ - Returns a new dict where the given `keys` are removed from `dict`. - Any non-member keys are ignored. - - ## Examples - - iex> d = Enum.into([a: 1, b: 2], dict_impl.new) - iex> d = Dict.drop(d, [:a, :c, :d]) - iex> Dict.to_list(d) - [b: 2] - - iex> d = Enum.into([a: 1, b: 2], dict_impl.new) - iex> d = Dict.drop(d, [:c, :d]) - iex> Dict.to_list(d) |> Enum.sort - [a: 1, b: 2] - - """ @spec drop(t, [key]) :: t def drop(dict, keys) do target(dict).drop(dict, keys) end - @doc """ - Returns a new dict where only the keys in `keys` from `dict` are included. - - Any non-member keys are ignored. - - ## Examples - - iex> d = Enum.into([a: 1, b: 2], dict_impl.new) - iex> d = Dict.take(d, [:a, :c, :d]) - iex> Dict.to_list(d) - [a: 1] - iex> d = Dict.take(d, [:c, :d]) - iex> Dict.to_list(d) - [] - - """ @spec take(t, [key]) :: t def take(dict, keys) do target(dict).take(dict, keys) end - @doc false @spec empty(t) :: t def empty(dict) do target(dict).empty(dict) end - @doc """ - Check if two dicts are equal using `===`. - - Notice this function is polymorphic as it compares dicts of any - type. Each dict implementation also provides an `equal?` function, - but they can only compare dicts of the same type. - - ## Examples - - iex> a = Enum.into([a: 2, b: 3, f: 5, c: 123], dict_impl.new) - iex> b = [a: 2, b: 3, f: 5, c: 123] - iex> Dict.equal?(a, b) - true - - iex> a = Enum.into([a: 2, b: 3, f: 5, c: 123], dict_impl.new) - iex> b = [] - iex> Dict.equal?(a, b) - false - - """ @spec equal?(t, t) :: boolean def equal?(dict1, dict2) do target1 = target(dict1) @@ -669,7 +346,7 @@ defmodule Dict do Enumerable.reduce(dict2, {:cont, true}, fn({k, v}, _acc) -> case target1.fetch(dict1, k) do {:ok, ^v} -> {:cont, true} - _ -> {:halt, false} + _ -> {:halt, false} end end) |> elem(1) @@ -678,15 +355,12 @@ defmodule Dict do end end - @doc """ - Returns a list of key-value pairs stored in `dict`. - No particular order is enforced. - """ @spec to_list(t) :: list def to_list(dict) do target(dict).to_list(dict) end + @spec unsupported_dict(t) :: no_return defp unsupported_dict(dict) do raise ArgumentError, "unsupported dict: #{inspect dict}" end diff --git a/lib/elixir/lib/enum.ex b/lib/elixir/lib/enum.ex index 46c97a9bc5a..d0abf190ebd 100644 --- a/lib/elixir/lib/enum.ex +++ b/lib/elixir/lib/enum.ex @@ -3,35 +3,35 @@ defprotocol Enumerable do Enumerable protocol used by `Enum` and `Stream` modules. When you invoke a function in the `Enum` module, the first argument - is usually a collection that must implement this protocol. For example, - the expression + is usually a collection that must implement this protocol. 
+ For example, the expression: Enum.map([1, 2, 3], &(&1 * 2)) - invokes underneath `Enumerable.reduce/3` to perform the reducing + invokes `Enumerable.reduce/3` to perform the reducing operation that builds a mapped list by calling the mapping function - `&(&1 * 2)` on every element in the collection and cons'ing the + `&(&1 * 2)` on every element in the collection and consuming the element with an accumulated list. Internally, `Enum.map/2` is implemented as follows: def map(enum, fun) do - reducer = fn x, acc -> {:cont, [fun.(x)|acc]} end + reducer = fn x, acc -> {:cont, [fun.(x) | acc]} end Enumerable.reduce(enum, {:cont, []}, reducer) |> elem(1) |> :lists.reverse() end - Notice the user given function is wrapped into a `reducer` function. - The `reducer` function must return a tagged tuple after each step, - as described in the `acc/0` type. + Notice the user-supplied function is wrapped into a `t:reducer/0` function. + The `t:reducer/0` function must return a tagged tuple after each step, + as described in the `t:acc/0` type. The reason the accumulator requires a tagged tuple is to allow the - reducer function to communicate to the underlying enumerable the end - of enumeration, allowing any open resource to be properly closed. It - also allows suspension of the enumeration, which is useful when + `t:reducer/0` function to communicate the end of enumeration to the underlying + enumerable, allowing any open resources to be properly closed. + It also allows suspension of the enumeration, which is useful when interleaving between many enumerables is required (as in zip). Finally, `Enumerable.reduce/3` will return another tagged tuple, - as represented by the `result/0` type. + as represented by the `t:result/0` type. """ @typedoc """ @@ -44,10 +44,10 @@ defprotocol Enumerable do * `:suspend` - the enumeration should be suspended immediately Depending on the accumulator value, the result returned by - `Enumerable.reduce/3` will change. Please check the `result` - type docs for more information. + `Enumerable.reduce/3` will change. Please check the `t:result/0` + type documentation for more information. - In case a reducer function returns a `:suspend` accumulator, + In case a `t:reducer/0` function returns a `:suspend` accumulator, it must be explicitly handled by the caller and never leak. """ @type acc :: {:cont, term} | {:halt, term} | {:suspend, term} @@ -55,9 +55,10 @@ defprotocol Enumerable do @typedoc """ The reducer function. - Should be called with the collection element and the - accumulator contents. Returns the accumulator for - the next enumeration step. + Should be called with the enumerable element and the + accumulator contents. + + Returns the accumulator for the next enumeration step. """ @type reducer :: (term, term -> acc) @@ -66,9 +67,9 @@ defprotocol Enumerable do It may be *done* when the enumeration is finished by reaching its end, or *halted*/*suspended* when the enumeration was halted - or suspended by the reducer function. + or suspended by the `t:reducer/0` function. - In case a reducer function returns the `:suspend` accumulator, the + In case a `t:reducer/0` function returns the `:suspend` accumulator, the `:suspended` tuple must be explicitly handled by the caller and never leak. In practice, this means regular enumeration functions just need to be concerned about `:done` and `:halted` results. @@ -76,7 +77,9 @@ defprotocol Enumerable do Furthermore, a `:suspend` call must always be followed by another call, eventually halting or continuing until the end. 
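As a concrete illustration of the suspension contract described above (not part of the patch), the reducer below suspends after every element; each `:suspended` result carries a continuation that is resumed with a new accumulator command until the enumeration is halted:

    iex> reducer = fn x, acc -> {:suspend, [x | acc]} end
    iex> {:suspended, [1], cont} = Enumerable.reduce([1, 2, 3], {:cont, []}, reducer)
    iex> {:suspended, [2, 1], cont} = cont.({:cont, [1]})
    iex> cont.({:halt, [2, 1]})
    {:halted, [2, 1]}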
""" - @type result :: {:done, term} | {:halted, term} | {:suspended, term, continuation} + @type result :: {:done, term} | + {:halted, term} | + {:suspended, term, continuation} @typedoc """ A partially applied reduce function. @@ -94,109 +97,117 @@ defprotocol Enumerable do @type continuation :: (acc -> result) @doc """ - Reduces the collection into a value. + Reduces the enumerable into an element. Most of the operations in `Enum` are implemented in terms of reduce. - This function should apply the given `reducer` function to each - item in the collection and proceed as expected by the returned accumulator. + This function should apply the given `t:reducer/0` function to each + item in the enumerable and proceed as expected by the returned + accumulator. As an example, here is the implementation of `reduce` for lists: - def reduce(_, {:halt, acc}, _fun), do: {:halted, acc} - def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)} - def reduce([], {:cont, acc}, _fun), do: {:done, acc} - def reduce([h|t], {:cont, acc}, fun), do: reduce(t, fun.(h, acc), fun) + def reduce(_, {:halt, acc}, _fun), do: {:halted, acc} + def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)} + def reduce([], {:cont, acc}, _fun), do: {:done, acc} + def reduce([h | t], {:cont, acc}, fun), do: reduce(t, fun.(h, acc), fun) """ @spec reduce(t, acc, reducer) :: result - def reduce(collection, acc, fun) + def reduce(enumerable, acc, fun) @doc """ - Checks if a value exists within the collection. + Checks if an element exists within the enumerable. It should return `{:ok, boolean}`. - If `{:error, __MODULE__}` is returned a default algorithm using `reduce` and - the match (`===`) operator is used. This algorithm runs in linear time. + If `{:error, __MODULE__}` is returned a default algorithm using + `reduce` and the match (`===`) operator is used. This algorithm runs + in linear time. Please force use of the default algorithm unless you can implement an algorithm that is significantly faster. """ @spec member?(t, term) :: {:ok, boolean} | {:error, module} - def member?(collection, value) + def member?(enumerable, element) @doc """ - Retrieves the collection's size. + Retrieves the enumerable's size. It should return `{:ok, size}`. - If `{:error, __MODULE__}` is returned a default algorithm using `reduce` and - the match (`===`) operator is used. This algorithm runs in linear time. + If `{:error, __MODULE__}` is returned a default algorithm using + `reduce` is used. This algorithm runs in linear time. Please force use of the default algorithm unless you can implement an algorithm that is significantly faster. """ @spec count(t) :: {:ok, non_neg_integer} | {:error, module} - def count(collection) + def count(enumerable) end defmodule Enum do import Kernel, except: [max: 2, min: 2] @moduledoc """ - Provides a set of algorithms that enumerate over collections according to the - `Enumerable` protocol: + Provides a set of algorithms that enumerate over enumerables according + to the `Enumerable` protocol. iex> Enum.map([1, 2, 3], fn(x) -> x * 2 end) - [2,4,6] + [2, 4, 6] - Some particular types, like dictionaries, yield a specific format on - enumeration. For dicts, the argument is always a `{key, value}` tuple: + Some particular types, like maps, yield a specific format on enumeration. 
+ For example, the argument is always a `{key, value}` tuple for maps: - iex> dict = %{a: 1, b: 2} - iex> Enum.map(dict, fn {k, v} -> {k, v * 2} end) + iex> map = %{a: 1, b: 2} + iex> Enum.map(map, fn {k, v} -> {k, v * 2} end) [a: 2, b: 4] - Note that the functions in the `Enum` module are eager: they always start - the enumeration of the given collection. The `Stream` module allows - lazy enumeration of collections and provides infinite streams. + Note that the functions in the `Enum` module are eager: they always + start the enumeration of the given enumerable. The `Stream` module + allows lazy enumeration of enumerables and provides infinite streams. Since the majority of the functions in `Enum` enumerate the whole - collection and return a list as result, infinite streams need to + enumerable and return a list as result, infinite streams need to be carefully used with such functions, as they can potentially run forever. For example: - Enum.each Stream.cycle([1,2,3]), &IO.puts(&1) + Enum.each Stream.cycle([1, 2, 3]), &IO.puts(&1) """ @compile :inline_list_funcs @type t :: Enumerable.t + @type acc :: any @type element :: any - @type index :: non_neg_integer + @type index :: integer @type default :: any # Require Stream.Reducers and its callbacks require Stream.Reducers, as: R - defmacrop cont(_, entry, acc) do - quote do: {:cont, [unquote(entry)|unquote(acc)]} + defmacrop skip(acc) do + acc + end + + defmacrop next(_, entry, acc) do + quote do: [unquote(entry) | unquote(acc)] end - defmacrop acc(h, n, _) do - quote do: {unquote(h), unquote(n)} + defmacrop acc(head, state, _) do + quote do: {unquote(head), unquote(state)} end - defmacrop cont_with_acc(f, entry, h, n, _) do + defmacrop next_with_acc(_, entry, head, state, _) do quote do - {:cont, {[unquote(entry)|unquote(h)], unquote(n)}} + {[unquote(entry) | unquote(head)], unquote(state)} end end @doc """ - Invokes the given `fun` for each item in the `collection` and returns `false` - if at least one invocation returns `false`. Otherwise returns `true`. + Returns true if the given `fun` evaluates to true on all of the items in the enumerable. + + It stops the iteration at the first invocation that returns `false` or `nil`. ## Examples @@ -207,7 +218,7 @@ defmodule Enum do false If no function is given, it defaults to checking if - all items in the collection evaluate to `true`. + all items in the enumerable are truthy values. iex> Enum.all?([1, 2, 3]) true @@ -216,24 +227,24 @@ defmodule Enum do false """ - @spec all?(t) :: boolean @spec all?(t, (element -> as_boolean(term))) :: boolean - def all?(collection, fun \\ fn(x) -> x end) + def all?(enumerable, fun \\ fn(x) -> x end) - def all?(collection, fun) when is_list(collection) do - do_all?(collection, fun) + def all?(enumerable, fun) when is_list(enumerable) do + all_list(enumerable, fun) end - def all?(collection, fun) do - Enumerable.reduce(collection, {:cont, true}, fn(entry, _) -> + def all?(enumerable, fun) do + Enumerable.reduce(enumerable, {:cont, true}, fn(entry, _) -> if fun.(entry), do: {:cont, true}, else: {:halt, false} end) |> elem(1) end @doc """ - Invokes the given `fun` for each item in the `collection` and returns `true` if - at least one invocation returns `true`. Returns `false` otherwise. + Returns true if the given `fun` evaluates to true on any of the items in the enumerable. + + It stops the iteration at the first invocation that returns a truthy value (not `false` or `nil`). 
## Examples @@ -243,8 +254,8 @@ defmodule Enum do iex> Enum.any?([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) true - If no function is given, it defaults to checking if - at least one item in the collection evaluates to `true`. + If no function is given, it defaults to checking if at least one item + in the enumerable is a truthy value. iex> Enum.any?([false, false, false]) false @@ -253,24 +264,32 @@ defmodule Enum do true """ - @spec any?(t) :: boolean @spec any?(t, (element -> as_boolean(term))) :: boolean - def any?(collection, fun \\ fn(x) -> x end) + def any?(enumerable, fun \\ fn(x) -> x end) - def any?(collection, fun) when is_list(collection) do - do_any?(collection, fun) + def any?(enumerable, fun) when is_list(enumerable) do + any_list(enumerable, fun) end - def any?(collection, fun) do - Enumerable.reduce(collection, {:cont, false}, fn(entry, _) -> + def any?(enumerable, fun) do + Enumerable.reduce(enumerable, {:cont, false}, fn(entry, _) -> if fun.(entry), do: {:halt, true}, else: {:cont, false} end) |> elem(1) end @doc """ - Finds the element at the given index (zero-based). - Returns `default` if index is out of bounds. + Finds the element at the given `index` (zero-based). + + Returns `default` if `index` is out of bounds. + + A negative `index` can be passed, which means the `enumerable` is + enumerated once and the `index` is counted from the end (e.g. + `-1` finds the last element). + + Note this operation takes linear time. In order to access + the element at index `index`, it will need to traverse `index` + previous elements. ## Examples @@ -287,33 +306,35 @@ defmodule Enum do :none """ - @spec at(t, integer) :: element | nil - @spec at(t, integer, default) :: element | default - def at(collection, n, default \\ nil) do - case fetch(collection, n) do + @spec at(t, index, default) :: element | default + def at(enumerable, index, default \\ nil) do + case fetch(enumerable, index) do {:ok, h} -> h - :error -> default + :error -> default end end @doc """ - Shortcut to `chunk(coll, n, n)`. + Shortcut to `chunk(enumerable, count, count)`. """ - @spec chunk(t, non_neg_integer) :: [list] - def chunk(coll, n), do: chunk(coll, n, n, nil) + @spec chunk(t, pos_integer) :: [list] + def chunk(enumerable, count), do: chunk(enumerable, count, count, nil) @doc """ - Returns a collection of lists containing `n` items each, where - each new chunk starts `step` elements into the collection. + Returns list of lists containing `count` items each, where + each new chunk starts `step` elements into the enumerable. + + `step` is optional and, if not passed, defaults to `count`, i.e. + chunks do not overlap. - `step` is optional and, if not passed, defaults to `n`, i.e. - chunks do not overlap. If the final chunk does not have `n` - elements to fill the chunk, elements are taken as necessary - from `pad` if it was passed. If `pad` is passed and does not - have enough elements to fill the chunk, then the chunk is - returned anyway with less than `n` elements. If `pad` is not - passed at all or is `nil`, then the partial chunk is discarded - from the result. + If the final chunk does not have `count` elements to fill the chunk, + the final chunk is dropped unless `leftover` is given. + + If `leftover` is given, elements are taken from `leftover` to fill in + the chunk. If `leftover` is passed and does not have enough elements + to fill the chunk, then a partial chunk is returned with less than + `count` elements. 
Therefore, an empty list can be given to `leftover` + when one simply desires for the last chunk to not be discarded. ## Examples @@ -326,28 +347,72 @@ defmodule Enum do iex> Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2, [7]) [[1, 2, 3], [3, 4, 5], [5, 6, 7]] - iex> Enum.chunk([1, 2, 3, 4, 5, 6], 3, 3, []) - [[1, 2, 3], [4, 5, 6]] + iex> Enum.chunk([1, 2, 3, 4], 3, 3, []) + [[1, 2, 3], [4]] + + iex> Enum.chunk([1, 2, 3, 4], 10) + [] + + iex> Enum.chunk([1, 2, 3, 4], 10, 10, []) + [[1, 2, 3, 4]] """ - @spec chunk(t, non_neg_integer, non_neg_integer) :: [list] - @spec chunk(t, non_neg_integer, non_neg_integer, t | nil) :: [list] - def chunk(coll, n, step, pad \\ nil) when n > 0 and step > 0 do - limit = :erlang.max(n, step) + @spec chunk(t, pos_integer, pos_integer, t | nil) :: [list] + def chunk(enumerable, count, step, leftover \\ nil) + when is_integer(count) and count > 0 and is_integer(step) and step > 0 do + limit = :erlang.max(count, step) - {_, {acc, {buffer, i}}} = - Enumerable.reduce(coll, {:cont, {[], {[], 0}}}, R.chunk(n, step, limit)) + {acc, {buffer, i}} = + reduce(enumerable, {[], {[], 0}}, R.chunk(count, step, limit)) - if nil?(pad) || i == 0 do + if is_nil(leftover) || i == 0 do :lists.reverse(acc) else - buffer = :lists.reverse(buffer) ++ take(pad, n - i) - :lists.reverse([buffer|acc]) + buffer = :lists.reverse(buffer, take(leftover, count - i)) + :lists.reverse([buffer | acc]) end end @doc """ - Splits `coll` on every element for which `fun` returns a new value. + Splits enumerable on every element for which `fun` returns a new + value. + + Returns a list of lists. + + ## Examples + + iex> chunk_fun = fn i, acc -> + ...> if rem(i, 2) == 0 do + ...> {:cont, Enum.reverse([i | acc]), []} + ...> else + ...> {:cont, [i | acc]} + ...> end + ...> end + iex> after_fun = fn + ...> [] -> {:cont, []} + ...> acc -> {:cont, Enum.reverse(acc), []} + ...> end + iex> Enum.chunk_by(1..10, [], chunk_fun, after_fun) + [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]] + + """ + @spec chunk_by(t, acc, + (element, acc -> {:cont, chunk, acc} | {:cont, acc}), + (acc -> {:cont, chunk, acc} | {:cont, acc})) :: Enumerable.t when chunk: any + def chunk_by(enum, acc, chunk_fun, after_fun) do + {res, acc} = reduce(enum, {[], acc}, R.chunk_by(chunk_fun)) + + case after_fun.(acc) do + {:cont, _acc} -> :lists.reverse(res) + {:cont, elem, _acc} -> :lists.reverse([elem | res]) + end + end + + @doc """ + Splits enumerable on every element for which `fun` returns a new + value. + + Returns a list of lists. ## Examples @@ -356,47 +421,54 @@ defmodule Enum do """ @spec chunk_by(t, (element -> any)) :: [list] - def chunk_by(coll, fun) do - {_, {acc, res}} = - Enumerable.reduce(coll, {:cont, {[], nil}}, R.chunk_by(fun)) - - case res do - {buffer, _} -> - :lists.reverse([:lists.reverse(buffer) | acc]) - nil -> - [] - end + def chunk_by(enumerable, fun) do + chunk_by(enumerable, nil, fn + entry, nil -> + {:cont, {[entry], fun.(entry)}} + entry, {acc, value} -> + case fun.(entry) do + ^value -> {:cont, {[entry | acc], value}} + new_value -> {:cont, :lists.reverse(acc), {[entry], new_value}} + end + end, fn + nil -> {:cont, :done} + {acc, _value} -> {:cont, :lists.reverse(acc), :done} + end) end @doc """ - Given an enumerable of enumerables, concatenate the enumerables into a single list. + Given an enumerable of enumerables, concatenates the enumerables into + a single list. 
## Examples iex> Enum.concat([1..3, 4..6, 7..9]) - [1,2,3,4,5,6,7,8,9] + [1, 2, 3, 4, 5, 6, 7, 8, 9] iex> Enum.concat([[1, [2], 3], [4], [5, 6]]) - [1,[2],3,4,5,6] + [1, [2], 3, 4, 5, 6] """ @spec concat(t) :: t def concat(enumerables) do - do_concat(enumerables) + fun = &[&1 | &2] + reduce(enumerables, [], &reduce(&1, &2, fun)) |> :lists.reverse end @doc """ - Concatenates the enumerable on the right with the enumerable on the left. + Concatenates the enumerable on the right with the enumerable on the + left. - This function produces the same result as the `Kernel.++/2` operator for lists. + This function produces the same result as the `Kernel.++/2` operator + for lists. ## Examples iex> Enum.concat(1..3, 4..6) - [1,2,3,4,5,6] + [1, 2, 3, 4, 5, 6] iex> Enum.concat([1, 2, 3], [4, 5, 6]) - [1,2,3,4,5,6] + [1, 2, 3, 4, 5, 6] """ @spec concat(t, t) :: t @@ -405,16 +477,11 @@ defmodule Enum do end def concat(left, right) do - do_concat([left, right]) - end - - defp do_concat(enumerable) do - fun = &[&1|&2] - reduce(enumerable, [], &reduce(&1, &2, fun)) |> :lists.reverse + concat([left, right]) end @doc """ - Returns the collection's size. + Returns the size of the enumerable. ## Examples @@ -423,24 +490,24 @@ defmodule Enum do """ @spec count(t) :: non_neg_integer - def count(collection) when is_list(collection) do - :erlang.length(collection) + def count(enumerable) when is_list(enumerable) do + :erlang.length(enumerable) end - def count(collection) do - case Enumerable.count(collection) do + def count(enumerable) do + case Enumerable.count(enumerable) do {:ok, value} when is_integer(value) -> value {:error, module} -> - module.reduce(collection, {:cont, 0}, fn + module.reduce(enumerable, {:cont, 0}, fn _, acc -> {:cont, acc + 1} end) |> elem(1) end end @doc """ - Returns the count of items in the collection for which - `fun` returns `true`. + Returns the count of items in the enumerable for which `fun` returns + a truthy value. ## Examples @@ -449,19 +516,64 @@ defmodule Enum do """ @spec count(t, (element -> as_boolean(term))) :: non_neg_integer - def count(collection, fun) do - Enumerable.reduce(collection, {:cont, 0}, fn(entry, acc) -> - {:cont, if(fun.(entry), do: acc + 1, else: acc)} - end) |> elem(1) + def count(enumerable, fun) do + reduce(enumerable, 0, fn(entry, acc) -> + if(fun.(entry), do: acc + 1, else: acc) + end) + end + + @doc """ + Enumerates the `enumerable`, returning a list where all consecutive + duplicated elements are collapsed to a single element. + + Elements are compared using `===`. + + If you want to remove all duplicated elements, regardless of order, + see `uniq/1`. + + ## Examples + + iex> Enum.dedup([1, 2, 3, 3, 2, 1]) + [1, 2, 3, 2, 1] + + iex> Enum.dedup([1, 1, 2, 2.0, :three, :"three"]) + [1, 2, 2.0, :three] + + """ + @spec dedup(t) :: list + def dedup(enumerable) do + dedup_by(enumerable, fn x -> x end) + end + + @doc """ + Enumerates the `enumerable`, returning a list where all consecutive + duplicated elements are collapsed to a single element. + + The function `fun` maps every element to a term which is used to + determine if two elements are duplicates. 
+ + ## Examples + + iex> Enum.dedup_by([{1, :a}, {2, :b}, {2, :c}, {1, :a}], fn {x, _} -> x end) + [{1, :a}, {2, :b}, {1, :a}] + + iex> Enum.dedup_by([5, 1, 2, 3, 2, 1], fn x -> x > 2 end) + [5, 1, 3, 2] + + """ + @spec dedup_by(t, (element -> term)) :: list + def dedup_by(enumerable, fun) do + {list, _} = reduce(enumerable, {[], []}, R.dedup(fun)) + :lists.reverse(list) end @doc """ - Drops the first `count` items from `collection`. + Drops the `amount` of items from the enumerable. + + If a negative `amount` is given, the `amount` of last values will be dropped. - If a negative value `count` is given, the last `count` - values will be dropped. The collection is enumerated - once to retrieve the proper index and the remaining - calculation is performed from the end. + The `enumerable` is enumerated once to retrieve the proper index and + the remaining calculation is performed from the end. ## Examples @@ -472,53 +584,83 @@ defmodule Enum do [] iex> Enum.drop([1, 2, 3], 0) - [1,2,3] + [1, 2, 3] iex> Enum.drop([1, 2, 3], -1) - [1,2] + [1, 2] """ @spec drop(t, integer) :: list - def drop(collection, count) when is_list(collection) and count >= 0 do - do_drop(collection, count) + def drop(enumerable, amount) + when is_list(enumerable) and is_integer(amount) and amount >= 0 do + drop_list(enumerable, amount) end - def drop(collection, count) when count >= 0 do - res = - reduce(collection, count, fn - x, acc when is_list(acc) -> [x|acc] - x, 0 -> [x] - _, acc when acc > 0 -> acc - 1 - end) - if is_list(res), do: :lists.reverse(res), else: [] + def drop(enumerable, amount) when is_integer(amount) and amount >= 0 do + {result, _} = reduce(enumerable, {[], amount}, R.drop()) + if is_list(result), do: :lists.reverse(result), else: [] + end + + def drop(enumerable, amount) when is_integer(amount) and amount < 0 do + drop_list(reverse(enumerable), -amount) |> :lists.reverse end - def drop(collection, count) when count < 0 do - do_drop(reverse(collection), abs(count)) |> :lists.reverse + @doc """ + Returns a list of every `nth` item in the enumerable dropped, + starting with the first element. + + The first item is always dropped, unless `nth` is 0. + + The second argument specifying every `nth` item must be a non-negative + integer. + + ## Examples + + iex> Enum.drop_every(1..10, 2) + [2, 4, 6, 8, 10] + + iex> Enum.drop_every(1..10, 0) + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + + iex> Enum.drop_every([1, 2, 3], 1) + [] + + """ + @spec drop_every(t, non_neg_integer) :: list + def drop_every(enumerable, nth) + + def drop_every(_enumerable, 1), do: [] + def drop_every(enumerable, 0), do: to_list(enumerable) + def drop_every([], nth) when is_integer(nth), do: [] + + def drop_every(enumerable, nth) when is_integer(nth) and nth > 1 do + {res, _} = reduce(enumerable, {[], :first}, R.drop_every(nth)) + :lists.reverse(res) end @doc """ - Drops items at the beginning of `collection` while `fun` returns `true`. + Drops items at the beginning of the enumerable while `fun` returns a + truthy value. 
## Examples - iex> Enum.drop_while([1, 2, 3, 4, 5], fn(x) -> x < 3 end) - [3,4,5] + iex> Enum.drop_while([1, 2, 3, 2, 1], fn(x) -> x < 3 end) + [3, 2, 1] """ @spec drop_while(t, (element -> as_boolean(term))) :: list - def drop_while(collection, fun) when is_list(collection) do - do_drop_while(collection, fun) + def drop_while(enumerable, fun) when is_list(enumerable) do + drop_while_list(enumerable, fun) end - def drop_while(collection, fun) do - {_, {res, _}} = - Enumerable.reduce(collection, {:cont, {[], true}}, R.drop_while(fun)) + def drop_while(enumerable, fun) do + {res, _} = reduce(enumerable, {[], true}, R.drop_while(fun)) :lists.reverse(res) end @doc """ - Invokes the given `fun` for each item in the `collection`. + Invokes the given `fun` for each item in the enumerable. + Returns `:ok`. ## Examples @@ -530,13 +672,13 @@ defmodule Enum do """ @spec each(t, (element -> any)) :: :ok - def each(collection, fun) when is_list(collection) do - :lists.foreach(fun, collection) + def each(enumerable, fun) when is_list(enumerable) do + :lists.foreach(fun, enumerable) :ok end - def each(collection, fun) do - reduce(collection, nil, fn(entry, _) -> + def each(enumerable, fun) do + reduce(enumerable, nil, fn(entry, _) -> fun.(entry) nil end) @@ -544,7 +686,9 @@ defmodule Enum do end @doc """ - Returns `true` if the collection is empty, otherwise `false`. + Determines if the enumerable is empty. + + Returns `true` if `enumerable` is empty, otherwise `false`. ## Examples @@ -556,27 +700,42 @@ defmodule Enum do """ @spec empty?(t) :: boolean - def empty?(collection) when is_list(collection) do - collection == [] + def empty?(enumerable) when is_list(enumerable) do + enumerable == [] end - def empty?(collection) do - Enumerable.reduce(collection, {:cont, true}, fn(_, _) -> {:halt, false} end) |> elem(1) + def empty?(enumerable) do + case Enumerable.count(enumerable) do + {:ok, value} when is_integer(value) -> + value == 0 + {:error, module} -> + module.reduce(enumerable, {:cont, true}, + fn(_, _) -> {:halt, false} end) + |> elem(1) + end end @doc """ - Finds the element at the given index (zero-based). + Finds the element at the given `index` (zero-based). + Returns `{:ok, element}` if found, otherwise `:error`. - A negative index can be passed, which means the collection is - enumerated once and the index is counted from the end (i.e. + A negative `index` can be passed, which means the `enumerable` is + enumerated once and the `index` is counted from the end (e.g. `-1` fetches the last element). + Note this operation takes linear time. In order to access + the element at index `index`, it will need to traverse `index` + previous elements. 
+ ## Examples iex> Enum.fetch([2, 4, 6], 0) {:ok, 2} + iex> Enum.fetch([2, 4, 6], -3) + {:ok, 2} + iex> Enum.fetch([2, 4, 6], 2) {:ok, 6} @@ -584,35 +743,71 @@ defmodule Enum do :error """ - @spec fetch(t, integer) :: {:ok, element} | :error - def fetch(collection, n) when is_list(collection) and n >= 0 do - do_fetch(collection, n) + @spec fetch(t, index) :: {:ok, element} | :error + def fetch(enumerable, index) + + def fetch(enumerable, index) when is_list(enumerable) and is_integer(index) do + if index < 0 do + enumerable |> :lists.reverse |> fetch_list((-index) - 1) + else + fetch_list(enumerable, index) + end end - def fetch(collection, n) when n >= 0 do - res = - Enumerable.reduce(collection, {:cont, 0}, fn(entry, acc) -> - if acc == n do - {:halt, entry} - else - {:cont, acc + 1} - end - end) + def fetch(first..last, index) when is_integer(index) do + fetch_range(first, last, index) + end - case res do - {:halted, entry} -> {:ok, entry} - {:done, _} -> :error + def fetch(enumerable, index) when is_integer(index) and index < 0 do + module = Enumerable.impl_for!(enumerable) + case module.count(enumerable) do + {:error, module} -> + reversed = module.reduce(enumerable, {:cont, []}, fn item, acc -> + {:cont, [item | acc]} + end) |> elem(1) + fetch_list(reversed, (-index) - 1) + {:ok, count} when (count + index) < 0 -> + :error + {:ok, count} -> + fetch_enumerable(enumerable, count + index, module) + end + end + + def fetch(enumerable, index) when is_integer(index) do + module = Enumerable.impl_for!(enumerable) + case module.count(enumerable) do + {:error, module} -> + fetch_enumerable(enumerable, index, module) + {:ok, count} when count <= index -> + :error + {:ok, _count} -> + fetch_enumerable(enumerable, index, module) end end - def fetch(collection, n) when n < 0 do - do_fetch(reverse(collection), abs(n + 1)) + defp fetch_enumerable(enumerable, index, module) do + reduce_result = + module.reduce(enumerable, {:cont, {:not_found, 0}}, fn + entry, {_, ^index} -> + {:halt, {:found, entry}} + _entry, {_, index} -> + {:cont, {:not_found, index + 1}} + end) + + case elem(reduce_result, 1) do + {:found, entry} -> {:ok, entry} + {:not_found, _} -> :error + end end @doc """ - Finds the element at the given index (zero-based). - Raises `OutOfBoundsError` if the given position - is outside the range of the collection. + Finds the element at the given `index` (zero-based). + + Raises `OutOfBoundsError` if the given `index` is outside the range of + the enumerable. + + Note this operation takes linear time. In order to access the element + at index `index`, it will need to traverse `index` previous elements. ## Examples @@ -626,17 +821,19 @@ defmodule Enum do ** (Enum.OutOfBoundsError) out of bounds error """ - @spec fetch!(t, integer) :: element | no_return - def fetch!(collection, n) do - case fetch(collection, n) do + @spec fetch!(t, index) :: element | no_return + def fetch!(enumerable, index) do + case fetch(enumerable, index) do {:ok, h} -> h - :error -> raise Enum.OutOfBoundsError + :error -> raise Enum.OutOfBoundsError end end @doc """ - Filters the collection, i.e. returns only those elements - for which `fun` returns `true`. + Filters the enumerable, i.e. returns only those elements + for which `fun` returns a truthy value. + + See also `reject/2`. 
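The range clause of `fetch/2` above delegates to a private `fetch_range/3` helper defined outside this hunk. A hypothetical sketch of such a helper, assuming an ascending range (`first <= last`) for brevity:

    defmodule RangeFetchSketch do
      # Positive index: count forward from the first element.
      def fetch_range(first, last, index) when index >= 0 do
        if first + index <= last, do: {:ok, first + index}, else: :error
      end

      # Negative index: count backwards from the last element.
      def fetch_range(first, last, index) when index < 0 do
        position = last + index + 1
        if position >= first, do: {:ok, position}, else: :error
      end
    end

    RangeFetchSketch.fetch_range(2, 6, -1)
    #=> {:ok, 6}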
## Examples @@ -645,37 +842,30 @@ defmodule Enum do """ @spec filter(t, (element -> as_boolean(term))) :: list - def filter(collection, fun) when is_list(collection) do - for item <- collection, fun.(item), do: item + def filter(enumerable, fun) when is_list(enumerable) do + filter_list(enumerable, fun) end - def filter(collection, fun) do - Enumerable.reduce(collection, {:cont, []}, R.filter(fun)) - |> elem(1) |> :lists.reverse + def filter(enumerable, fun) do + reduce(enumerable, [], R.filter(fun)) |> :lists.reverse end - @doc """ - Filters the collection and maps its values in one pass. - - ## Examples - - iex> Enum.filter_map([1, 2, 3], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) - [4] - - """ - @spec filter_map(t, (element -> as_boolean(term)), (element -> element)) :: list - def filter_map(collection, filter, mapper) when is_list(collection) do - for item <- collection, filter.(item), do: mapper.(item) + @doc false + # TODO: Remove on 2.0 + # (hard-deprecated in elixir_dispatch) + def filter_map(enumerable, filter, mapper) when is_list(enumerable) do + for item <- enumerable, filter.(item), do: mapper.(item) end - def filter_map(collection, filter, mapper) do - Enumerable.reduce(collection, {:cont, []}, R.filter_map(filter, mapper)) - |> elem(1) |> :lists.reverse + def filter_map(enumerable, filter, mapper) do + enumerable + |> reduce([], R.filter_map(filter, mapper)) + |> :lists.reverse end @doc """ - Returns the first item for which `fun` returns a truthy value. If no such - item is found, returns `ifnone`. + Returns the first item for which `fun` returns a truthy value. + If no such item is found, returns `default`. ## Examples @@ -689,107 +879,129 @@ defmodule Enum do 3 """ - @spec find(t, (element -> any)) :: element | nil @spec find(t, default, (element -> any)) :: element | default - def find(collection, ifnone \\ nil, fun) + def find(enumerable, default \\ nil, fun) - def find(collection, ifnone, fun) when is_list(collection) do - do_find(collection, ifnone, fun) + def find(enumerable, default, fun) when is_list(enumerable) do + find_list(enumerable, default, fun) end - def find(collection, ifnone, fun) do - Enumerable.reduce(collection, {:cont, ifnone}, fn(entry, ifnone) -> - if fun.(entry), do: {:halt, entry}, else: {:cont, ifnone} + def find(enumerable, default, fun) do + Enumerable.reduce(enumerable, {:cont, default}, fn(entry, default) -> + if fun.(entry), do: {:halt, entry}, else: {:cont, default} end) |> elem(1) end @doc """ - Similar to `find/3`, but returns the value of the function - invocation instead of the element itself. + Similar to `find/3`, but returns the index (zero-based) + of the element instead of the element itself. 
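Because `filter_map/3` above is now hidden (`@doc false`) and scheduled for removal, the same result can be written with a comprehension, or with `Enum.filter/2` piped into `Enum.map/2` (illustrative, not part of the patch):

    iex> for x <- [1, 2, 3], rem(x, 2) == 0, do: x * 2
    [4]

    iex> [1, 2, 3] |> Enum.filter(fn x -> rem(x, 2) == 0 end) |> Enum.map(&(&1 * 2))
    [4]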
## Examples - iex> Enum.find_value([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) + iex> Enum.find_index([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) nil - iex> Enum.find_value([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) - true + iex> Enum.find_index([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) + 1 """ - @spec find_value(t, (element -> any)) :: any | :nil - @spec find_value(t, any, (element -> any)) :: any | :nil - def find_value(collection, ifnone \\ nil, fun) - - def find_value(collection, ifnone, fun) when is_list(collection) do - do_find_value(collection, ifnone, fun) + @spec find_index(t, (element -> any)) :: non_neg_integer | nil + def find_index(enumerable, fun) when is_list(enumerable) do + find_index_list(enumerable, 0, fun) end - def find_value(collection, ifnone, fun) do - Enumerable.reduce(collection, {:cont, ifnone}, fn(entry, ifnone) -> - fun_entry = fun.(entry) - if fun_entry, do: {:halt, fun_entry}, else: {:cont, ifnone} - end) |> elem(1) + def find_index(enumerable, fun) do + result = + Enumerable.reduce(enumerable, {:cont, {:not_found, 0}}, fn(entry, {_, index}) -> + if fun.(entry), do: {:halt, {:found, index}}, else: {:cont, {:not_found, index + 1}} + end) + + case elem(result, 1) do + {:found, index} -> index + {:not_found, _} -> nil + end end @doc """ - Similar to `find/3`, but returns the index (zero-based) - of the element instead of the element itself. + Similar to `find/3`, but returns the value of the function + invocation instead of the element itself. ## Examples - iex> Enum.find_index([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) + iex> Enum.find_value([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) nil - iex> Enum.find_index([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) - 1 + iex> Enum.find_value([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) + true + + iex> Enum.find_value([1, 2, 3], "no bools!", &is_boolean/1) + "no bools!" """ - @spec find_index(t, (element -> any)) :: index | :nil - def find_index(collection, fun) when is_list(collection) do - do_find_index(collection, 0, fun) - end + @spec find_value(t, any, (element -> any)) :: any | nil + def find_value(enumerable, default \\ nil, fun) - def find_index(collection, fun) do - res = - Enumerable.reduce(collection, {:cont, 0}, fn(entry, acc) -> - if fun.(entry), do: {:halt, acc}, else: {:cont, acc + 1} - end) + def find_value(enumerable, default, fun) when is_list(enumerable) do + find_value_list(enumerable, default, fun) + end - case res do - {:halted, entry} -> entry - {:done, _} -> nil - end + def find_value(enumerable, default, fun) do + Enumerable.reduce(enumerable, {:cont, default}, fn(entry, default) -> + fun_entry = fun.(entry) + if fun_entry, do: {:halt, fun_entry}, else: {:cont, default} + end) |> elem(1) end @doc """ - Returns a new collection appending the result of invoking `fun` - on each corresponding item of `collection`. + Maps the given `fun` over `enumerable` and flattens the result. - The given function should return an enumerable. + This function returns a new enumerable built by appending the result of invoking `fun` + on each element of `enumerable` together; conceptually, this is similar to a + combination of `map/2` and `concat/1`. 
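The conceptual equivalence with `map/2` plus `concat/1` mentioned above can be checked directly; `flat_map/2` simply avoids building the intermediate list of lists (illustrative, not part of the patch):

    iex> enum = [1, 2, 3]
    iex> Enum.flat_map(enum, fn x -> [x, x] end) ==
    ...>   (enum |> Enum.map(fn x -> [x, x] end) |> Enum.concat())
    true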
## Examples iex> Enum.flat_map([:a, :b, :c], fn(x) -> [x, x] end) [:a, :a, :b, :b, :c, :c] - iex> Enum.flat_map([{1,3}, {4,6}], fn({x,y}) -> x..y end) + iex> Enum.flat_map([{1, 3}, {4, 6}], fn({x, y}) -> x..y end) [1, 2, 3, 4, 5, 6] + iex> Enum.flat_map([:a, :b, :c], fn(x) -> [[x]] end) + [[:a], [:b], [:c]] + """ @spec flat_map(t, (element -> t)) :: list - def flat_map(collection, fun) do - reduce(collection, [], fn(entry, acc) -> - reduce(fun.(entry), acc, &[&1|&2]) + def flat_map(enumerable, fun) when is_list(enumerable) do + flat_map_list(enumerable, fun) + end + + def flat_map(enumerable, fun) do + reduce(enumerable, [], fn(entry, acc) -> + case fun.(entry) do + list when is_list(list) -> :lists.reverse(list, acc) + other -> reduce(other, acc, &[&1 | &2]) + end end) |> :lists.reverse end + defp flat_map_list([head | tail], fun) do + case fun.(head) do + list when is_list(list) -> list ++ flat_map_list(tail, fun) + other -> to_list(other) ++ flat_map_list(tail, fun) + end + end + defp flat_map_list([], _fun) do + [] + end + @doc """ - Maps and reduces a collection, flattening the given results. + Maps and reduces an enumerable, flattening the given results (only one level deep). - It expects an accumulator and a function that receives each stream item - and an accumulator, and must return a tuple containing a new stream - (often a list) with the new accumulator or a tuple with `:halt` as first - element and the accumulator as second. + It expects an accumulator and a function that receives each enumerable + item, and must return a tuple containing a new enumerable (often a list) + with the new accumulator or a tuple with `:halt` as first element and + the accumulator as second. ## Examples @@ -798,30 +1010,76 @@ defmodule Enum do iex> Enum.flat_map_reduce(enum, 0, fn i, acc -> ...> if acc < n, do: {[i], acc + 1}, else: {:halt, acc} ...> end) - {[1,2,3], 3} + {[1, 2, 3], 3} + + iex> Enum.flat_map_reduce(1..5, 0, fn(i, acc) -> {[[i]], acc + i} end) + {[[1], [2], [3], [4], [5]], 15} """ - @spec flat_map_reduce(t, acc, fun) :: {[any], any} when - fun: (element, acc -> {t, acc} | {:halt, acc}), - acc: any - def flat_map_reduce(collection, acc, fun) do + @spec flat_map_reduce(t, acc, fun) :: {[any], any} + when fun: (element, acc -> {t, acc} | {:halt, acc}), + acc: any + def flat_map_reduce(enumerable, acc, fun) do {_, {list, acc}} = - Enumerable.reduce(collection, {:cont, {[], acc}}, fn(entry, {list, acc}) -> - case fun.(entry, acc) do - {:halt, acc} -> - {:halt, {list, acc}} - {entries, acc} -> - {:cont, {reduce(entries, list, &[&1|&2]), acc}} - end + Enumerable.reduce(enumerable, {:cont, {[], acc}}, + fn(entry, {list, acc}) -> + case fun.(entry, acc) do + {:halt, acc} -> + {:halt, {list, acc}} + {[], acc} -> + {:cont, {list, acc}} + {[entry], acc} -> + {:cont, {[entry | list], acc}} + {entries, acc} -> + {:cont, {reduce(entries, list, &[&1 | &2]), acc}} + end end) {:lists.reverse(list), acc} end + @doc """ + Splits the enumerable into groups based on `key_fun`. + + The result is a map where each key is given by `key_fun` and each + value is a list of elements given by `value_fun`. Ordering is preserved. 
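The "Ordering is preserved" guarantee above means elements keep their relative input order inside each group, even though the resulting map itself is unordered (illustrative, not part of the patch):

    iex> Enum.group_by([1, 2, 3, 4, 5, 6], &rem(&1, 2))
    %{0 => [2, 4, 6], 1 => [1, 3, 5]}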
+ + ## Examples + + iex> Enum.group_by(~w{ant buffalo cat dingo}, &String.length/1) + %{3 => ["ant", "cat"], 7 => ["buffalo"], 5 => ["dingo"]} + + iex> Enum.group_by(~w{ant buffalo cat dingo}, &String.length/1, &String.first/1) + %{3 => ["a", "c"], 7 => ["b"], 5 => ["d"]} + + """ + @spec group_by(t, (element -> any), (element -> any)) :: map + def group_by(enumerable, key_fun, value_fun \\ fn x -> x end) + + def group_by(enumerable, key_fun, value_fun) when is_function(key_fun) do + reduce(reverse(enumerable), %{}, fn entry, categories -> + value = value_fun.(entry) + Map.update(categories, key_fun.(entry), [value], &[value | &1]) + end) + end + + # TODO: Remove on 2.0 + def group_by(enumerable, dict, fun) do + IO.warn "Enum.group_by/3 with a map/dictionary as second element is deprecated. " <> + "A map is used by default and it is no longer required to pass one to this function" + + # Avoid warnings about Dict + dict_module = Dict + + reduce(reverse(enumerable), dict, fn(entry, categories) -> + dict_module.update(categories, fun.(entry), [entry], &[entry | &1]) + end) + end + @doc """ Intersperses `element` between each element of the enumeration. - Complexity: O(n) + Complexity: O(n). ## Examples @@ -836,20 +1094,20 @@ defmodule Enum do """ @spec intersperse(t, element) :: list - def intersperse(collection, element) do + def intersperse(enumerable, element) do list = - reduce(collection, [], fn(x, acc) -> + reduce(enumerable, [], fn(x, acc) -> [x, element | acc] end) |> :lists.reverse() case list do - [] -> [] - [_|t] -> t # Head is a superfluous intersperser element + [] -> [] + [_ | t] -> t # Head is a superfluous intersperser element end end @doc """ - Inserts the given enumerable into a collectable. + Inserts the given `enumerable` into a `collectable`. ## Examples @@ -859,49 +1117,80 @@ defmodule Enum do iex> Enum.into([a: 1, b: 2], %{}) %{a: 1, b: 2} + iex> Enum.into(%{a: 1}, %{b: 2}) + %{a: 1, b: 2} + + iex> Enum.into([a: 1, a: 2], %{}) + %{a: 2} + """ @spec into(Enumerable.t, Collectable.t) :: Collectable.t - def into(collection, list) when is_list(list) do - list ++ to_list(collection) + def into(enumerable, collectable) when is_list(collectable) do + collectable ++ to_list(enumerable) + end + + def into(%_{} = enumerable, collectable) do + into_protocol(enumerable, collectable) + end + + def into(enumerable, %_{} = collectable) do + into_protocol(enumerable, collectable) end - def into(collection, %{} = map) when is_list(collection) and map_size(map) == 0 do - :maps.from_list(collection) + def into(%{} = enumerable, %{} = collectable) do + Map.merge(collectable, enumerable) end - def into(collection, collectable) do + def into(enumerable, %{} = collectable) when is_list(enumerable) do + Map.merge(collectable, :maps.from_list(enumerable)) + end + + def into(enumerable, %{} = collectable) do + reduce(enumerable, collectable, fn {key, val}, acc -> + Map.put(acc, key, val) + end) + end + + def into(enumerable, collectable) do + into_protocol(enumerable, collectable) + end + + defp into_protocol(enumerable, collectable) do {initial, fun} = Collectable.into(collectable) - into(collection, initial, fun, fn x, acc -> - fun.(acc, {:cont, x}) + into(enumerable, initial, fun, fn entry, acc -> + fun.(acc, {:cont, entry}) end) end @doc """ - Inserts the given enumerable into a collectable - according to the transformation function. + Inserts the given `enumerable` into a `collectable` according to the + transformation function. 
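Structs always go through `into_protocol/2` above, which drives the target's `Collectable` implementation, while plain maps and lists take the fast paths; for example, collecting into a `MapSet` (illustrative, not part of the patch):

    iex> Enum.into([1, 2, 2, 3], MapSet.new())
    #MapSet<[1, 2, 3]>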
## Examples iex> Enum.into([2, 3], [3], fn x -> x * 3 end) [3, 6, 9] + iex> Enum.into(%{a: 1, b: 2}, %{c: 3}, fn {k, v} -> {k, v * 2} end) + %{a: 2, b: 4, c: 3} + """ @spec into(Enumerable.t, Collectable.t, (term -> term)) :: Collectable.t - def into(collection, list, transform) when is_list(list) and is_function(transform, 1) do - list ++ map(collection, transform) + def into(enumerable, collectable, transform) when is_list(collectable) do + collectable ++ map(enumerable, transform) end - def into(collection, collectable, transform) when is_function(transform, 1) do + def into(enumerable, collectable, transform) do {initial, fun} = Collectable.into(collectable) - into(collection, initial, fun, fn x, acc -> - fun.(acc, {:cont, transform.(x)}) + into(enumerable, initial, fun, fn entry, acc -> + fun.(acc, {:cont, transform.(entry)}) end) end - defp into(collection, initial, fun, callback) do + defp into(enumerable, initial, fun, callback) do try do - reduce(collection, initial, callback) + reduce(enumerable, initial, callback) catch kind, reason -> stacktrace = System.stacktrace @@ -913,14 +1202,13 @@ defmodule Enum do end @doc """ - Joins the given `collection` according to `joiner`. - `joiner` can be either a binary or a list and the - result will be of the same type as `joiner`. If - `joiner` is not passed at all, it defaults to an - empty binary. + Joins the given enumerable into a binary using `joiner` as a + separator. - All items in the collection must be convertible - to a binary, otherwise an error is raised. + If `joiner` is not passed at all, it defaults to the empty binary. + + All items in the enumerable must be convertible to a binary, + otherwise an error is raised. ## Examples @@ -931,14 +1219,13 @@ defmodule Enum do "1 = 2 = 3" """ - @spec join(t) :: String.t @spec join(t, String.t) :: String.t - def join(collection, joiner \\ "") + def join(enumerable, joiner \\ "") - def join(collection, joiner) when is_binary(joiner) do - reduced = reduce(collection, :first, fn - entry, :first -> enum_to_string(entry) - entry, acc -> [acc, joiner|enum_to_string(entry)] + def join(enumerable, joiner) when is_binary(joiner) do + reduced = reduce(enumerable, :first, fn + entry, :first -> entry_to_string(entry) + entry, acc -> [acc, joiner | entry_to_string(entry)] end) if reduced == :first do "" @@ -948,10 +1235,10 @@ defmodule Enum do end @doc """ - Returns a new collection, where each item is the result - of invoking `fun` on each corresponding item of `collection`. + Returns a list where each item is the result of invoking + `fun` on each corresponding item of `enumerable`. - For dicts, the function expects a key-value tuple. + For maps, the function expects a key-value tuple. ## Examples @@ -963,23 +1250,64 @@ defmodule Enum do """ @spec map(t, (element -> any)) :: list - def map(collection, fun) when is_list(collection) do - for item <- collection, do: fun.(item) + def map(enumerable, fun) + + def map(enumerable, fun) when is_list(enumerable) do + :lists.map(fun, enumerable) + end + + def map(enumerable, fun) do + reduce(enumerable, [], R.map(fun)) |> :lists.reverse end - def map(collection, fun) do - Enumerable.reduce(collection, {:cont, []}, R.map(fun)) |> elem(1) |> :lists.reverse + @doc """ + Returns a list of results of invoking `fun` on every `nth` + item of `enumerable`, starting with the first element. + + The first item is always passed to the given function, unless `nth` is `0`. + + The second argument specifying every `nth` item must be a non-negative + integer. 
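Anything that implements `Collectable` can be the destination of `into/2` and `into/3`. A small sketch (using `MapSet`, which implements the protocol):

    # Transform key-value pairs while collecting into a map ...
    Enum.into([one: 1, two: 2], %{}, fn {k, v} -> {Atom.to_string(k), v * v} end)
    #=> %{"one" => 1, "two" => 4}

    # ... or collect into a MapSet to deduplicate on the way in.
    Enum.into([1, 2, 2, 3], MapSet.new())
    #=> #MapSet<[1, 2, 3]>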
+ + If `nth` is `0`, then `enumerable` is directly converted to a list, + without `fun` being ever applied. + + ## Examples + + iex> Enum.map_every(1..10, 2, fn x -> x + 1000 end) + [1001, 2, 1003, 4, 1005, 6, 1007, 8, 1009, 10] + + iex> Enum.map_every(1..10, 3, fn x -> x + 1000 end) + [1001, 2, 3, 1004, 5, 6, 1007, 8, 9, 1010] + + iex> Enum.map_every(1..5, 0, fn x -> x + 1000 end) + [1, 2, 3, 4, 5] + + iex> Enum.map_every([1, 2, 3], 1, fn x -> x + 1000 end) + [1001, 1002, 1003] + + """ + @spec map_every(t, non_neg_integer, (element -> any)) :: list + def map_every(enumerable, nth, fun) + + def map_every(enumerable, 1, fun), do: map(enumerable, fun) + def map_every(enumerable, 0, _fun), do: to_list(enumerable) + def map_every([], nth, _fun) when is_integer(nth) and nth > 1, do: [] + + def map_every(enumerable, nth, fun) when is_integer(nth) and nth > 1 do + {res, _} = reduce(enumerable, {[], :first}, R.map_every(nth, fun)) + :lists.reverse(res) end @doc """ - Maps and joins the given `collection` in one pass. - `joiner` can be either a binary or a list and the - result will be of the same type as `joiner`. If - `joiner` is not passed at all, it defaults to an - empty binary. + Maps and joins the given enumerable in one pass. + + `joiner` can be either a binary or a list and the result will be of + the same type as `joiner`. + If `joiner` is not passed at all, it defaults to an empty binary. - All items in the collection must be convertible - to a binary, otherwise an error is raised. + All items returned from invoking the `mapper` must be convertible to + a binary, otherwise an error is raised. ## Examples @@ -990,14 +1318,13 @@ defmodule Enum do "2 = 4 = 6" """ - @spec map_join(t, (element -> any)) :: String.t - @spec map_join(t, String.t, (element -> any)) :: String.t - def map_join(collection, joiner \\ "", mapper) + @spec map_join(t, String.t, (element -> String.Chars.t)) :: String.t + def map_join(enumerable, joiner \\ "", mapper) - def map_join(collection, joiner, mapper) when is_binary(joiner) do - reduced = reduce(collection, :first, fn - entry, :first -> enum_to_string(mapper.(entry)) - entry, acc -> [acc, joiner|enum_to_string(mapper.(entry))] + def map_join(enumerable, joiner, mapper) when is_binary(joiner) do + reduced = reduce(enumerable, :first, fn + entry, :first -> entry_to_string(mapper.(entry)) + entry, acc -> [acc, joiner | entry_to_string(mapper.(entry))] end) if reduced == :first do @@ -1008,13 +1335,17 @@ defmodule Enum do end @doc """ - Invokes the given `fun` for each item in the `collection` - while also keeping an accumulator. Returns a tuple where - the first element is the mapped collection and the second - one is the final accumulator. + Invokes the given function to each item in the enumerable to reduce + it to a single element, while keeping an accumulator. - For dicts, the first tuple element must be a `{key, value}` - tuple. + Returns a tuple where the first element is the mapped enumerable and + the second one is the final accumulator. + + The function, `fun`, receives two arguments: the first one is the + element, and the second one is the accumulator. `fun` must return + a tuple with two elements in the form of `{result, accumulator}`. + + For maps, the first tuple element must be a `{key, value}` tuple. 
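`map_reduce/3` is handy whenever the mapping of one element depends on what came before it; a sketch that pairs each value with a running total:

    # One pass: emit {value, total_so_far} and keep the total as the accumulator.
    Enum.map_reduce([10, 20, 30], 0, fn x, acc ->
      total = acc + x
      {{x, total}, total}
    end)
    #=> {[{10, 10}, {20, 30}, {30, 60}], 60}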
## Examples @@ -1022,246 +1353,390 @@ defmodule Enum do {[2, 4, 6], 6} """ - @spec map_reduce(t, any, (element, any -> any)) :: any - def map_reduce(collection, acc, fun) when is_list(collection) do - :lists.mapfoldl(fun, acc, collection) + @spec map_reduce(t, any, (element, any -> {any, any})) :: {any, any} + def map_reduce(enumerable, acc, fun) when is_list(enumerable) do + :lists.mapfoldl(fun, acc, enumerable) end - def map_reduce(collection, acc, fun) do - {list, acc} = reduce(collection, {[], acc}, fn(entry, {list, acc}) -> - {new_entry, acc} = fun.(entry, acc) - {[new_entry|list], acc} + def map_reduce(enumerable, acc, fun) do + {list, acc} = reduce(enumerable, {[], acc}, + fn(entry, {list, acc}) -> + {new_entry, acc} = fun.(entry, acc) + {[new_entry | list], acc} end) {:lists.reverse(list), acc} end @doc """ - Returns the maximum value. - Raises `EmptyError` if the collection is empty. + Returns the maximal element in the enumerable according + to Erlang's term ordering. + + If multiple elements are considered maximal, the first one that was found + is returned. + + Calls the provided `empty_fallback` function and returns its value if + `enumerable` is empty. The default `empty_fallback` raises `Enum.EmptyError`. ## Examples iex> Enum.max([1, 2, 3]) 3 + iex> Enum.max([], fn -> 0 end) + 0 + """ - @spec max(t) :: element | no_return - def max(collection) do - reduce(collection, &Kernel.max(&1, &2)) + @spec max(t, (() -> empty_result)) :: element | empty_result | no_return when empty_result: any + def max(enumerable, empty_fallback \\ fn -> raise Enum.EmptyError end) + + def max(enumerable, empty_fallback) do + aggregate(enumerable, &(&1), &Kernel.max/2, empty_fallback) end @doc """ - Returns the maximum value as calculated by the given function. - Raises `EmptyError` if the collection is empty. + Returns the maximal element in the enumerable as calculated + by the given function. + + If multiple elements are considered maximal, the first one that was found + is returned. + + Calls the provided `empty_fallback` function and returns its value if + `enumerable` is empty. The default `empty_fallback` raises `Enum.EmptyError`. ## Examples iex> Enum.max_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) "aaa" - """ - @spec max_by(t, (element -> any)) :: element | no_return - def max_by([h|t], fun) do - reduce(t, {h, fun.(h)}, fn(entry, {_, fun_max} = old) -> - fun_entry = fun.(entry) - if(fun_entry > fun_max, do: {entry, fun_entry}, else: old) - end) |> elem(0) - end + iex> Enum.max_by(["a", "aa", "aaa", "b", "bbb"], &String.length/1) + "aaa" - def max_by([], _fun) do - raise Enum.EmptyError - end + iex> Enum.max_by([], &String.length/1, fn -> nil end) + nil - def max_by(collection, fun) do - result = - reduce(collection, :first, fn - entry, {_, fun_max} = old -> - fun_entry = fun.(entry) - if(fun_entry > fun_max, do: {entry, fun_entry}, else: old) - entry, :first -> - {entry, fun.(entry)} - end) + """ + @spec max_by(t, (element -> any), (() -> empty_result)) :: element | empty_result | no_return when empty_result: any + def max_by(enumerable, fun, empty_fallback \\ fn -> raise Enum.EmptyError end) - case result do - :first -> raise Enum.EmptyError - {entry, _} -> entry - end + def max_by(enumerable, fun, empty_fallback) do + aggregate_by(enumerable, &{&1, fun.(&1)}, fn entry, {_, fun_max} = old -> + fun_entry = fun.(entry) + if(fun_entry > fun_max, do: {entry, fun_entry}, else: old) + end, empty_fallback) end @doc """ - Checks if `value` exists within the `collection`. 
+ Checks if `element` exists within the enumerable. - Membership is tested with the match (`===`) operator, although - enumerables like ranges may include floats inside the given - range. + Membership is tested with the match (`===`) operator. ## Examples iex> Enum.member?(1..10, 5) true + iex> Enum.member?(1..10, 5.0) + false + + iex> Enum.member?([1.0, 2.0, 3.0], 2) + false + iex> Enum.member?([1.0, 2.0, 3.0], 2.000) + true iex> Enum.member?([:a, :b, :c], :d) false """ @spec member?(t, element) :: boolean - def member?(collection, value) when is_list(collection) do - :lists.member(value, collection) + def member?(enumerable, element) when is_list(enumerable) do + :lists.member(element, enumerable) end - def member?(collection, value) do - case Enumerable.member?(collection, value) do - {:ok, value} when is_boolean(value) -> - value + def member?(enumerable, element) do + case Enumerable.member?(enumerable, element) do + {:ok, element} when is_boolean(element) -> + element {:error, module} -> - module.reduce(collection, {:cont, false}, fn - v, _ when v === value -> {:halt, true} - _, _ -> {:cont, false} + module.reduce(enumerable, {:cont, false}, fn + v, _ when v === element -> {:halt, true} + _, _ -> {:cont, false} end) |> elem(1) end end @doc """ - Returns the minimum value. - Raises `EmptyError` if the collection is empty. + Returns the minimal element in the enumerable according + to Erlang's term ordering. + + If multiple elements are considered minimal, the first one that was found + is returned. + + Calls the provided `empty_fallback` function and returns its value if + `enumerable` is empty. The default `empty_fallback` raises `Enum.EmptyError`. ## Examples iex> Enum.min([1, 2, 3]) 1 + iex> Enum.min([], fn -> 0 end) + 0 + """ - @spec min(t) :: element | no_return - def min(collection) do - reduce(collection, &Kernel.min(&1, &2)) + @spec min(t, (() -> empty_result)) :: element | empty_result | no_return when empty_result: any + def min(enumerable, empty_fallback \\ fn -> raise Enum.EmptyError end) + + def min(enumerable, empty_fallback) do + aggregate(enumerable, &(&1), &Kernel.min/2, empty_fallback) end @doc """ - Returns the minimum value as calculated by the given function. - Raises `EmptyError` if the collection is empty. + Returns the minimal element in the enumerable as calculated + by the given function. + + If multiple elements are considered minimal, the first one that was found + is returned. + + Calls the provided `empty_fallback` function and returns its value if + `enumerable` is empty. The default `empty_fallback` raises `Enum.EmptyError`. 
## Examples iex> Enum.min_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) "a" + iex> Enum.min_by(["a", "aa", "aaa", "b", "bbb"], &String.length/1) + "a" + + iex> Enum.min_by([], &String.length/1, fn -> nil end) + nil + """ - @spec min_by(t, (element -> any)) :: element | no_return - def min_by([h|t], fun) do - reduce(t, {h, fun.(h)}, fn(entry, {_, fun_min} = old) -> + @spec min_by(t, (element -> any), (() -> empty_result)) :: element | empty_result | no_return when empty_result: any + def min_by(enumerable, fun, empty_fallback \\ fn -> raise Enum.EmptyError end) + + def min_by(enumerable, fun, empty_fallback) do + aggregate_by(enumerable, &{&1, fun.(&1)}, fn entry, {_, fun_min} = old -> fun_entry = fun.(entry) if(fun_entry < fun_min, do: {entry, fun_entry}, else: old) - end) |> elem(0) + end, empty_fallback) end - def min_by([], _fun) do - raise Enum.EmptyError - end + @doc """ + Returns a tuple with the minimal and the maximal elements in the + enumerable according to Erlang's term ordering. - def min_by(collection, fun) do - result = - reduce(collection, :first, fn - entry, {_, fun_min} = old -> - fun_entry = fun.(entry) - if(fun_entry < fun_min, do: {entry, fun_entry}, else: old) - entry, :first -> - {entry, fun.(entry)} - end) + If multiple elements are considered maximal or minimal, the first one + that was found is returned. - case result do - :first -> raise Enum.EmptyError - {entry, _} -> entry - end + Calls the provided `empty_fallback` function and returns its value if + `enumerable` is empty. The default `empty_fallback` raises `Enum.EmptyError`. + + ## Examples + + iex> Enum.min_max([2, 3, 1]) + {1, 3} + + iex> Enum.min_max([], fn -> {nil, nil} end) + {nil, nil} + + """ + @spec min_max(t, (() -> empty_result)) :: {element, element} | empty_result | no_return when empty_result: any + def min_max(enumerable, empty_fallback \\ fn -> raise Enum.EmptyError end) + + def min_max(enumerable, empty_fallback) do + aggregate(enumerable, &{&1, &1}, fn entry, {min_value, max_value} -> + {Kernel.min(entry, min_value), Kernel.max(entry, max_value)} + end, empty_fallback) end @doc """ - Returns the sum of all values. + Returns a tuple with the minimal and the maximal elements in the + enumerable as calculated by the given function. - Raises `ArithmeticError` if collection contains a non-numeric value. + If multiple elements are considered maximal or minimal, the first one + that was found is returned. + + Calls the provided `empty_fallback` function and returns its value if + `enumerable` is empty. The default `empty_fallback` raises `Enum.EmptyError`. 
## Examples - iex> Enum.sum([1, 2, 3]) - 6 + iex> Enum.min_max_by(["aaa", "bb", "c"], fn(x) -> String.length(x) end) + {"c", "aaa"} + + iex> Enum.min_max_by(["aaa", "a", "bb", "c", "ccc"], &String.length/1) + {"a", "aaa"} + + iex> Enum.min_max_by([], &String.length/1, fn -> {nil, nil} end) + {nil, nil} + + """ + @spec min_max_by(t, (element -> any), (() -> empty_result)) :: {element, element} | empty_result | no_return when empty_result: any + def min_max_by(enumerable, fun, empty_fallback \\ fn -> raise Enum.EmptyError end) + + def min_max_by(enumerable, fun, empty_fallback) do + aggregate_by(enumerable, + fn entry -> + fun_entry = fun.(entry) + {{entry, entry}, {fun_entry, fun_entry}} + end, + fn entry, {{prev_min, prev_max}, {fun_min, fun_max}} = acc -> + fun_entry = fun.(entry) + cond do + fun_entry < fun_min -> + {{entry, prev_max}, {fun_entry, fun_max}} + fun_entry > fun_max -> + {{prev_min, entry}, {fun_min, fun_entry}} + true -> + acc + end + end, + empty_fallback) + end - """ - @spec sum(t) :: number - def sum(collection) do - reduce(collection, 0, &+/2) + defp aggregate([head | tail], first, fun, _empty) do + :lists.foldl(fun, first.(head), tail) + end + defp aggregate(enumerable, first, fun, empty) do + ref = make_ref() + reduce(enumerable, ref, fn + element, ^ref -> first.(element) + element, acc -> fun.(element, acc) + end) |> apply_if_ref_or_return(ref, empty) + end + + defp apply_if_ref_or_return(ref, ref, fun), do: fun.() + defp apply_if_ref_or_return(val, _, _fun), do: val + + defp aggregate_by([head | tail], first, fun, _empty) do + :lists.foldl(fun, first.(head), tail) |> elem(0) + end + defp aggregate_by(enumerable, first, fun, empty) do + reduce(enumerable, :empty, fn + element, :empty -> first.(element) + element, acc -> fun.(element, acc) + end) |> apply_if_empty_or_zeroth(empty) end + defp apply_if_empty_or_zeroth(:empty, fun), do: fun.() + defp apply_if_empty_or_zeroth(tuple, _fun) when is_tuple(tuple), do: elem(tuple, 0) + @doc """ - Partitions `collection` into two collections, where the first one contains elements - for which `fun` returns a truthy value, and the second one -- for which `fun` - returns `false` or `nil`. + Splits the `enumerable` in two lists according to the given function `fun`. + + Splits the given `enumerable` in two lists by calling `fun` with each element + in the `enumerable` as its only argument. Returns a tuple with the first list + containing all the elements in `enumerable` for which applying `fun` returned + a truthy value, and a second list with all the elements for which applying + `fun` returned a falsey value (`false` or `nil`). + + The elements in both the returned lists are in the same relative order as they + were in the original enumerable (if such enumerable was ordered, e.g., a + list); see the examples below.
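Similarly, `min_max_by/3` finds both extremes under a mapping in one pass, resolving ties by first occurrence:

    # Shortest and longest word in a single traversal.
    Enum.min_max_by(["gnu", "ox", "heron"], &String.length/1)
    #=> {"ox", "heron"}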
## Examples - iex> Enum.partition([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) - {[2], [1,3]} + iex> Enum.split_with([5, 4, 3, 2, 1, 0], fn(x) -> rem(x, 2) == 0 end) + {[4, 2, 0], [5, 3, 1]} + + iex> Enum.split_with(%{a: 1, b: -2, c: 1, d: -3}, fn({_k, v}) -> v < 0 end) + {[b: -2, d: -3], [a: 1, c: 1]} + + iex> Enum.split_with(%{a: 1, b: -2, c: 1, d: -3}, fn({_k, v}) -> v > 50 end) + {[], [a: 1, b: -2, c: 1, d: -3]} + + iex> Enum.split_with(%{}, fn({_k, v}) -> v > 50 end) + {[], []} """ - @spec partition(t, (element -> any)) :: {list, list} - def partition(collection, fun) do + @spec split_with(t, (element -> any)) :: {list, list} + def split_with(enumerable, fun) do {acc1, acc2} = - reduce(collection, {[], []}, fn(entry, {acc1, acc2}) -> + reduce(enumerable, {[], []}, fn(entry, {acc1, acc2}) -> if fun.(entry) do - {[entry|acc1], acc2} + {[entry | acc1], acc2} else - {acc1, [entry|acc2]} + {acc1, [entry | acc2]} end end) - {:lists.reverse(acc1), :lists.reverse(acc2)} end - @doc """ - Splits `collection` into groups based on `fun`. + @doc false + # TODO: Deprecate by v1.6 (hard-deprecation) + @spec partition(t, (element -> any)) :: {list, list} + def partition(enumerable, fun) do + split_with(enumerable, fun) + end - The result is a dict (by default a map) where each key is - a group and each value is a list of elements from `collection` - for which `fun` returned that group. Ordering is not necessarily - preserved. + @doc """ + Returns a random element of an enumerable. - ## Examples + Raises `Enum.EmptyError` if `enumerable` is empty. - iex> Enum.group_by(~w{ant buffalo cat dingo}, &String.length/1) - %{3 => ["cat", "ant"], 7 => ["buffalo"], 5 => ["dingo"]} + This function uses Erlang's [`:rand` module](http://www.erlang.org/doc/man/rand.html) to calculate + the random value. Check its documentation for setting a + different random algorithm or a different seed. - """ - @spec group_by(t, dict, (element -> any)) :: dict when dict: Dict.t - def group_by(collection, dict \\ %{}, fun) do - reduce(collection, dict, fn(entry, categories) -> - Dict.update(categories, fun.(entry), [entry], &[entry|&1]) - end) - end + The implementation is based on the + [reservoir sampling](https://en.wikipedia.org/wiki/Reservoir_sampling#Relation_to_Fisher-Yates_shuffle) + algorithm. + It assumes that the sample being returned can fit into memory; + the input `enumerable` doesn't have to, as it is traversed just once. - @doc """ - Invokes `fun` for each element in the collection passing that element and the - accumulator `acc` as arguments. `fun`'s return value is stored in `acc`. - Returns the accumulator. + If a range is passed into the function, this function will pick a + random value between the range limits, without traversing the whole + range (thus executing in constant time and constant memory). 
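A usage sketch for `split_with/2`, the replacement for the deprecated `partition/2` (the tagged tuples below are made up for the example):

    {oks, errors} =
      Enum.split_with(
        [{:ok, 1}, {:error, :timeout}, {:ok, 2}],
        fn {tag, _} -> tag == :ok end
      )
    # oks    == [ok: 1, ok: 2]
    # errors == [error: :timeout]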
## Examples - iex> Enum.reduce([1, 2, 3], 0, fn(x, acc) -> x + acc end) - 6 - - """ - @spec reduce(t, any, (element, any -> any)) :: any - def reduce(collection, acc, fun) when is_list(collection) do - :lists.foldl(fun, acc, collection) - end - - def reduce(collection, acc, fun) do - Enumerable.reduce(collection, {:cont, acc}, - fn x, acc -> {:cont, fun.(x, acc)} end) |> elem(1) + # Although not necessary, let's seed the random algorithm + iex> :rand.seed(:exsplus, {101, 102, 103}) + iex> Enum.random([1, 2, 3]) + 2 + iex> Enum.random([1, 2, 3]) + 1 + iex> Enum.random(1..1_000) + 776 + + """ + @spec random(t) :: element | no_return + def random(enumerable) + + def random(first..last), + do: random_integer(first, last) + + def random(enumerable) do + case Enumerable.count(enumerable) do + {:ok, 0} -> + raise Enum.EmptyError + {:ok, count} -> + at(enumerable, random_integer(0, count - 1)) + {:error, _} -> + case take_random(enumerable, 1) do + [] -> raise Enum.EmptyError + [elem] -> elem + end + end end @doc """ - Invokes `fun` for each element in the collection passing that element and the - accumulator `acc` as arguments. `fun`'s return value is stored in `acc`. - The first element of the collection is used as the initial value of `acc`. - Returns the accumulator. + Invokes `fun` for each element in the `enumerable` with the + accumulator. + + The first element of the enumerable is used as the initial value + of the accumulator. Then the function is invoked with the next + element and the accumulator. The result returned by the function + is used as the accumulator for the next iteration, recursively. + When the enumerable is done, the last accumulator is returned. + + Since the first element of the enumerable is used as the initial + value of the accumulator, `fun` will only be executed `n - 1` times + where `n` is the length of the enumerable. This function won't call + the specified function for enumerables that are one-element long. + + If you wish to use another value for the accumulator, use + `Enumerable.reduce/3`. ## Examples @@ -1270,7 +1745,9 @@ defmodule Enum do """ @spec reduce(t, (element, any -> any)) :: any - def reduce([h|t], fun) do + def reduce(enumerable, fun) + + def reduce([h | t], fun) do reduce(t, h, fun) end @@ -1278,9 +1755,9 @@ defmodule Enum do raise Enum.EmptyError end - def reduce(collection, fun) do + def reduce(enumerable, fun) do result = - Enumerable.reduce(collection, {:cont, :first}, fn + Enumerable.reduce(enumerable, {:cont, :first}, fn x, :first -> {:cont, {:acc, x}} x, {:acc, acc} -> @@ -1288,13 +1765,119 @@ defmodule Enum do end) |> elem(1) case result do - :first -> raise Enum.EmptyError + :first -> raise Enum.EmptyError {:acc, acc} -> acc end end @doc """ - Returns elements of collection for which `fun` returns `false`. + Invokes `fun` for each element in the `enumerable` with the accumulator. + + The initial value of the accumulator is `acc`. The function is invoked for + each element in the enumerable with the accumulator. The result returned + by the function is used as the accumulator for the next iteration. + The function returns the last accumulator. + + ## Examples + + iex> Enum.reduce([1, 2, 3], 0, fn(x, acc) -> x + acc end) + 6 + + ## Reduce as a building block + + Reduce (sometimes called `fold`) is a basic building block in functional + programming. Almost all of the functions in the `Enum` module can be + implemented on top of reduce. 
Those functions often rely on other operations, + such as `Enum.reverse/1`, which are optimized by the runtime. + + For example, we could implement `map/2` in terms of `reduce/3` as follows: + + def my_map(enumerable, fun) do + enumerable + |> Enum.reduce([], fn(x, acc) -> [fun.(x) | acc] end) + |> Enum.reverse + end + + In the example above, `Enum.reduce/3` accumulates the result of each call + to `fun` into a list in reverse order, which is correctly ordered at the + end by calling `Enum.reverse/1`. + + Implementing functions like `map/2`, `filter/2` and others is a good + exercise for understanding the power behind `Enum.reduce/3`. When an + operation cannot be expressed by any of the functions in the `Enum` + module, developers will most likely resort to `reduce/3`. + """ + @spec reduce(t, any, (element, any -> any)) :: any + def reduce(enumerable, acc, fun) when is_list(enumerable) do + :lists.foldl(fun, acc, enumerable) + end + + def reduce(first..last, acc, fun) do + if first <= last do + reduce_range_inc(first, last, acc, fun) + else + reduce_range_dec(first, last, acc, fun) + end + end + + def reduce(%{__struct__: _} = enumerable, acc, fun) do + Enumerable.reduce(enumerable, {:cont, acc}, + fn x, acc -> {:cont, fun.(x, acc)} end) |> elem(1) + end + + def reduce(%{} = enumerable, acc, fun) do + :maps.fold(fn k, v, acc -> fun.({k, v}, acc) end, acc, enumerable) + end + + def reduce(enumerable, acc, fun) do + Enumerable.reduce(enumerable, {:cont, acc}, + fn x, acc -> {:cont, fun.(x, acc)} end) |> elem(1) + end + + defp reduce_range_inc(first, first, acc, fun) do + fun.(first, acc) + end + + defp reduce_range_inc(first, last, acc, fun) do + reduce_range_inc(first + 1, last, fun.(first, acc), fun) + end + + defp reduce_range_dec(first, first, acc, fun) do + fun.(first, acc) + end + + defp reduce_range_dec(first, last, acc, fun) do + reduce_range_dec(first - 1, last, fun.(first, acc), fun) + end + + @doc """ + Reduces the enumerable until `fun` returns `{:halt, term}`. + + The return value for `fun` is expected to be + + * `{:cont, acc}` to continue the reduction with `acc` as the new + accumulator or + * `{:halt, acc}` to halt the reduction and return `acc` as the return + value of this function + + ## Examples + + iex> Enum.reduce_while(1..100, 0, fn i, acc -> + ...> if i < 3, do: {:cont, acc + i}, else: {:halt, acc} + ...> end) + 3 + + """ + @spec reduce_while(t, any, (element, any -> {:cont, any} | {:halt, any})) :: any + def reduce_while(enumerable, acc, fun) do + Enumerable.reduce(enumerable, {:cont, acc}, fun) |> elem(1) + end + + @doc """ - Returns elements of collection for which `fun` returns `false`. + Returns elements of `enumerable` for which the function `fun` returns + `false` or `nil`. + + See also `filter/2`. ## Examples @@ -1303,16 +1886,16 @@ """ @spec reject(t, (element -> as_boolean(term))) :: list - def reject(collection, fun) when is_list(collection) do - for item <- collection, !fun.(item), do: item + def reject(enumerable, fun) when is_list(enumerable) do + reject_list(enumerable, fun) end - def reject(collection, fun) do - Enumerable.reduce(collection, {:cont, []}, R.reject(fun)) |> elem(1) |> :lists.reverse + def reject(enumerable, fun) do + reduce(enumerable, [], R.reject(fun)) |> :lists.reverse end @doc """ - Reverses the collection. + Returns a list of elements in `enumerable` in reverse order.
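`reduce_while/3` makes early termination explicit; a minimal sketch that sums values until a budget of 10 would be exceeded:

    Enum.reduce_while([4, 5, 3, 9], 0, fn x, acc ->
      if acc + x <= 10, do: {:cont, acc + x}, else: {:halt, acc}
    end)
    #=> 9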
## Examples @@ -1321,18 +1904,20 @@ defmodule Enum do """ @spec reverse(t) :: list - def reverse(collection) when is_list(collection) do - :lists.reverse(collection) - end + def reverse(enumerable) - def reverse(collection) do - reverse(collection, []) - end + def reverse([]), do: [] + def reverse([_] = list), do: list + def reverse([item1, item2]), do: [item2, item1] + def reverse([item1, item2 | rest]), do: :lists.reverse(rest, [item2, item1]) + def reverse(enumerable), do: reduce(enumerable, [], &[&1 | &2]) @doc """ - Reverses the collection and appends the tail. + Reverses the elements in `enumerable`, appends the tail, and returns + it as a list. + This is an optimization for - `Enum.concat(Enum.reverse(collection), tail)`. + `Enum.concat(Enum.reverse(enumerable), tail)`. ## Examples @@ -1341,182 +1926,240 @@ defmodule Enum do """ @spec reverse(t, t) :: list - def reverse(collection, tail) when is_list(collection) and is_list(tail) do - :lists.reverse(collection, tail) + def reverse(enumerable, tail) when is_list(enumerable) do + :lists.reverse(enumerable, to_list(tail)) end - def reverse(collection, tail) do - reduce(collection, to_list(tail), fn(entry, acc) -> - [entry|acc] + def reverse(enumerable, tail) do + reduce(enumerable, to_list(tail), fn(entry, acc) -> + [entry | acc] end) end @doc """ - Applies the given function to each element in the collection, + Reverses the enumerable in the range from initial position `start` + through `count` elements. + + If `count` is greater than the size of the rest of the enumerable, + then this function will reverse the rest of the enumerable. + + ## Examples + + iex> Enum.reverse_slice([1, 2, 3, 4, 5, 6], 2, 4) + [1, 2, 6, 5, 4, 3] + + """ + @spec reverse_slice(t, non_neg_integer, non_neg_integer) :: list + def reverse_slice(enumerable, start, count) + when is_integer(start) and start >= 0 and is_integer(count) and count >= 0 do + list = reverse(enumerable) + length = length(list) + count = Kernel.min(count, length - start) + + if count > 0 do + reverse_slice(list, length, start + count, count, []) + else + :lists.reverse(list) + end + end + + @doc """ + Applies the given function to each element in the enumerable, storing the result in a list and passing it as the accumulator - for the next computation. + for the next computation. Uses the first element in the enumerable + as the starting value. ## Examples iex> Enum.scan(1..5, &(&1 + &2)) - [1,3,6,10,15] + [1, 3, 6, 10, 15] """ @spec scan(t, (element, any -> any)) :: list - def scan(enum, fun) do - {_, {res, _}} = - Enumerable.reduce(enum, {:cont, {[], :first}}, R.scan_2(fun)) + def scan(enumerable, fun) do + {res, _} = reduce(enumerable, {[], :first}, R.scan2(fun)) :lists.reverse(res) end @doc """ - Applies the given function to each element in the collection, + Applies the given function to each element in the enumerable, storing the result in a list and passing it as the accumulator for the next computation. Uses the given `acc` as the starting value. ## Examples iex> Enum.scan(1..5, 0, &(&1 + &2)) - [1,3,6,10,15] + [1, 3, 6, 10, 15] """ @spec scan(t, any, (element, any -> any)) :: list - def scan(enum, acc, fun) do - {_, {res, _}} = - Enumerable.reduce(enum, {:cont, {[], acc}}, R.scan_3(fun)) + def scan(enumerable, acc, fun) do + {res, _} = reduce(enumerable, {[], acc}, R.scan3(fun)) :lists.reverse(res) end @doc """ - Returns a list of collection elements shuffled. - - Notice that you need to explicitly call `:random.seed/1` and - set a seed value for the random algorithm. 
Otherwise, the - default seed will be set which will always return the same - result. For example, one could do the following to set a seed - dynamically: + Returns a list with the elements of `enumerable` shuffled. - :random.seed(:erlang.now) + This function uses Erlang's [`:rand` module](http://www.erlang.org/doc/man/rand.html) to calculate + the random value. Check its documentation for setting a + different random algorithm or a different seed. ## Examples + # Although not necessary, let's seed the random algorithm + iex> :rand.seed(:exsplus, {1, 2, 3}) iex> Enum.shuffle([1, 2, 3]) - [3, 2, 1] + [2, 1, 3] iex> Enum.shuffle([1, 2, 3]) - [3, 1, 2] + [2, 3, 1] """ @spec shuffle(t) :: list - def shuffle(collection) do - randomized = reduce(collection, [], fn x, acc -> - [{:random.uniform, x}|acc] + def shuffle(enumerable) do + randomized = reduce(enumerable, [], fn x, acc -> + [{:rand.uniform, x} | acc] end) unwrap(:lists.keysort(1, randomized), []) end @doc """ - Returns a subset list of the given collection. Drops elements - until element position `start`, then takes `count` elements. - - If the count is greater than collection length, it returns as - much as possible. If zero, then it returns `[]`. + Returns a subset list of the given enumerable, from `range.first` to `range.last` positions. + + Given `enumerable`, it drops elements until element position `range.first`, + then takes elements until element position `range.last` (inclusive). + + Positions are normalized, meaning that negative positions will be counted from the end + (e.g. `-1` means the last element of the enumerable). + If `range.last` is out of bounds, then it is assigned as the position of the last element. + + If the normalized `range.first` position is out of bounds of the given enumerable, + or this one is greater than the normalized `range.last` position, then `[]` is returned. 
## Examples - iex> Enum.slice(1..100, 5, 10) - [6, 7, 8, 9, 10, 11, 12, 13, 14, 15] + iex> Enum.slice(1..100, 5..10) + [6, 7, 8, 9, 10, 11] - iex> Enum.slice(1..10, 5, 100) + iex> Enum.slice(1..10, 5..20) [6, 7, 8, 9, 10] - iex> Enum.slice(1..10, 5, 0) + # last five elements (negative positions) + iex> Enum.slice(1..30, -5..-1) + [26, 27, 28, 29, 30] + + # last five elements (mixed positive and negative positions) + iex> Enum.slice(1..30, 25..-1) + [26, 27, 28, 29, 30] + + # out of bounds + iex> Enum.slice(1..10, 11..20) + [] + + # range.first is greater than range.last + iex> Enum.slice(1..10, 6..5) [] """ - @spec slice(t, integer, non_neg_integer) :: list - - def slice(_coll, _start, 0), do: [] + @spec slice(t, Range.t) :: list + def slice(enumerable, range) - def slice(coll, start, count) when start < 0 do - {list, new_start} = enumerate_and_count(coll, start) - if new_start >= 0 do - slice(list, new_start, count) + def slice(enumerable, first..last) do + {enumerable, count} = enumerable_and_count(enumerable, 0) + corr_first = if first >= 0, do: first, else: first + count + corr_last = if last >= 0, do: last, else: last + count + amount = corr_last - corr_first + 1 + if corr_first >= 0 and amount > 0 do + slice(enumerable, corr_first, amount) else [] end end - def slice(coll, start, count) when is_list(coll) and start >= 0 and count > 0 do - do_slice(coll, start, count) - end - - def slice(coll, start, count) when start >= 0 and count > 0 do - {_, _, list} = Enumerable.reduce(coll, {:cont, {start, count, []}}, fn - _entry, {start, count, _list} when start > 0 -> - {:cont, {start-1, count, []}} - entry, {start, count, list} when count > 1 -> - {:cont, {start, count-1, [entry|list]}} - entry, {start, count, list} -> - {:halt, {start, count, [entry|list]}} - end) |> elem(1) - - :lists.reverse(list) - end - @doc """ - Returns a subset list of the given collection. Drops elements - until element position `range.first`, then takes elements until element - position `range.last` (inclusive). + Returns a subset list of the given enumerable, from `start` position with `amount` of elements if available. - Positions are calculated by adding the number of items in the collection to - negative positions (so position -3 in a collection with count 5 becomes - position 2). + Given `enumerable`, it drops elements until element position `start`, + then takes `amount` of elements until the end of the enumerable. - The first position (after adding count to negative positions) must be smaller - or equal to the last position. + If `start` is out of bounds, it returns `[]`. - If the start of the range is not a valid offset for the given - collection or if the range is in reverse order, returns `[]`. + If `amount` is greater than `enumerable` length, it returns as many elements as possible. + If `amount` is zero, then `[]` is returned. 
## Examples - iex> Enum.slice(1..100, 5..10) - [6, 7, 8, 9, 10, 11] + iex> Enum.slice(1..100, 5, 10) + [6, 7, 8, 9, 10, 11, 12, 13, 14, 15] - iex> Enum.slice(1..10, 5..20) + # amount to take is greater than the number of elements + iex> Enum.slice(1..10, 5, 100) [6, 7, 8, 9, 10] - iex> Enum.slice(1..10, 11..20) + iex> Enum.slice(1..10, 5, 0) [] - iex> Enum.slice(1..10, 6..5) + # out of bound start position + iex> Enum.slice(1..10, 10, 5) + [] + + # out of bound start position (negative) + iex> Enum.slice(1..10, -11, 5) [] """ - @spec slice(t, Range.t) :: list - def slice(coll, first..last) when first >= 0 and last >= 0 do - # Simple case, which works on infinite collections - if last - first >= 0 do - slice(coll, first, last - first + 1) + @spec slice(t, index, non_neg_integer) :: list + def slice(_enumerable, start, 0) when is_integer(start), do: [] + + def slice(enumerable, start, amount) + when is_integer(start) and start < 0 and is_integer(amount) and amount >= 0 do + {enumerable, new_start} = enumerable_and_count(enumerable, start) + if new_start >= 0 do + slice(enumerable, new_start, amount) else [] end end - def slice(coll, first..last) do - {list, count} = enumerate_and_count(coll, 0) - corr_first = if first >= 0, do: first, else: first + count - corr_last = if last >= 0, do: last, else: last + count - length = corr_last - corr_first + 1 - if corr_first >= 0 and length > 0 do - slice(list, corr_first, length) - else - [] + def slice(first..last, start, amount) + when is_integer(start) and start >= 0 and is_integer(amount) and amount > 0 do + case fetch_range(first, last, start) do + {:ok, sliced_first} -> + finish = start + amount - 1 + case fetch_range(first, last, finish) do + {:ok, sliced_last} -> + reverse(sliced_last..sliced_first) + :error -> + reverse(last..sliced_first) + end + :error -> + [] end end + def slice(enumerable, start, amount) + when is_list(enumerable) and + is_integer(start) and start >= 0 and is_integer(amount) and amount > 0 do + slice_list(enumerable, start, amount) + end + + def slice(enumerable, start, amount) + when is_integer(start) and start >= 0 and is_integer(amount) and amount > 0 do + Enumerable.reduce(enumerable, {:cont, {start, amount, []}}, fn + _entry, {start, amount, _list} when start > 0 -> + {:cont, {start - 1, amount, []}} + entry, {start, amount, list} when amount > 1 -> + {:cont, {start, amount - 1, [entry | list]}} + entry, {start, amount, list} -> + {:halt, {start, amount, [entry | list]}} + end) + |> elem(1) + |> elem(2) + |> :lists.reverse() + end + @doc """ - Sorts the collection according to Elixir's term ordering. + Sorts the enumerable according to Erlang's term ordering. Uses the merge sort algorithm. @@ -1527,99 +2170,149 @@ defmodule Enum do """ @spec sort(t) :: list - def sort(collection) when is_list(collection) do - :lists.sort(collection) + def sort(enumerable) when is_list(enumerable) do + :lists.sort(enumerable) end - def sort(collection) do - sort(collection, &(&1 <= &2)) + def sort(enumerable) do + sort(enumerable, &(&1 <= &2)) end @doc """ - Sorts the collection by the given function. + Sorts the enumerable by the given function. - This function uses the merge sort algorithm. The given function - must return false if the first argument is less than right one. + This function uses the merge sort algorithm. The given function should compare + two arguments, and return `true` if the first argument precedes the second one. 
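Because `slice/3` has a dedicated clause for ranges, taking a small window out of a huge range does not materialize the whole range first:

    Enum.slice(1..10_000_000, 5_000_000, 3)
    #=> [5000001, 5000002, 5000003]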
## Examples - iex> Enum.sort([1, 2, 3], &(&1 > &2)) + iex> Enum.sort([1, 2, 3], &(&1 >= &2)) [3, 2, 1] The sorting algorithm will be stable as long as the given function - returns true for values considered equal: + returns `true` for values considered equal: iex> Enum.sort ["some", "kind", "of", "monster"], &(byte_size(&1) <= byte_size(&2)) ["of", "some", "kind", "monster"] - If the function does not return true, the sorting is not stable and - the order of equal terms may be shuffled: + If the function does not return `true` for equal values, the sorting + is not stable and the order of equal terms may be shuffled. + For example: iex> Enum.sort ["some", "kind", "of", "monster"], &(byte_size(&1) < byte_size(&2)) ["of", "kind", "some", "monster"] """ @spec sort(t, (element, element -> boolean)) :: list - def sort(collection, fun) when is_list(collection) do - :lists.sort(fun, collection) + def sort(enumerable, fun) when is_list(enumerable) do + :lists.sort(fun, enumerable) end - def sort(collection, fun) do - reduce(collection, [], &sort_reducer(&1, &2, fun)) |> sort_terminator(fun) + def sort(enumerable, fun) do + reduce(enumerable, [], &sort_reducer(&1, &2, fun)) + |> sort_terminator(fun) + end + + @doc """ + Sorts the mapped results of the enumerable according to the provided `sorter` + function. + + This function maps each element of the enumerable using the provided `mapper` + function. The enumerable is then sorted by the mapped elements + using the `sorter` function, which defaults to `Kernel.<=/2` + + `sort_by/3` differs from `sort/2` in that it only calculates the + comparison value for each element in the enumerable once instead of + once for each element in each comparison. + If the same function is being called on both element, it's also more + compact to use `sort_by/3`. + + This technique is also known as a + _[Schwartzian Transform](https://en.wikipedia.org/wiki/Schwartzian_transform)_, + or the _Lisp decorate-sort-undecorate idiom_ as the `mapper` + is decorating the original `enumerable`; then `sorter` is sorting the + decorations; and finally the enumerable is being undecorated so only + the original elements remain, but now in sorted order. + + ## Examples + + Using the default `sorter` of `<=/2`: + + iex> Enum.sort_by ["some", "kind", "of", "monster"], &byte_size/1 + ["of", "some", "kind", "monster"] + + Using a custom `sorter` to override the order: + + iex> Enum.sort_by ["some", "kind", "of", "monster"], &byte_size/1, &>=/2 + ["monster", "some", "kind", "of"] + + """ + @spec sort_by(t, (element -> mapped_element), + (mapped_element, mapped_element -> boolean)) + :: list when mapped_element: element + + def sort_by(enumerable, mapper, sorter \\ &<=/2) do + enumerable + |> map(&{&1, mapper.(&1)}) + |> sort(&sorter.(elem(&1, 1), elem(&2, 1))) + |> map(&elem(&1, 0)) end @doc """ - Splits the enumerable into two collections, leaving `count` - elements in the first one. If `count` is a negative number, - it starts counting from the back to the beginning of the - collection. + Splits the `enumerable` into two enumerables, leaving `count` + elements in the first one. + + If `count` is a negative number, it starts counting from the + back to the beginning of the enumerable. - Be aware that a negative `count` implies the collection + Be aware that a negative `count` implies the `enumerable` will be enumerated twice: once to calculate the position, and a second time to do the actual splitting. 
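Since `sort_by/3` compares the mapped values with the `sorter`, returning a tuple from the `mapper` gives a multi-level sort for free (the names below are illustrative):

    # Sort by surname, then by first name.
    Enum.sort_by(
      [{"Ada", "Lovelace"}, {"Alan", "Turing"}, {"Grace", "Hopper"}],
      fn {first, last} -> {last, first} end
    )
    #=> [{"Grace", "Hopper"}, {"Ada", "Lovelace"}, {"Alan", "Turing"}]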
## Examples iex> Enum.split([1, 2, 3], 2) - {[1,2], [3]} + {[1, 2], [3]} iex> Enum.split([1, 2, 3], 10) - {[1,2,3], []} + {[1, 2, 3], []} iex> Enum.split([1, 2, 3], 0) - {[], [1,2,3]} + {[], [1, 2, 3]} iex> Enum.split([1, 2, 3], -1) - {[1,2], [3]} + {[1, 2], [3]} iex> Enum.split([1, 2, 3], -5) - {[], [1,2,3]} + {[], [1, 2, 3]} """ @spec split(t, integer) :: {list, list} - def split(collection, count) when is_list(collection) and count >= 0 do - do_split(collection, count, []) + def split(enumerable, count) when is_list(enumerable) and count >= 0 do + split_list(enumerable, count, []) end - def split(collection, count) when count >= 0 do + def split(enumerable, count) when count >= 0 do {_, list1, list2} = - reduce(collection, {count, [], []}, fn(entry, {counter, acc1, acc2}) -> - if counter > 0 do - {counter - 1, [entry|acc1], acc2} - else - {counter, acc1, [entry|acc2]} - end + reduce(enumerable, {count, [], []}, + fn(entry, {counter, acc1, acc2}) -> + if counter > 0 do + {counter - 1, [entry | acc1], acc2} + else + {counter, acc1, [entry | acc2]} + end end) {:lists.reverse(list1), :lists.reverse(list2)} end - def split(collection, count) when count < 0 do - do_split_reverse(reverse(collection), abs(count), []) + def split(enumerable, count) when count < 0 do + split_reverse_list(reverse(enumerable), - count, []) end @doc """ - Splits `collection` in two while `fun` returns `true`. + Splits enumerable in two at the position of the element for which + `fun` returns `false` for the first time. ## Examples @@ -1628,37 +2321,66 @@ defmodule Enum do """ @spec split_while(t, (element -> as_boolean(term))) :: {list, list} - def split_while(collection, fun) when is_list(collection) do - do_split_while(collection, fun, []) + def split_while(enumerable, fun) when is_list(enumerable) do + split_while_list(enumerable, fun, []) end - def split_while(collection, fun) do + def split_while(enumerable, fun) do {list1, list2} = - reduce(collection, {[], []}, fn + reduce(enumerable, {[], []}, fn entry, {acc1, []} -> - if(fun.(entry), do: {[entry|acc1], []}, else: {acc1, [entry]}) + if(fun.(entry), do: {[entry | acc1], []}, else: {acc1, [entry]}) entry, {acc1, acc2} -> - {acc1, [entry|acc2]} + {acc1, [entry | acc2]} end) {:lists.reverse(list1), :lists.reverse(list2)} end @doc """ - Takes the first `count` items from the collection. + Returns the sum of all elements. + + Raises `ArithmeticError` if `enumerable` contains a non-numeric value. + + ## Examples + + iex> Enum.sum([1, 2, 3]) + 6 - If a negative `count` is given, the last `count` values will - be taken. For such, the collection is fully enumerated keeping up - to `2 * count` elements in memory. Once the end of the collection is + """ + @spec sum(t) :: number + def sum(enumerable) + + def sum(first..first), + do: first + + def sum(first..last) when last < first, + do: sum(last..first) + + def sum(first..last) when last > first do + div((last + first) * (last - first + 1), 2) + end + + def sum(enumerable) do + reduce(enumerable, 0, &+/2) + end + + @doc """ + Takes the first `count` items from the enumerable. + + `count` must be an integer. If a negative `count` is given, the last + `count` values will be taken. + For such, the enumerable is fully enumerated keeping up + to `2 * count` elements in memory. Once the end of the enumerable is reached, the last `count` elements are returned. 
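The range clause of `sum/1` relies on the arithmetic-series identity `(first + last) * (last - first + 1) / 2`, so summing a large range costs a few integer operations rather than a full traversal:

    Enum.sum(1..1_000_000)
    #=> 500000500000

    # Same result via the closed form used by the range clause:
    div((1 + 1_000_000) * (1_000_000 - 1 + 1), 2)
    #=> 500000500000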
## Examples iex> Enum.take([1, 2, 3], 2) - [1,2] + [1, 2] iex> Enum.take([1, 2, 3], 10) - [1,2,3] + [1, 2, 3] iex> Enum.take([1, 2, 3], 0) [] @@ -1668,52 +2390,162 @@ defmodule Enum do """ @spec take(t, integer) :: list + def take(enumerable, count) - def take(_collection, 0) do - [] - end + def take(_enumerable, 0), do: [] + def take([], _count), do: [] - def take(collection, count) when is_list(collection) and count > 0 do - do_take(collection, count) + def take(enumerable, count) + when is_list(enumerable) and is_integer(count) and count > 0 do + take_list(enumerable, count) end - def take(collection, count) when count > 0 do + def take(enumerable, count) when is_integer(count) and count > 0 do {_, {res, _}} = - Enumerable.reduce(collection, {:cont, {[], count}}, fn(entry, {list, count}) -> - if count > 1 do - {:cont, {[entry|list], count - 1}} - else - {:halt, {[entry|list], count}} - end + Enumerable.reduce(enumerable, {:cont, {[], count}}, + fn(entry, {list, n}) -> + case n do + 0 -> {:halt, {list, n}} + 1 -> {:halt, {[entry | list], n - 1}} + _ -> {:cont, {[entry | list], n - 1}} + end end) :lists.reverse(res) end - def take(collection, count) when count < 0 do - Stream.take(collection, count).({:cont, []}, &{:cont, [&1|&2]}) - |> elem(1) |> :lists.reverse + def take(enumerable, count) when is_integer(count) and count < 0 do + count = -count + + {_count, buf1, buf2} = + reduce(enumerable, {0, [], []}, fn entry, {n, buf1, buf2} -> + buf1 = [entry | buf1] + n = n + 1 + if n == count do + {0, [], buf1} + else + {n, buf1, buf2} + end + end) + + take_last(buf1, buf2, count, []) end + defp take_last(_buf1, _buf2, 0, acc), + do: acc + defp take_last([], [], _, acc), + do: acc + defp take_last([], [head | tail], count, acc), + do: take_last([], tail, count - 1, [head | acc]) + defp take_last([head | tail], buf2, count, acc), + do: take_last(tail, buf2, count - 1, [head | acc]) + @doc """ - Returns a collection of every `nth` item in the collection, + Returns a list of every `nth` item in the enumerable, starting with the first element. + The first item is always included, unless `nth` is 0. + + The second argument specifying every `nth` item must be a non-negative + integer. + ## Examples iex> Enum.take_every(1..10, 2) [1, 3, 5, 7, 9] + iex> Enum.take_every(1..10, 0) + [] + + iex> Enum.take_every([1, 2, 3], 1) + [1, 2, 3] + """ - @spec take_every(t, integer) :: list - def take_every(_collection, 0), do: [] - def take_every(collection, nth) do - {_, {res, _}} = - Enumerable.reduce(collection, {:cont, {[], :first}}, R.take_every(nth)) + @spec take_every(t, non_neg_integer) :: list + def take_every(enumerable, nth) + + def take_every(enumerable, 1), do: to_list(enumerable) + def take_every(_enumerable, 0), do: [] + def take_every([], nth) when is_integer(nth) and nth > 1, do: [] + + def take_every(enumerable, nth) when is_integer(nth) and nth > 1 do + {res, _} = reduce(enumerable, {[], :first}, R.take_every(nth)) :lists.reverse(res) end @doc """ - Takes the items at the beginning of `collection` while `fun` returns `true`. + Takes `count` random items from `enumerable`. + + Notice this function will traverse the whole `enumerable` to + get the random sublist. + + See `random/1` for notes on implementation and random seed. 
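A quick sketch of the negative-`count` behaviour of `take/2` described above, which buffers at most `2 * count` elements while enumerating:

    # Keep only the last three elements.
    Enum.take(1..10, -3)
    #=> [8, 9, 10]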
+ + ## Examples + + # Although not necessary, let's seed the random algorithm + iex> :rand.seed(:exsplus, {1, 2, 3}) + iex> Enum.take_random(1..10, 2) + [5, 4] + iex> Enum.take_random(?a..?z, 5) + 'ipybz' + + """ + @spec take_random(t, non_neg_integer) :: list + def take_random(enumerable, count) + + def take_random(_enumerable, 0), + do: [] + def take_random(first..first, count) when is_integer(count) and count >= 1, + do: [first] + + def take_random(enumerable, count) when is_integer(count) and count > 128 do + reducer = fn(elem, {idx, sample}) -> + jdx = random_integer(0, idx) + cond do + idx < count -> + value = Map.get(sample, jdx) + {idx + 1, Map.put(sample, idx, value) |> Map.put(jdx, elem)} + jdx < count -> + {idx + 1, Map.put(sample, jdx, elem)} + true -> + {idx + 1, sample} + end + end + + {size, sample} = reduce(enumerable, {0, %{}}, reducer) + take_random(sample, Kernel.min(count, size), []) + end + + def take_random(enumerable, count) when is_integer(count) and count > 0 do + sample = Tuple.duplicate(nil, count) + + reducer = fn(elem, {idx, sample}) -> + jdx = random_integer(0, idx) + cond do + idx < count -> + value = elem(sample, jdx) + {idx + 1, put_elem(sample, idx, value) |> put_elem(jdx, elem)} + jdx < count -> + {idx + 1, put_elem(sample, jdx, elem)} + true -> + {idx + 1, sample} + end + end + + {size, sample} = reduce(enumerable, {0, sample}, reducer) + sample |> Tuple.to_list |> take(Kernel.min(count, size)) + end + + defp take_random(_sample, 0, acc), do: acc + + defp take_random(sample, position, acc) do + position = position - 1 + take_random(sample, position, [Map.get(sample, position) | acc]) + end + + @doc """ + Takes the items from the beginning of the enumerable while `fun` returns + a truthy value. ## Examples @@ -1722,82 +2554,146 @@ defmodule Enum do """ @spec take_while(t, (element -> as_boolean(term))) :: list - def take_while(collection, fun) when is_list(collection) do - do_take_while(collection, fun) + def take_while(enumerable, fun) when is_list(enumerable) do + take_while_list(enumerable, fun) end - def take_while(collection, fun) do - Enumerable.reduce(collection, {:cont, []}, R.take_while(fun)) - |> elem(1) |> :lists.reverse + def take_while(enumerable, fun) do + {_, res} = + Enumerable.reduce(enumerable, {:cont, []}, fn(entry, acc) -> + if fun.(entry) do + {:cont, [entry | acc]} + else + {:halt, acc} + end + end) + + :lists.reverse(res) end @doc """ - Convert `collection` to a list. + Converts `enumerable` to a list. ## Examples - iex> Enum.to_list(1 .. 3) + iex> Enum.to_list(1..3) [1, 2, 3] """ - @spec to_list(t) :: [term] - def to_list(collection) when is_list(collection) do - collection + @spec to_list(t) :: [element] + def to_list(enumerable) when is_list(enumerable) do + enumerable end - def to_list(collection) do - reverse(collection) |> :lists.reverse + def to_list(enumerable) do + reverse(enumerable) |> :lists.reverse end + @doc """ + Enumerates the `enumerable`, removing all duplicated elements. + + ## Examples + + iex> Enum.uniq([1, 2, 3, 3, 2, 1]) + [1, 2, 3] + + """ + @spec uniq(t) :: list + def uniq(enumerable) do + uniq_by(enumerable, fn x -> x end) + end + + @doc false + # TODO: Remove on 2.0 + # (hard-deprecated in elixir_dispatch) + def uniq(enumerable, fun) do + uniq_by(enumerable, fun) + end @doc """ - Traverses the given enumerable keeping its shape. + Enumerates the `enumerable`, by removing the elements for which + function `fun` returned duplicate items. 
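Like `reduce_while/3`, `take_while/2` halts the reduction at the first falsy result, so it can cut an infinite stream down to a list:

    Stream.iterate(1, &(&1 * 2))
    |> Enum.take_while(&(&1 < 100))
    #=> [1, 2, 4, 8, 16, 32, 64]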
- It also expects the enumerable to implement the `Collectable` protocol. + The function `fun` maps every element to a term. Two elements are + considered duplicates if the return value of `fun` is equal for + both of them. - ## Examples + The first occurrence of each element is kept. - iex> Enum.traverse(%{a: 1, b: 2}, fn {k, v} -> {k, v * 2} end) - %{a: 2, b: 4} + ## Example + + iex> Enum.uniq_by([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end) + [{1, :x}, {2, :y}] + + iex> Enum.uniq_by([a: {:tea, 2}, b: {:tea, 2}, c: {:coffee, 1}], fn {_, y} -> y end) + [a: {:tea, 2}, c: {:coffee, 1}] """ - @spec traverse(Enumerable.t, (term -> term)) :: Collectable.t - def traverse(collection, transform) when is_list(collection) do - :lists.map(transform, collection) + @spec uniq_by(t, (element -> term)) :: list + + def uniq_by(enumerable, fun) when is_list(enumerable) do + uniq_list(enumerable, %{}, fun) end - def traverse(collection, transform) do - into(collection, Collectable.empty(collection), transform) + def uniq_by(enumerable, fun) do + {list, _} = reduce(enumerable, {[], %{}}, R.uniq_by(fun)) + :lists.reverse(list) end @doc """ - Enumerates the collection, removing all duplicated items. + Opposite of `Enum.zip/2`; extracts a two-element tuples from the + enumerable and groups them together. + + It takes an enumerable with items being two-element tuples and returns + a tuple with two lists, each of which is formed by the first and + second element of each tuple, respectively. + + This function fails unless `enumerable` is or can be converted into a + list of tuples with *exactly* two elements in each tuple. ## Examples - iex> Enum.uniq([1, 2, 3, 2, 1]) - [1, 2, 3] + iex> Enum.unzip([{:a, 1}, {:b, 2}, {:c, 3}]) + {[:a, :b, :c], [1, 2, 3]} - iex> Enum.uniq([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end) - [{1,:x}, {2,:y}] + iex> Enum.unzip(%{a: 1, b: 2}) + {[:a, :b], [1, 2]} """ - @spec uniq(t) :: list - @spec uniq(t, (element -> term)) :: list - def uniq(collection, fun \\ fn x -> x end) + @spec unzip(t) :: {[element], [element]} + def unzip(enumerable) do + {list1, list2} = reduce(enumerable, {[], []}, + fn({el1, el2}, {list1, list2}) -> + {[el1 | list1], [el2 | list2]} + end) - def uniq(collection, fun) when is_list(collection) do - do_uniq(collection, [], fun) + {:lists.reverse(list1), :lists.reverse(list2)} end - def uniq(collection, fun) do - {_, {list, _}} = - Enumerable.reduce(collection, {:cont, {[], []}}, R.uniq(fun)) - :lists.reverse(list) + @doc """ + Returns the enumerable with each element wrapped in a tuple + alongside its index. + + If an `offset` is given, we will index from the given offset instead of from zero. + + ## Examples + + iex> Enum.with_index([:a, :b, :c]) + [a: 0, b: 1, c: 2] + + iex> Enum.with_index([:a, :b, :c], 3) + [a: 3, b: 4, c: 5] + + """ + @spec with_index(t, integer) :: [{element, index}] + def with_index(enumerable, offset \\ 0) do + map_reduce(enumerable, offset, fn x, acc -> + {{x, acc}, acc + 1} + end) |> elem(0) end @doc """ - Zips corresponding elements from two collections into one list + Zips corresponding elements from two enumerables into one list of tuples. The zipping finishes as soon as any enumerable completes. 
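`with_index/2` and `unzip/1` compose naturally: one attaches positions, the other takes the pairs apart again:

    pairs = Enum.with_index(["a", "b", "c"], 1)
    # pairs == [{"a", 1}, {"b", 2}, {"c", 3}]
    Enum.unzip(pairs)
    #=> {["a", "b", "c"], [1, 2, 3]}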
@@ -1805,171 +2701,275 @@ defmodule Enum do ## Examples iex> Enum.zip([1, 2, 3], [:a, :b, :c]) - [{1,:a},{2,:b},{3,:c}] + [{1, :a}, {2, :b}, {3, :c}] - iex> Enum.zip([1,2,3,4,5], [:a, :b, :c]) - [{1,:a},{2,:b},{3,:c}] + iex> Enum.zip([1, 2, 3, 4, 5], [:a, :b, :c]) + [{1, :a}, {2, :b}, {3, :c}] """ @spec zip(t, t) :: [{any, any}] - def zip(coll1, coll2) when is_list(coll1) and is_list(coll2) do - do_zip(coll1, coll2) + def zip(enumerable1, enumerable2) + when is_list(enumerable1) and is_list(enumerable2) do + zip_list(enumerable1, enumerable2) end - def zip(coll1, coll2) do - Stream.zip(coll1, coll2).({:cont, []}, &{:cont, [&1|&2]}) |> elem(1) |> :lists.reverse + def zip(enumerable1, enumerable2) do + zip([enumerable1, enumerable2]) end @doc """ - Returns the collection with each element wrapped in a tuple - alongside its index. + Zips corresponding elements from a collection of enumerables + into one list of tuples. + + The zipping finishes as soon as any enumerable completes. ## Examples - iex> Enum.with_index [1,2,3] - [{1,0},{2,1},{3,2}] + iex> Enum.zip([[1, 2, 3], [:a, :b, :c], ["foo", "bar", "baz"]]) + [{1, :a, "foo"}, {2, :b, "bar"}, {3, :c, "baz"}] + + iex> Enum.zip([[1, 2, 3, 4, 5], [:a, :b, :c]]) + [{1, :a}, {2, :b}, {3, :c}] """ - @spec with_index(t) :: list({element, non_neg_integer}) - def with_index(collection) do - map_reduce(collection, 0, fn x, acc -> - {{x, acc}, acc + 1} - end) |> elem(0) + @spec zip([t]) :: t + + def zip([]), do: [] + + def zip(enumerables) do + Stream.zip(enumerables).({:cont, []}, &{:cont, [&1 | &2]}) + |> elem(1) + |> :lists.reverse end ## Helpers - @compile {:inline, enum_to_string: 1} + @compile {:inline, entry_to_string: 1, reduce: 3} + + defp entry_to_string(entry) when is_binary(entry), do: entry + defp entry_to_string(entry), do: String.Chars.to_string(entry) - defp enumerate_and_count(collection, count) when is_list(collection) do - {collection, length(collection) - abs(count)} + defp enumerable_and_count(enumerable, count) when is_list(enumerable) do + {enumerable, length(enumerable) - abs(count)} end - defp enumerate_and_count(collection, count) do - map_reduce(collection, -abs(count), fn(x, acc) -> {x, acc + 1} end) + defp enumerable_and_count(enumerable, count) do + case Enumerable.count(enumerable) do + {:ok, result} -> + {enumerable, result - abs(count)} + {:error, _module} -> + map_reduce(enumerable, -abs(count), fn(elem, acc) -> {elem, acc + 1} end) + end end - defp enum_to_string(entry) when is_binary(entry), do: entry - defp enum_to_string(entry), do: String.Chars.to_string(entry) + defp random_integer(limit, limit) when is_integer(limit), + do: limit + + defp random_integer(lower_limit, upper_limit) when upper_limit < lower_limit, + do: random_integer(upper_limit, lower_limit) + + defp random_integer(lower_limit, upper_limit) do + lower_limit + :rand.uniform(upper_limit - lower_limit + 1) - 1 + end ## Implementations ## all? - defp do_all?([h|t], fun) do + defp all_list([h | t], fun) do if fun.(h) do - do_all?(t, fun) + all_list(t, fun) else false end end - defp do_all?([], _) do + defp all_list([], _) do true end ## any? 
- defp do_any?([h|t], fun) do + defp any_list([h | t], fun) do if fun.(h) do true else - do_any?(t, fun) + any_list(t, fun) end end - defp do_any?([], _) do + defp any_list([], _) do false end - ## fetch - - defp do_fetch([h|_], 0), do: {:ok, h} - defp do_fetch([_|t], n), do: do_fetch(t, n - 1) - defp do_fetch([], _), do: :error - ## drop - defp do_drop([_|t], counter) when counter > 0 do - do_drop(t, counter - 1) + defp drop_list([_ | t], counter) when counter > 0 do + drop_list(t, counter - 1) end - defp do_drop(list, 0) do + defp drop_list(list, 0) do list end - defp do_drop([], _) do + defp drop_list([], _) do [] end ## drop_while - defp do_drop_while([h|t], fun) do - if fun.(h) do - do_drop_while(t, fun) + defp drop_while_list([head | tail], fun) do + if fun.(head) do + drop_while_list(tail, fun) else - [h|t] + [head | tail] end end - defp do_drop_while([], _) do + defp drop_while_list([], _) do + [] + end + + ## fetch + + defp fetch_list([], _index), + do: :error + defp fetch_list([head | _], 0), + do: {:ok, head} + defp fetch_list([_ | tail], index), + do: fetch_list(tail, index - 1) + + defp fetch_range(first, last, index) when first <= last and index >= 0 do + item = first + index + if item > last, do: :error, else: {:ok, item} + end + + defp fetch_range(first, last, index) when first <= last do + item = last + index + 1 + if item < first, do: :error, else: {:ok, item} + end + + defp fetch_range(first, last, index) when index >= 0 do + item = first - index + if item < last, do: :error, else: {:ok, item} + end + + defp fetch_range(first, last, index) do + item = last - index - 1 + if item > first, do: :error, else: {:ok, item} + end + + ## filter + + defp filter_list([head | tail], fun) do + if fun.(head) do + [head | filter_list(tail, fun)] + else + filter_list(tail, fun) + end + end + + defp filter_list([], _fun) do [] end ## find - defp do_find([h|t], ifnone, fun) do - if fun.(h) do - h + defp find_list([head | tail], default, fun) do + if fun.(head) do + head else - do_find(t, ifnone, fun) + find_list(tail, default, fun) end end - defp do_find([], ifnone, _) do - ifnone + defp find_list([], default, _) do + default end ## find_index - defp do_find_index([h|t], counter, fun) do - if fun.(h) do + defp find_index_list([head | tail], counter, fun) do + if fun.(head) do counter else - do_find_index(t, counter + 1, fun) + find_index_list(tail, counter + 1, fun) end end - defp do_find_index([], _, _) do + defp find_index_list([], _, _) do nil end ## find_value - defp do_find_value([h|t], ifnone, fun) do - fun.(h) || do_find_value(t, ifnone, fun) + defp find_value_list([head | tail], default, fun) do + fun.(head) || find_value_list(tail, default, fun) + end + + defp find_value_list([], default, _) do + default + end + + ## reject + + defp reject_list([head | tail], fun) do + if fun.(head) do + reject_list(tail, fun) + else + [head | reject_list(tail, fun)] + end end - defp do_find_value([], ifnone, _) do - ifnone + defp reject_list([], _fun) do + [] end ## shuffle - defp unwrap([{_, h} | collection], t) do - unwrap(collection, [h|t]) + defp unwrap([{_, h} | enumerable], t) do + unwrap(enumerable, [h | t]) end defp unwrap([], t), do: t + ## reverse_slice + + defp reverse_slice(rest, idx, idx, count, acc) do + {slice, rest} = head_slice(rest, count, []) + + :lists.reverse(rest, :lists.reverse(slice, acc)) + end + + defp reverse_slice([elem | rest], idx, start, count, acc) do + reverse_slice(rest, idx - 1, start, count, [elem | acc]) + end + + defp head_slice(rest, 0, acc), do: {acc, 
rest} + + defp head_slice([elem | rest], count, acc) do + head_slice(rest, count - 1, [elem | acc]) + end + + ## slice + + defp slice_list([], _start, _count), + do: [] + defp slice_list(_list, _start, 0), + do: [] + defp slice_list([head | tail], 0, count), + do: [head | slice_list(tail, 0, count - 1)] + defp slice_list([_ | tail], start, count), + do: slice_list(tail, start - 1, count) + ## sort defp sort_reducer(entry, {:split, y, x, r, rs, bool}, fun) do cond do fun.(y, entry) == bool -> - {:split, entry, y, [x|r], rs, bool} + {:split, entry, y, [x | r], rs, bool} fun.(x, entry) == bool -> - {:split, y, entry, [x|r], rs, bool} + {:split, y, entry, [x | r], rs, bool} r == [] -> {:split, y, x, [entry], rs, bool} true -> @@ -1995,7 +2995,7 @@ defmodule Enum do end defp sort_reducer(entry, acc, _fun) do - [entry|acc] + [entry | acc] end defp sort_terminator({:split, y, x, r, rs, bool}, fun) do @@ -2016,12 +3016,11 @@ defmodule Enum do defp sort_merge(list, fun, false), do: sort_merge(list, [], fun, false) - defp sort_merge([t1, [h2 | t2] | l], acc, fun, true), do: - sort_merge(l, [sort_merge_1(t1, h2, t2, [], fun, false) | acc], fun, true) + sort_merge(l, [sort_merge1(t1, h2, t2, [], fun, false) | acc], fun, true) defp sort_merge([[h2 | t2], t1 | l], acc, fun, false), do: - sort_merge(l, [sort_merge_1(t1, h2, t2, [], fun, false) | acc], fun, false) + sort_merge(l, [sort_merge1(t1, h2, t2, [], fun, false) | acc], fun, false) defp sort_merge([l], [], _fun, _bool), do: l @@ -2031,12 +3030,11 @@ defmodule Enum do defp sort_merge([], acc, fun, bool), do: reverse_sort_merge(acc, [], fun, bool) - defp reverse_sort_merge([[h2 | t2], t1 | l], acc, fun, true), do: - reverse_sort_merge(l, [sort_merge_1(t1, h2, t2, [], fun, true) | acc], fun, true) + reverse_sort_merge(l, [sort_merge1(t1, h2, t2, [], fun, true) | acc], fun, true) defp reverse_sort_merge([t1, [h2 | t2] | l], acc, fun, false), do: - reverse_sort_merge(l, [sort_merge_1(t1, h2, t2, [], fun, true) | acc], fun, false) + reverse_sort_merge(l, [sort_merge1(t1, h2, t2, [], fun, true) | acc], fun, false) defp reverse_sort_merge([l], acc, fun, bool), do: sort_merge([:lists.reverse(l, []) | acc], [], fun, bool) @@ -2044,162 +3042,138 @@ defmodule Enum do defp reverse_sort_merge([], acc, fun, bool), do: sort_merge(acc, [], fun, bool) - - defp sort_merge_1([h1 | t1], h2, t2, m, fun, bool) do + defp sort_merge1([h1 | t1], h2, t2, m, fun, bool) do if fun.(h1, h2) == bool do - sort_merge_2(h1, t1, t2, [h2 | m], fun, bool) + sort_merge2(h1, t1, t2, [h2 | m], fun, bool) else - sort_merge_1(t1, h2, t2, [h1 | m], fun, bool) + sort_merge1(t1, h2, t2, [h1 | m], fun, bool) end end - defp sort_merge_1([], h2, t2, m, _fun, _bool), do: + defp sort_merge1([], h2, t2, m, _fun, _bool), do: :lists.reverse(t2, [h2 | m]) - - defp sort_merge_2(h1, t1, [h2 | t2], m, fun, bool) do + defp sort_merge2(h1, t1, [h2 | t2], m, fun, bool) do if fun.(h1, h2) == bool do - sort_merge_2(h1, t1, t2, [h2 | m], fun, bool) + sort_merge2(h1, t1, t2, [h2 | m], fun, bool) else - sort_merge_1(t1, h2, t2, [h1 | m], fun, bool) + sort_merge1(t1, h2, t2, [h1 | m], fun, bool) end end - defp sort_merge_2(h1, t1, [], m, _fun, _bool), do: + defp sort_merge2(h1, t1, [], m, _fun, _bool), do: :lists.reverse(t1, [h1 | m]) ## split - defp do_split([h|t], counter, acc) when counter > 0 do - do_split(t, counter - 1, [h|acc]) + defp split_list([head | tail], counter, acc) when counter > 0 do + split_list(tail, counter - 1, [head | acc]) end - defp do_split(list, 0, acc) do + defp split_list(list, 0, acc) 
do {:lists.reverse(acc), list} end - defp do_split([], _, acc) do + defp split_list([], _, acc) do {:lists.reverse(acc), []} end - defp do_split_reverse([h|t], counter, acc) when counter > 0 do - do_split_reverse(t, counter - 1, [h|acc]) + defp split_reverse_list([head | tail], counter, acc) when counter > 0 do + split_reverse_list(tail, counter - 1, [head | acc]) end - defp do_split_reverse(list, 0, acc) do + defp split_reverse_list(list, 0, acc) do {:lists.reverse(list), acc} end - defp do_split_reverse([], _, acc) do + defp split_reverse_list([], _, acc) do {[], acc} end ## split_while - defp do_split_while([h|t], fun, acc) do - if fun.(h) do - do_split_while(t, fun, [h|acc]) + defp split_while_list([head | tail], fun, acc) do + if fun.(head) do + split_while_list(tail, fun, [head | acc]) else - {:lists.reverse(acc), [h|t]} + {:lists.reverse(acc), [head | tail]} end end - defp do_split_while([], _, acc) do + defp split_while_list([], _, acc) do {:lists.reverse(acc), []} end ## take - defp do_take([h|t], counter) when counter > 0 do - [h|do_take(t, counter - 1)] + defp take_list([head | tail], counter) when counter > 0 do + [head | take_list(tail, counter - 1)] end - defp do_take(_list, 0) do + defp take_list(_list, 0) do [] end - defp do_take([], _) do + defp take_list([], _counter) do [] end ## take_while - defp do_take_while([h|t], fun) do - if fun.(h) do - [h|do_take_while(t, fun)] + defp take_while_list([head | tail], fun) do + if fun.(head) do + [head | take_while_list(tail, fun)] else [] end end - defp do_take_while([], _) do + defp take_while_list([], _) do [] end ## uniq - defp do_uniq([h|t], acc, fun) do - fun_h = fun.(h) - case :lists.member(fun_h, acc) do - true -> do_uniq(t, acc, fun) - false -> [h|do_uniq(t, [fun_h|acc], fun)] + defp uniq_list([head | tail], set, fun) do + value = fun.(head) + case set do + %{^value => true} -> uniq_list(tail, set, fun) + %{} -> [head | uniq_list(tail, Map.put(set, value, true), fun)] end end - defp do_uniq([], _acc, _fun) do + defp uniq_list([], _set, _fun) do [] end ## zip - defp do_zip([h1|next1], [h2|next2]) do - [{h1, h2}|do_zip(next1, next2)] - end - - defp do_zip(_, []), do: [] - defp do_zip([], _), do: [] - - ## slice - - defp do_slice([], _start, _count) do - [] - end - - defp do_slice(_list, _start, 0) do - [] + defp zip_list([h1 | next1], [h2 | next2]) do + [{h1, h2} | zip_list(next1, next2)] end - defp do_slice([h|t], 0, count) do - [h|do_slice(t, 0, count-1)] - end - - defp do_slice([_|t], start, count) do - do_slice(t, start-1, count) - end + defp zip_list(_, []), do: [] + defp zip_list([], _), do: [] end defimpl Enumerable, for: List do - def reduce(_, {:halt, acc}, _fun), do: {:halted, acc} - def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)} - def reduce([], {:cont, acc}, _fun), do: {:done, acc} - def reduce([h|t], {:cont, acc}, fun), do: reduce(t, fun.(h, acc), fun) + def count(_list), + do: {:error, __MODULE__} def member?(_list, _value), do: {:error, __MODULE__} - def count(_list), - do: {:error, __MODULE__} + + def reduce(_, {:halt, acc}, _fun), do: {:halted, acc} + def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)} + def reduce([], {:cont, acc}, _fun), do: {:done, acc} + def reduce([h | t], {:cont, acc}, fun), do: reduce(t, fun.(h, acc), fun) end defimpl Enumerable, for: Map do - def reduce(map, acc, fun) do - do_reduce(:maps.to_list(map), acc, fun) + def count(map) do + {:ok, map_size(map)} end - defp do_reduce(_, {:halt, acc}, _fun), do: {:halted, 
acc} - defp do_reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &do_reduce(list, &1, fun)} - defp do_reduce([], {:cont, acc}, _fun), do: {:done, acc} - defp do_reduce([h|t], {:cont, acc}, fun), do: do_reduce(t, fun.(h, acc), fun) - def member?(map, {key, value}) do {:ok, match?({:ok, ^value}, :maps.find(key, map))} end @@ -2208,16 +3182,23 @@ defimpl Enumerable, for: Map do {:ok, false} end - def count(map) do - {:ok, map_size(map)} + def reduce(map, acc, fun) do + reduce_list(:maps.to_list(map), acc, fun) end + + defp reduce_list(_, {:halt, acc}, _fun), do: {:halted, acc} + defp reduce_list(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce_list(list, &1, fun)} + defp reduce_list([], {:cont, acc}, _fun), do: {:done, acc} + defp reduce_list([h | t], {:cont, acc}, fun), do: reduce_list(t, fun.(h, acc), fun) end defimpl Enumerable, for: Function do - def reduce(function, acc, fun) when is_function(function, 2), - do: function.(acc, fun) - def member?(_function, _value), - do: {:error, __MODULE__} def count(_function), do: {:error, __MODULE__} + + def member?(_function, _value), + do: {:error, __MODULE__} + + def reduce(function, acc, fun), + do: function.(acc, fun) end diff --git a/lib/elixir/lib/exception.ex b/lib/elixir/lib/exception.ex index 398f0f1f5a6..bc1bf24cc68 100644 --- a/lib/elixir/lib/exception.ex +++ b/lib/elixir/lib/exception.ex @@ -4,26 +4,31 @@ defmodule Exception do Note that stacktraces in Elixir are updated on throw, errors and exits. For example, at any given moment, - `System.stacktrace` will return the stacktrace for the - last throw/error/exit that ocurred in the current process. + `System.stacktrace/0` will return the stacktrace for the + last throw/error/exit that occurred in the current process. - Do not rely on the particular format returned by the `format` + Do not rely on the particular format returned by the `format*` functions in this module. They may be changed in future releases in order to better suit Elixir's tool chain. In other words, - by using the functions in this module it is guarantee you will + by using the functions in this module it is guaranteed you will format exceptions as in the current Elixir version being used. """ - @typedoc "The exception type (as generated by defexception)" - @type t :: %{__struct__: module, __exception__: true} + @typedoc "The exception type" + @type t :: %{ + required(:__struct__) => module, + required(:__exception__) => true, + atom => any + } @typedoc "The kind handled by formatting functions" - @type kind :: :error | :exit | :throw | {:EXIT, pid} + @type kind :: :error | non_error_kind + @typep non_error_kind :: :exit | :throw | {:EXIT, pid} @type stacktrace :: [stacktrace_entry] @type stacktrace_entry :: - {module, function, arity_or_args, location} | - {function, arity_or_args, location} + {module, atom, arity_or_args, location} | + {(... -> any), arity_or_args, location} @typep arity_or_args :: non_neg_integer | list @typep location :: Keyword.t @@ -32,22 +37,31 @@ defmodule Exception do @callback message(t) :: String.t @doc """ - Returns true if the given argument is an exception. + Returns `true` if the given `term` is an exception. """ - def exception?(%{__struct__: struct, __exception__: true}) when is_atom(struct), do: true + def exception?(term) + + def exception?(%{__struct__: struct, __exception__: true}) when is_atom(struct), + do: true + def exception?(_), do: false @doc """ - Gets the message for an exception. + Gets the message for an `exception`. 
""" def message(%{__struct__: module, __exception__: true} = exception) when is_atom(module) do try do module.message(exception) rescue e -> - raise ArgumentError, - "Got #{inspect e.__struct__} with message " <> - "\"#{message(e)}\" while retrieving message for #{inspect(exception)}" + "got #{inspect e.__struct__} with message #{inspect message(e)} " <> + "while retrieving Exception.message/1 for #{inspect(exception)}" + else + x when is_binary(x) -> x + x -> + "got #{inspect(x)} " <> + "while retrieving Exception.message/1 for #{inspect(exception)} " <> + "(expected a string)" end end @@ -67,7 +81,7 @@ defmodule Exception do an empty stacktrace, `[]`, must be used. """ @spec normalize(:error, any, stacktrace) :: t - @spec normalize(kind, payload, stacktrace) :: payload when payload: var + @spec normalize(non_error_kind, payload, stacktrace) :: payload when payload: var # Generating a stacktrace is expensive, default to nil # to only fetch it when needed. @@ -86,7 +100,7 @@ defmodule Exception do end @doc """ - Normalizes and formats any throw, error and exit. + Normalizes and formats any throw/error/exit. The message is formatted and displayed in the same format as used by Elixir's CLI. @@ -119,7 +133,7 @@ defmodule Exception do end @doc """ - Normalizes and formats throw/errors/exits and stacktrace. + Normalizes and formats throw/errors/exits and stacktraces. It relies on `format_banner/3` and `format_stacktrace/1` to generate the final format. @@ -127,9 +141,7 @@ defmodule Exception do Note that `{:EXIT, pid}` do not generate a stacktrace though (as they are retrieved as messages without stacktraces). """ - @spec format(kind, any, stacktrace | nil) :: String.t - def format(kind, payload, stacktrace \\ nil) def format({:EXIT, _} = kind, any, _) do @@ -146,7 +158,161 @@ defmodule Exception do end @doc """ - Formats an exit, returns a string. + Attaches information to exceptions for extra debugging. + + This operation is potentially expensive, as it reads data + from the filesystem, parse beam files, evaluates code and + so on. Currently the following exceptions may be annotated: + + * `FunctionClauseError` - annotated with the arguments + used on the call and available clauses + + """ + @spec blame(:error, any, stacktrace) :: {t, stacktrace} + @spec blame(non_error_kind, payload, stacktrace) :: {payload, stacktrace} when payload: var + def blame(:error, error, stacktrace) do + case normalize(:error, error, stacktrace) do + %{__struct__: FunctionClauseError} = struct -> + blame_function_clause_error(struct, stacktrace) + _ -> + {error, stacktrace} + end + end + + def blame(_kind, reason, stacktrace) do + {reason, stacktrace} + end + + defp blame_function_clause_error(%{module: module, function: function, arity: arity} = exception, + [{module, function, args, meta} | rest]) + when length(args) == arity do + exception = + case blame_mfa(module, function, args) do + {:ok, kind, clauses} -> %{exception | args: args, kind: kind, clauses: clauses} + :error -> %{exception | args: args} + end + {exception, [{module, function, arity, meta} | rest]} + end + defp blame_function_clause_error(exception, stacktrace) do + {exception, stacktrace} + end + + @doc """ + Blames the invocation of the given module, function and arguments. + + This function will retrieve the available clauses from bytecode + and evaluate them against the given arguments. 
The clauses are + returned as a list of `{args, guards}` pairs where each argument + and each top-level condition in a guard separated by `and`/`or` + is wrapped in a tuple with blame metadata. + + This function returns either `{:ok, definition, clauses}` or `:error`. + Where `definition` is `:def`, `:defp`, `:defmacro` or `:defmacrop`. + Note this functionality requires Erlang/OTP 20, otherwise `:error` + is always returned. + """ + @spec blame_mfa(module, function, args :: [term]) :: + {:ok, :def | :defp | :defmacro | :defmacrop, [{args :: [term], guards :: [term]}]} | :error + def blame_mfa(module, function, args) when is_atom(module) and is_atom(function) and is_list(args) do + try do + blame_mfa(module, function, length(args), args) + rescue + _ -> :error + end + end + + defp blame_mfa(module, function, arity, call_args) do + with path when is_list(path) <- :code.which(module), + {:ok, {_, [debug_info: {:debug_info_v1, backend, data}]}} <- :beam_lib.chunks(path, [:debug_info]), + {:ok, %{definitions: defs}} <- backend.debug_info(:elixir_v1, module, data, []), + {_, kind, _, clauses} <- List.keyfind(defs, {function, arity}, 0) do + clauses = + for {meta, ex_args, guards, _block} <- clauses do + scope = :elixir_erl.definition_scope(meta, kind, function, arity, "nofile") + {erl_args, scope} = + :elixir_erl_clauses.match(&:elixir_erl_pass.translate_args/2, ex_args, scope) + {args, binding} = + [call_args, ex_args, erl_args] + |> Enum.zip() + |> Enum.map_reduce([], &blame_arg/2) + guards = Enum.map(guards, &blame_guard(&1, scope, binding)) + {args, guards} + end + {:ok, kind, clauses} + else + _ -> :error + end + end + + defp blame_arg({call_arg, ex_arg, erl_arg}, binding) do + {match?, binding} = blame_arg(erl_arg, call_arg, binding) + {blame_wrap(match?, rewrite_arg(ex_arg)), binding} + end + + defp blame_arg(erl_arg, call_arg, binding) do + binding = :orddict.store(:VAR, call_arg, binding) + try do + {:value, _, binding} = :erl_eval.expr({:match, 0, erl_arg, {:var, 0, :VAR}}, binding, :none) + {true, binding} + rescue + _ -> {false, binding} + end + end + + defp rewrite_arg(arg) do + Macro.prewalk(arg, fn + {:%{}, meta, [__struct__: Range, first: first, last: last]} -> + {:.., meta, [first, last]} + other -> + other + end) + end + + defp blame_guard({{:., _, [:erlang, op]}, meta, [left, right]}, scope, binding) + when op == :andalso or op == :orelse do + {rewrite_guard_call(op), meta, [ + blame_guard(left, scope, binding), + blame_guard(right, scope, binding) + ]} + end + + defp blame_guard(ex_guard, scope, binding) do + {erl_guard, _} = :elixir_erl_pass.translate(ex_guard, scope) + match? 
= + try do + {:value, true, _} = :erl_eval.expr(erl_guard, binding, :none) + true + rescue + _ -> false + end + blame_wrap(match?, rewrite_guard(ex_guard)) + end + + defp rewrite_guard(guard) do + Macro.prewalk(guard, fn + {:., _, [:erlang, call]} -> rewrite_guard_call(call) + other -> other + end) + end + + defp rewrite_guard_call(:"orelse"), do: :or + defp rewrite_guard_call(:"andalso"), do: :and + defp rewrite_guard_call(:"=<"), do: :<= + defp rewrite_guard_call(:"/="), do: :!= + defp rewrite_guard_call(:"=:="), do: :=== + defp rewrite_guard_call(:"=/="), do: :!== + + defp rewrite_guard_call(op) when op in [:band, :bor, :bnot, :bsl, :bsr, :bxor], + do: {:., [], [Bitwise, op]} + defp rewrite_guard_call(op) when op in [:xor, :element, :size], + do: {:., [], [:erlang, op]} + defp rewrite_guard_call(op), + do: op + + defp blame_wrap(match?, ast), do: %{match?: match?, node: ast} + + @doc """ + Formats an exit. It returns a string. Often there are errors/exceptions inside exits. Exits are often wrapped by the caller and provide stacktraces too. This function @@ -221,12 +387,13 @@ defmodule Exception do "shutdown: #{inspect(reason)}" end + defp format_exit_reason(:calling_self), do: "process attempted to call itself" defp format_exit_reason(:timeout), do: "time out" defp format_exit_reason(:killed), do: "killed" defp format_exit_reason(:noconnection), do: "no connection" defp format_exit_reason(:noproc) do - "no process" + "no process: the process is not alive or there's no process currently associated with the given name, possibly because its application isn't started" end defp format_exit_reason({:nodedown, node_name}) when is_atom(node_name) do @@ -253,7 +420,7 @@ defmodule Exception do # :supervisor.start_link error reasons - # If value is a list will be be formatted by mfa exit in format_exit/1 + # If value is a list will be formatted by mfa exit in format_exit/1 defp format_exit_reason({:bad_return, {mod, :init, value}}) when is_atom(mod) do format_mfa(mod, :init, 1) <> " returned a bad value: " <> inspect(value) @@ -356,9 +523,10 @@ defmodule Exception do end defp format_application(module) do + # We cannot use Application due to bootstrap issues case :application.get_application(module) do {:ok, app} -> "(" <> Atom.to_string(app) <> ") " - :undefined -> "" + :undefined -> "" end end @@ -369,13 +537,13 @@ defmodule Exception do is retrieved from `Process.info/2`. 
""" def format_stacktrace(trace \\ nil) do - trace = trace || case Process.info(self, :current_stacktrace) do + trace = trace || case Process.info(self(), :current_stacktrace) do {:current_stacktrace, t} -> Enum.drop(t, 3) end case trace do [] -> "\n" - s -> " " <> Enum.map_join(s, "\n ", &format_stacktrace_entry(&1)) <> "\n" + _ -> " " <> Enum.map_join(trace, "\n ", &format_stacktrace_entry(&1)) <> "\n" end end @@ -385,7 +553,7 @@ defmodule Exception do ## Examples - Exception.format_fa(fn -> end, 1) + Exception.format_fa(fn -> nil end, 1) #=> "#Function<...>/1" """ @@ -414,17 +582,12 @@ defmodule Exception do "anonymous fn in func/arity" """ def format_mfa(module, fun, arity) when is_atom(module) and is_atom(fun) do - fun = - case inspect(fun) do - ":" <> fun -> fun - fun -> fun - end - - case match?("\"-" <> _, fun) and String.split(fun, "-") do - [ "\"", outer_fun, "fun", _count, "\"" ] -> - "anonymous fn#{format_arity(arity)} in #{inspect module}.#{outer_fun}" - _ -> - "#{inspect module}.#{fun}#{format_arity(arity)}" + case Inspect.Function.extract_anonymous_fun_parent(Atom.to_string(fun)) do + {outer_name, outer_arity} -> + "anonymous fn#{format_arity(arity)} in " <> + "#{inspect(module)}.#{Inspect.Function.escape_name(outer_name)}/#{outer_arity}" + :error -> + "#{inspect(module)}.#{Inspect.Function.escape_name(fun)}#{format_arity(arity)}" end end @@ -438,8 +601,8 @@ defmodule Exception do end @doc """ - Formats the given file and line as shown in stacktraces. - If any of the values are nil, they are omitted. + Formats the given `file` and `line` as shown in stacktraces. + If any of the values are `nil`, they are omitted. ## Examples @@ -453,11 +616,7 @@ defmodule Exception do "" """ - def format_file_line(file, line) do - format_file_line(file, line, "") - end - - defp format_file_line(file, line, suffix) do + def format_file_line(file, line, suffix \\ "") do if file do if line && line != 0 do "#{file}:#{line}:#{suffix}" @@ -474,41 +633,21 @@ defmodule Exception do end end -# Some exceptions implement `message/1` instead of `exception/1` mostly +# Some exceptions implement "message/1" instead of "exception/1" mostly # for bootstrap reasons. It is recommended for applications to implement -# `exception/1` instead of `message/1` as described in `defexception/1` +# "exception/1" instead of "message/1" as described in "defexception/1" # docs. 
defmodule RuntimeError do defexception message: "runtime error" - - def exception(msg) when is_binary(msg) do - %RuntimeError{message: msg} - end - - def exception(arg) do - super(arg) - end end defmodule ArgumentError do defexception message: "argument error" - - def exception(msg) when is_binary(msg) do - %ArgumentError{message: msg} - end - - def exception(arg) do - super(arg) - end end defmodule ArithmeticError do - defexception [] - - def message(_) do - "bad argument in arithmetic expression" - end + defexception message: "bad argument in arithmetic expression" end defmodule SystemLimitError do @@ -520,7 +659,7 @@ defmodule SystemLimitError do end defmodule SyntaxError do - defexception [file: nil, line: nil, description: "syntax error"] + defexception [:file, :line, description: "syntax error"] def message(exception) do Exception.format_file_line(Path.relative_to_cwd(exception.file), exception.line) <> @@ -529,25 +668,25 @@ defmodule SyntaxError do end defmodule TokenMissingError do - defexception [file: nil, line: nil, description: "expression is incomplete"] + defexception [:file, :line, description: "expression is incomplete"] - def message(exception) do - Exception.format_file_line(Path.relative_to_cwd(exception.file), exception.line) <> - " " <> exception.description + def message(%{file: file, line: line, description: description}) do + Exception.format_file_line(file && Path.relative_to_cwd(file), line) <> + " " <> description end end defmodule CompileError do - defexception [file: nil, line: nil, description: "compile error"] + defexception [:file, :line, description: "compile error"] - def message(exception) do - Exception.format_file_line(Path.relative_to_cwd(exception.file), exception.line) <> - " " <> exception.description + def message(%{file: file, line: line, description: description}) do + Exception.format_file_line(file && Path.relative_to_cwd(file), line) <> + " " <> description end end defmodule BadFunctionError do - defexception [term: nil] + defexception [:term] def message(exception) do "expected a function, got: #{inspect(exception.term)}" @@ -555,15 +694,31 @@ defmodule BadFunctionError do end defmodule BadStructError do - defexception [struct: nil, term: nil] + defexception [:struct, :term] def message(exception) do "expected a struct named #{inspect(exception.struct)}, got: #{inspect(exception.term)}" end end +defmodule BadMapError do + defexception [:term] + + def message(exception) do + "expected a map, got: #{inspect(exception.term)}" + end +end + +defmodule BadBooleanError do + defexception [:term, :operator] + + def message(exception) do + "expected a boolean on left-side of \"#{exception.operator}\", got: #{inspect(exception.term)}" + end +end + defmodule MatchError do - defexception [term: nil] + defexception [:term] def message(exception) do "no match of right hand side value: #{inspect(exception.term)}" @@ -571,13 +726,21 @@ defmodule MatchError do end defmodule CaseClauseError do - defexception [term: nil] + defexception [:term] def message(exception) do "no case clause matching: #{inspect(exception.term)}" end end +defmodule WithClauseError do + defexception [:term] + + def message(exception) do + "no with clause matching: #{inspect(exception.term)}" + end +end + defmodule CondClauseError do defexception [] @@ -587,7 +750,7 @@ defmodule CondClauseError do end defmodule TryClauseError do - defexception [term: nil] + defexception [:term] def message(exception) do "no try clause matching: #{inspect(exception.term)}" @@ -595,7 +758,7 @@ defmodule 
TryClauseError do end defmodule BadArityError do - defexception [function: nil, args: nil] + defexception [:function, :args] def message(exception) do fun = exception.function @@ -611,29 +774,145 @@ defmodule BadArityError do end defmodule UndefinedFunctionError do - defexception [module: nil, function: nil, arity: nil] + defexception [:module, :function, :arity, :reason, :exports] + + def message(%{reason: nil, module: module, function: function, arity: arity} = e) do + cond do + is_nil(function) or is_nil(arity) -> + "undefined function" + not is_nil(module) and :code.is_loaded(module) === false -> + message(%{e | reason: :"module could not be loaded"}) + true -> + message(%{e | reason: :"function not exported"}) + end + end - def message(exception) do - if exception.function do - formatted = Exception.format_mfa exception.module, exception.function, exception.arity - "undefined function: #{formatted}" + def message(%{reason: :"module could not be loaded", module: module, function: function, arity: arity}) do + "function " <> Exception.format_mfa(module, function, arity) <> + " is undefined (module #{inspect module} is not available)" + end + + def message(%{reason: :"function not exported", module: module, function: function, arity: arity}) do + IO.iodata_to_binary(function_not_exported(module, function, arity, nil)) + end + + def message(%{reason: :"function not available", module: module, function: function, arity: arity}) do + "nil." <> fa = Exception.format_mfa(nil, function, arity) + "function " <> Exception.format_mfa(module, function, arity) <> + " is undefined (function #{fa} is not available)" + end + + def message(%{reason: reason, module: module, function: function, arity: arity}) do + "function " <> Exception.format_mfa(module, function, arity) <> " is undefined (#{reason})" + end + + @doc false + def function_not_exported(module, function, arity, exports) do + suffix = + if macro_exported?(module, function, arity) do + ". However there is a macro with the same name and arity. " <> + "Be sure to require #{inspect(module)} if you intend to invoke this macro" + else + did_you_mean(module, function, exports) + end + + ["function ", Exception.format_mfa(module, function, arity), " is undefined or private", suffix] + end + + @function_threshold 0.77 + @max_suggestions 5 + + defp did_you_mean(module, function, exports) do + exports = exports || exports_for(module) + + result = + case Keyword.take(exports, [function]) do + [] -> + base = Atom.to_string(function) + for {key, val} <- exports, + dist = String.jaro_distance(base, Atom.to_string(key)), + dist >= @function_threshold, + do: {dist, key, val} + arities -> + for {key, val} <- arities, do: {1.0, key, val} + end + |> Enum.sort(&elem(&1, 0) >= elem(&2, 0)) + |> Enum.take(@max_suggestions) + |> Enum.sort(&elem(&1, 1) <= elem(&2, 1)) + + case result do + [] -> [] + suggestions -> [". 
Did you mean one of:\n\n" | Enum.map(suggestions, &format_fa/1)] + end + end + + defp format_fa({_dist, fun, arity}) do + fun = with ":" <> fun <- inspect(fun), do: fun + [" * ", fun, ?/, Integer.to_string(arity), ?\n] + end + + defp exports_for(module) do + if function_exported?(module, :__info__, 1) do + module.__info__(:macros) ++ module.__info__(:functions) else - "undefined function" + module.module_info(:exports) end + rescue + # In case the module was removed while we are computing this + UndefinedFunctionError -> [] end end defmodule FunctionClauseError do - defexception [module: nil, function: nil, arity: nil] + defexception [:module, :function, :arity, :kind, :args, :clauses] def message(exception) do - if exception.function do - formatted = Exception.format_mfa exception.module, exception.function, exception.arity - "no function clause matching in #{formatted}" - else - "no function clause matches" + case exception do + %{function: nil} -> + "no function clause matches" + %{module: module, function: function, arity: arity} -> + formatted = Exception.format_mfa module, function, arity + "no function clause matching in #{formatted}" <> blame(exception, &inspect/1, &blame_match/2) end end + + defp blame_match(%{match?: true, node: node}, _), + do: "+" <> Macro.to_string(node) <> "+" + defp blame_match(%{match?: false, node: node}, _), + do: "-" <> Macro.to_string(node) <> "-" + defp blame_match(_, string), + do: string + + @doc false + def blame(%{args: nil}, _, _) do + "" + end + def blame(%{module: module, function: function, arity: arity, + kind: kind, args: args, clauses: clauses}, inspect_fun, ast_fun) do + mfa = Exception.format_mfa(module, function, arity) + + formatted_args = + args + |> Enum.with_index(1) + |> Enum.map(fn {arg, i} -> "\n # #{i}\n #{inspect_fun.(arg)}\n" end) + + formatted_clauses = + if clauses do + top_10 = + clauses + |> Enum.take(10) + |> Enum.map(fn {args, guards} -> + code = Enum.reduce(guards, {function, [], args}, &{:when, [], [&2, &1]}) + " #{kind} " <> Macro.to_string(code, ast_fun) <> "\n" + end) + + "\nAttempted function clauses (showing #{length(top_10)} out of #{length(clauses)}):\n\n#{top_10}" + else + "" + end + + "\n\nThe following arguments were given to #{mfa}:\n#{formatted_args}#{formatted_clauses}" + end end defmodule Code.LoadError do @@ -646,23 +925,27 @@ defmodule Code.LoadError do end defmodule Protocol.UndefinedError do - defexception [protocol: nil, value: nil, description: nil] + defexception [:protocol, :value, description: ""] def message(exception) do msg = "protocol #{inspect exception.protocol} not implemented for #{inspect exception.value}" - if exception.description do - msg <> ", " <> exception.description - else - msg + case exception.description do + "" -> msg + descr -> msg <> ", " <> descr end end end defmodule KeyError do - defexception key: nil, term: nil + defexception [:key, :term] def message(exception) do - "key #{inspect exception.key} not found in: #{inspect exception.term}" + msg = "key #{inspect exception.key} not found" + if exception.term != nil do + msg <> " in: #{inspect exception.term}" + else + msg + end end end @@ -680,52 +963,73 @@ defmodule UnicodeConversionError do "encoding starting at #{inspect rest}" end - defp detail([h|_]) do + defp detail([h | _]) when is_integer(h) do "code point #{h}" end + + defp detail([h | _]) do + detail(h) + end end defmodule Enum.OutOfBoundsError do - defexception [] - - def message(_) do - "out of bounds error" - end + defexception message: "out of bounds error" end 
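The blame data wired into `FunctionClauseError` above can be exercised from a rescue clause. A rough sketch, assuming Erlang/OTP 20 with the `debug_info` chunk available (`BlameDemo` is a throwaway module defined only for this example):

    defmodule BlameDemo do
      # A single clause, so any non-integer argument raises FunctionClauseError.
      def double(x) when is_integer(x), do: x * 2
    end

    try do
      BlameDemo.double(:oops)
    rescue
      error ->
        {blamed, stacktrace} = Exception.blame(:error, error, System.stacktrace())
        # With debug_info available, the message lists the attempted clauses,
        # wrapping matching parts in +...+ and failing parts in -...-;
        # without it, only the given arguments are shown.
        IO.puts Exception.format(:error, blamed, stacktrace)
    end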
defmodule Enum.EmptyError do - defexception [] + defexception message: "empty error" +end - def message(_) do - "empty error" +defmodule File.Error do + defexception [:reason, :path, action: ""] + + def message(%{action: action, reason: reason, path: path}) do + formatted = + case {action, reason} do + {"remove directory", :eexist} -> + "directory is not empty" + _ -> + IO.iodata_to_binary(:file.format_error(reason)) + end + + "could not #{action} #{inspect(path)}: #{formatted}" end end -defmodule File.Error do - defexception [reason: nil, action: "", path: nil] +defmodule File.CopyError do + defexception [:reason, :source, :destination, on: "", action: ""] def message(exception) do - formatted = IO.iodata_to_binary(:file.format_error(exception.reason)) - "could not #{exception.action} #{exception.path}: #{formatted}" + formatted = + IO.iodata_to_binary(:file.format_error(exception.reason)) + + location = + case exception.on do + "" -> "" + on -> ". #{on}" + end + + "could not #{exception.action} from #{inspect(exception.source)} to " <> + "#{inspect(exception.destination)}#{location}: #{formatted}" end end -defmodule File.CopyError do - defexception [reason: nil, action: "", source: nil, destination: nil, on: nil] +defmodule File.LinkError do + defexception [:reason, :existing, :new, action: ""] def message(exception) do - formatted = IO.iodata_to_binary(:file.format_error(exception.reason)) - location = if on = exception.on, do: ". #{on}", else: "" - "could not #{exception.action} from #{exception.source} to " <> - "#{exception.destination}#{location}: #{formatted}" + formatted = + IO.iodata_to_binary(:file.format_error(exception.reason)) + "could not #{exception.action} from #{inspect(exception.existing)} to " <> + "#{inspect(exception.new)}: #{formatted}" end end defmodule ErlangError do - defexception [original: nil] + defexception [:original] def message(exception) do - "erlang error: #{inspect(exception.original)}" + "Erlang error: #{inspect(exception.original)}" end @doc false @@ -761,21 +1065,50 @@ defmodule ErlangError do %MatchError{term: term} end + def normalize({:badmap, term}, _stacktrace) do + %BadMapError{term: term} + end + + def normalize({:badbool, op, term}, _stacktrace) do + %BadBooleanError{operator: op, term: term} + end + + def normalize({:badkey, key}, stacktrace) do + term = + case ensure_stacktrace(stacktrace) do + [{Map, :get_and_update!, [map, _, _], _} | _] -> map + [{Map, :update!, [map, _, _], _} | _] -> map + [{:maps, :update, [_, _, map], _} | _] -> map + [{:maps, :get, [_, map], _} | _] -> map + _ -> nil + end + %KeyError{key: key, term: term} + end + + def normalize({:badkey, key, map}, _stacktrace) do + %KeyError{key: key, term: map} + end + def normalize({:case_clause, term}, _stacktrace) do %CaseClauseError{term: term} end + def normalize({:with_clause, term}, _stacktrace) do + %WithClauseError{term: term} + end + def normalize({:try_clause, term}, _stacktrace) do %TryClauseError{term: term} end def normalize(:undef, stacktrace) do - {mod, fun, arity} = from_stacktrace(stacktrace || :erlang.get_stacktrace) + stacktrace = ensure_stacktrace(stacktrace) + {mod, fun, arity} = from_stacktrace(stacktrace) %UndefinedFunctionError{module: mod, function: fun, arity: arity} end def normalize(:function_clause, stacktrace) do - {mod, fun, arity} = from_stacktrace(stacktrace || :erlang.get_stacktrace) + {mod, fun, arity} = from_stacktrace(ensure_stacktrace(stacktrace)) %FunctionClauseError{module: mod, function: fun, arity: arity} end @@ -787,11 +1120,23 @@ 
defmodule ErlangError do %ErlangError{original: other} end - defp from_stacktrace([{module, function, args, _}|_]) when is_list(args) do + defp ensure_stacktrace(nil) do + try do + :erlang.get_stacktrace() + rescue + _ -> [] + end + end + + defp ensure_stacktrace(stacktrace) do + stacktrace + end + + defp from_stacktrace([{module, function, args, _} | _]) when is_list(args) do {module, function, length(args)} end - defp from_stacktrace([{module, function, arity, _}|_]) do + defp from_stacktrace([{module, function, arity, _} | _]) do {module, function, arity} end diff --git a/lib/elixir/lib/file.ex b/lib/elixir/lib/file.ex index 23cc854db4c..8763e0ae608 100644 --- a/lib/elixir/lib/file.ex +++ b/lib/elixir/lib/file.ex @@ -3,35 +3,41 @@ defmodule File do This module contains functions to manipulate files. Some of those functions are low-level, allowing the user - to interact with the file or IO devices, like `open/2`, + to interact with files or IO devices, like `open/2`, `copy/3` and others. This module also provides higher level functions that work with filenames and have their naming based on UNIX variants. For example, one can copy a file via `cp/3` and remove files and directories recursively - via `rm_rf/1` + via `rm_rf/1`. + + Paths given to functions in this module can be either relative to the + current working directory (as returned by `File.cwd/0`), or absolute + paths. Shell conventions like `~` are not expanded automatically. + To use paths like `~/Downloads`, you can use `Path.expand/1` or + `Path.expand/2` to expand your path to an absolute path. ## Encoding In order to write and read files, one must use the functions - in the `IO` module. By default, a file is opened in binary mode + in the `IO` module. By default, a file is opened in binary mode, which requires the functions `IO.binread/2` and `IO.binwrite/2` to interact with the file. A developer may pass `:utf8` as an option when opening the file, then the slower `IO.read/2` and `IO.write/2` functions must be used as they are responsible for - doing the proper conversions and data guarantees. + doing the proper conversions and providing the proper data guarantees. - Note that filenames when given as char lists in Elixir are + Note that filenames when given as charlists in Elixir are always treated as UTF-8. In particular, we expect that the - shell and the operating system are configured to use UTF8 - encoding. Binary filenames are considering raw and passed + shell and the operating system are configured to use UTF-8 + encoding. Binary filenames are considered raw and passed to the OS as is. ## API Most of the functions in this module return `:ok` or `{:ok, result}` in case of success, `{:error, reason}` - otherwise. Those function are also followed by a variant - that ends with `!` which returns the result (without the + otherwise. Those functions also have a variant + that ends with `!` which returns the result (instead of the `{:ok, result}` tuple) in case of success or raises an exception in case it fails. For example: @@ -47,15 +53,15 @@ defmodule File do File.read!("invalid.txt") #=> raises File.Error - In general, a developer should use the former in case he wants + In general, a developer should use the former in case they want to react if the file does not exist. The latter should be used - when the developer expects his software to fail in case the + when the developer expects their software to fail in case the file cannot be read (i.e. it is literally an exception). 
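A sketch of the "react to the error" pattern the paragraph above recommends; `parse/1`, `default_config/0` and the path are hypothetical placeholders:

    # Reacting to a missing file with the non-bang variant.
    case File.read("config.txt") do
      {:ok, contents} -> parse(contents)
      {:error, :enoent} -> default_config()
    end

    # Letting it crash when the file is expected to exist.
    contents = File.read!("config.txt")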
## Processes and raw files Every time a file is opened, Elixir spawns a new process. Writing - to a file is equivalent to sending messages to that process that + to a file is equivalent to sending messages to the process that writes to the file descriptor. This means files can be passed between nodes and message passing @@ -63,22 +69,29 @@ defmodule File do However, you may not always want to pay the price for this abstraction. In such cases, a file can be opened in `:raw` mode. The options `:read_ahead` - and `:delayed_write` are also useful when operating large files or + and `:delayed_write` are also useful when operating on large files or working with files in tight loops. - Check http://www.erlang.org/doc/man/file.html#open-2 for more information + Check [`:file.open/2`](http://www.erlang.org/doc/man/file.html#open-2) for more information about such options and other performance considerations. """ - alias :file, as: F - @type posix :: :file.posix() @type io_device :: :file.io_device() @type stat_options :: [time: :local | :universal | :posix] + @type mode :: :append | :binary | :charlist | :compressed | :delayed_write | :exclusive | + :raw | :read | :read_ahead | :sync | :utf8 | :write | + {:encoding, :latin1 | :unicode | :utf8 | :utf16 | :utf32 | + {:utf16, :big | :little} | {:utf32, :big | :little}} | + {:read_ahead, pos_integer} | + {:delayed_write, non_neg_integer, non_neg_integer} @doc """ Returns `true` if the path is a regular file. + This function follows symbolic links, so if a symbolic link points to a + regular file, `true` is returned. + ## Examples File.regular? __ENV__.file #=> true @@ -90,7 +103,28 @@ defmodule File do end @doc """ - Returns `true` if the path is a directory. + Returns `true` if the given path is a directory. + + This function follows symbolic links, so if a symbolic link points to a + directory, `true` is returned. + + ## Examples + + File.dir?("./test") + #=> true + + File.dir?("test") + #=> true + + File.dir?("/usr/bin") + #=> true + + File.dir?("~/Downloads") + #=> false + + "~/Downloads" |> Path.expand |> File.dir? 
+ #=> true + """ @spec dir?(Path.t) :: boolean def dir?(path) do @@ -116,7 +150,7 @@ defmodule File do """ @spec exists?(Path.t) :: boolean def exists?(path) do - match?({:ok, _}, F.read_file_info(IO.chardata_to_string(path))) + match?({:ok, _}, :file.read_file_info(IO.chardata_to_string(path))) end @doc """ @@ -126,16 +160,16 @@ defmodule File do Typical error reasons are: * `:eacces` - missing search or write permissions for the parent - directories of `path` + directories of `path` * `:eexist` - there is already a file or directory named `path` * `:enoent` - a component of `path` does not exist - * `:enospc` - there is a no space left on the device + * `:enospc` - there is no space left on the device * `:enotdir` - a component of `path` is not a directory; - on some platforms, `:enoent` is returned instead + on some platforms, `:enoent` is returned instead """ @spec mkdir(Path.t) :: :ok | {:error, posix} def mkdir(path) do - F.make_dir(IO.chardata_to_string(path)) + :file.make_dir(IO.chardata_to_string(path)) end @doc """ @@ -143,11 +177,11 @@ defmodule File do """ @spec mkdir!(Path.t) :: :ok | no_return def mkdir!(path) do - path = IO.chardata_to_string(path) case mkdir(path) do :ok -> :ok {:error, reason} -> - raise File.Error, reason: reason, action: "make directory", path: path + raise File.Error, reason: reason, action: "make directory", + path: IO.chardata_to_string(path) end end @@ -158,8 +192,8 @@ defmodule File do Typical error reasons are: * `:eacces` - missing search or write permissions for the parent - directories of `path` - * `:enospc` - there is a no space left on the device + directories of `path` + * `:enospc` - there is no space left on the device * `:enotdir` - a component of `path` is not a directory """ @spec mkdir_p(Path.t) :: :ok | {:error, posix} @@ -196,11 +230,11 @@ defmodule File do """ @spec mkdir_p!(Path.t) :: :ok | no_return def mkdir_p!(path) do - path = IO.chardata_to_string(path) case mkdir_p(path) do :ok -> :ok {:error, reason} -> - raise File.Error, reason: reason, action: "make directory (with -p)", path: path + raise File.Error, reason: reason, action: "make directory (with -p)", + path: IO.chardata_to_string(path) end end @@ -212,31 +246,31 @@ defmodule File do * `:enoent` - the file does not exist * `:eacces` - missing permission for reading the file, - or for searching one of the parent directories + or for searching one of the parent directories * `:eisdir` - the named file is a directory * `:enotdir` - a component of the file name is not a directory; - on some platforms, `:enoent` is returned instead + on some platforms, `:enoent` is returned instead * `:enomem` - there is not enough memory for the contents of the file You can use `:file.format_error/1` to get a descriptive string of the error. """ @spec read(Path.t) :: {:ok, binary} | {:error, posix} def read(path) do - F.read_file(IO.chardata_to_string(path)) + :file.read_file(IO.chardata_to_string(path)) end @doc """ - Returns binary with the contents of the given filename or raises + Returns a binary with the contents of the given filename or raises `File.Error` if an error occurs. 
""" @spec read!(Path.t) :: binary | no_return def read!(path) do - path = IO.chardata_to_string(path) case read(path) do {:ok, binary} -> binary {:error, reason} -> - raise File.Error, reason: reason, action: "read file", path: path + raise File.Error, reason: reason, action: "read file", + path: IO.chardata_to_string(path) end end @@ -250,12 +284,20 @@ defmodule File do The accepted options are: - * `:time` - `:local | :universal | :posix`; default: `:local` + * `:time` - configures how the file timestamps are returned + + The values for `:time` can be: + + * `:universal` - returns a `{date, time}` tuple in UTC (default) + * `:local` - returns a `{date, time}` tuple using the same time zone as the + machine + * `:posix` - returns the time as integer seconds since epoch """ @spec stat(Path.t, stat_options) :: {:ok, File.Stat.t} | {:error, posix} def stat(path, opts \\ []) do - case F.read_file_info(IO.chardata_to_string(path), opts) do + opts = Keyword.put_new(opts, :time, :universal) + case :file.read_file_info(IO.chardata_to_string(path), opts) do {:ok, fileinfo} -> {:ok, File.Stat.from_record(fileinfo)} error -> @@ -264,16 +306,99 @@ defmodule File do end @doc """ - Same as `stat/2` but returns the `File.Stat` directly and + Same as `stat/2` but returns the `File.Stat` directly, or throws `File.Error` if an error is returned. """ @spec stat!(Path.t, stat_options) :: File.Stat.t | no_return def stat!(path, opts \\ []) do - path = IO.chardata_to_string(path) case stat(path, opts) do {:ok, info} -> info {:error, reason} -> - raise File.Error, reason: reason, action: "read file stats", path: path + raise File.Error, reason: reason, action: "read file stats", + path: IO.chardata_to_string(path) + end + end + + @doc """ + Returns information about the `path`. If the file is a symlink, sets + the `type` to `:symlink` and returns a `File.Stat` struct for the link. For any + other file, returns exactly the same values as `stat/2`. + + For more details, see [`:file.read_link_info/2`](http://www.erlang.org/doc/man/file.html#read_link_info-2). + + ## Options + + The accepted options are: + + * `:time` - configures how the file timestamps are returned + + The values for `:time` can be: + + * `:universal` - returns a `{date, time}` tuple in UTC (default) + * `:local` - returns a `{date, time}` tuple using the machine time + * `:posix` - returns the time as integer seconds since epoch + + """ + @spec lstat(Path.t, stat_options) :: {:ok, File.Stat.t} | {:error, posix} + def lstat(path, opts \\ []) do + opts = Keyword.put_new(opts, :time, :universal) + case :file.read_link_info(IO.chardata_to_string(path), opts) do + {:ok, fileinfo} -> + {:ok, File.Stat.from_record(fileinfo)} + error -> + error + end + end + + @doc """ + Same as `lstat/2` but returns the `File.Stat` struct directly, or + throws `File.Error` if an error is returned. + """ + @spec lstat!(Path.t, stat_options) :: File.Stat.t | no_return + def lstat!(path, opts \\ []) do + case lstat(path, opts) do + {:ok, info} -> info + {:error, reason} -> + raise File.Error, reason: reason, action: "read file stats", + path: IO.chardata_to_string(path) + end + end + + @doc """ + Reads the symbolic link at `path`. + + If `path` exists and is a symlink, returns `{:ok, target}`, otherwise returns + `{:error, reason}`. + + For more details, see + [`:file.read_link/1`](http://erlang.org/doc/man/file.html#read_link-1). 
+ + Typical error reasons are: + + * `:einval` - path is not a symbolic link + * `:enoent` - path does not exist + * `:enotsup` - symbolic links are not supported on the current platform + + """ + @spec read_link(Path.t) :: {:ok, binary} | {:error, posix} + def read_link(path) do + case path |> IO.chardata_to_string |> :file.read_link do + {:ok, target} -> {:ok, IO.chardata_to_string(target)} + error -> error + end + end + + @doc """ + Same as `read_link/1` but returns the target directly or throws `File.Error` if an error is + returned. + """ + @spec read_link!(Path.t) :: binary | no_return + def read_link!(path) do + case read_link(path) do + {:ok, resolved} -> + resolved + {:error, reason} -> + raise File.Error, reason: reason, action: "read link", path: IO.chardata_to_string(path) end end @@ -283,7 +408,8 @@ defmodule File do """ @spec write_stat(Path.t, File.Stat.t, stat_options) :: :ok | {:error, posix} def write_stat(path, stat, opts \\ []) do - F.write_file_info(IO.chardata_to_string(path), File.Stat.to_record(stat), opts) + opts = Keyword.put_new(opts, :time, :universal) + :file.write_file_info(IO.chardata_to_string(path), File.Stat.to_record(stat), opts) end @doc """ @@ -292,41 +418,74 @@ defmodule File do """ @spec write_stat!(Path.t, File.Stat.t, stat_options) :: :ok | no_return def write_stat!(path, stat, opts \\ []) do - path = IO.chardata_to_string(path) case write_stat(path, stat, opts) do :ok -> :ok {:error, reason} -> - raise File.Error, reason: reason, action: "write file stats", path: path + raise File.Error, reason: reason, action: "write file stats", + path: IO.chardata_to_string(path) end end @doc """ Updates modification time (mtime) and access time (atime) of - the given file. File is created if it doesn’t exist. + the given file. + + The file is created if it doesn’t exist. Requires datetime in UTC. """ @spec touch(Path.t, :calendar.datetime) :: :ok | {:error, posix} - def touch(path, time \\ :calendar.local_time) do + def touch(path, time \\ :calendar.universal_time) do path = IO.chardata_to_string(path) - case F.change_time(path, time) do - {:error, :enoent} -> - write(path, "") - F.change_time(path, time) - other -> - other + case :elixir_utils.change_universal_time(path, time) do + {:error, :enoent} -> touch_new(path, time) + other -> other + end + end + + defp touch_new(path, time) do + case write(path, "", [:append]) do + :ok -> :elixir_utils.change_universal_time(path, time) + {:error, _reason} = error -> error end end @doc """ Same as `touch/2` but raises an exception if it fails. - Returns `:ok` otherwise. + + Returns `:ok` otherwise. Requires datetime in UTC. """ @spec touch!(Path.t, :calendar.datetime) :: :ok | no_return - def touch!(path, time \\ :calendar.local_time) do - path = IO.chardata_to_string(path) + def touch!(path, time \\ :calendar.universal_time) do case touch(path, time) do :ok -> :ok {:error, reason} -> - raise File.Error, reason: reason, action: "touch", path: path + raise File.Error, reason: reason, action: "touch", + path: IO.chardata_to_string(path) + end + end + + @doc """ + Creates a hard link `new` to the file `existing`. + + Returns `:ok` if successful, `{:error, reason}` otherwise. + If the operating system does not support hard links, returns + `{:error, :enotsup}`. + """ + def ln(existing, new) do + :file.make_link(IO.chardata_to_string(existing), IO.chardata_to_string(new)) + end + + @doc """ + Same as `ln/2` but raises an exception if it fails. 
+ + Returns `:ok` otherwise + """ + def ln!(existing, new) do + case ln(existing, new) do + :ok -> :ok + {:error, reason} -> + raise File.LinkError, reason: reason, action: "create hard link", + existing: IO.chardata_to_string(existing), + new: IO.chardata_to_string(new) end end @@ -338,13 +497,28 @@ defmodule File do `{:error, :enotsup}`. """ def ln_s(existing, new) do - F.make_symlink(existing, new) + :file.make_symlink(IO.chardata_to_string(existing), IO.chardata_to_string(new)) + end + + @doc """ + Same as `ln_s/2` but raises an exception if it fails. + + Returns `:ok` otherwise + """ + def ln_s!(existing, new) do + case ln_s(existing, new) do + :ok -> :ok + {:error, reason} -> + raise File.LinkError, reason: reason, action: "create symlink", + existing: IO.chardata_to_string(existing), + new: IO.chardata_to_string(new) + end end @doc """ Copies the contents of `source` to `destination`. - Both parameters can be a filename or an io device opened + Both parameters can be a filename or an IO device opened with `open/2`. `bytes_count` specifies the number of bytes to copy, the default being `:infinity`. @@ -363,38 +537,62 @@ defmodule File do Typical error reasons are the same as in `open/2`, `read/1` and `write/3`. """ - @spec copy(Path.t, Path.t, pos_integer | :infinity) :: {:ok, non_neg_integer} | {:error, posix} + @spec copy(Path.t | io_device, Path.t | io_device, pos_integer | :infinity) :: {:ok, non_neg_integer} | {:error, posix} def copy(source, destination, bytes_count \\ :infinity) do - F.copy(IO.chardata_to_string(source), IO.chardata_to_string(destination), bytes_count) + :file.copy(maybe_to_string(source), maybe_to_string(destination), bytes_count) end @doc """ The same as `copy/3` but raises an `File.CopyError` if it fails. Returns the `bytes_copied` otherwise. """ - @spec copy!(Path.t, Path.t, pos_integer | :infinity) :: non_neg_integer | no_return + @spec copy!(Path.t | io_device, Path.t | io_device, pos_integer | :infinity) :: non_neg_integer | no_return def copy!(source, destination, bytes_count \\ :infinity) do - source = IO.chardata_to_string(source) - destination = IO.chardata_to_string(destination) case copy(source, destination, bytes_count) do {:ok, bytes_count} -> bytes_count {:error, reason} -> raise File.CopyError, reason: reason, action: "copy", - source: source, destination: destination + source: maybe_to_string(source), + destination: maybe_to_string(destination) end end + @doc """ + Renames the `source` file to `destination` file. It can be used to move files + (and directories) between directories. If moving a file, you must fully + specify the `destination` filename, it is not sufficient to simply specify + its directory. + + Returns `:ok` in case of success, `{:error, reason}` otherwise. + + Note: The command `mv` in Unix systems behaves differently depending + if `source` is a file and the `destination` is an existing directory. + We have chosen to explicitly disallow this behaviour. + + ## Examples + + # Rename file "a.txt" to "b.txt" + File.rename "a.txt", "b.txt" + + # Rename directory "samples" to "tmp" + File.rename "samples", "tmp" + """ + @spec rename(Path.t, Path.t) :: :ok | {:error, posix} + def rename(source, destination) do + :file.rename(source, destination) + end + @doc """ Copies the contents in `source` to `destination` preserving its mode. If a file already exists in the destination, it invokes a callback which should return `true` if the existing file - should be overwritten, `false` otherwise. It defaults to return `true`. 
+ should be overwritten, `false` otherwise. The callback defaults to return `true`. - It returns `:ok` in case of success, returns + The function returns `:ok` in case of success, returns `{:error, reason}` otherwise. - If you want to copy contents from an io device to another device + If you want to copy contents from an IO device to another device or do a straight copy from a source to a destination without preserving modes, check `copy/3` instead. @@ -414,20 +612,24 @@ defmodule File do end end + defp path_differs?(path, path), + do: false + + defp path_differs?(p1, p2) do + Path.expand(p1) !== Path.expand(p2) + end + @doc """ The same as `cp/3`, but raises `File.CopyError` if it fails. - Returns the list of copied files otherwise. + Returns `:ok` otherwise. """ @spec cp!(Path.t, Path.t, (Path.t, Path.t -> boolean)) :: :ok | no_return def cp!(source, destination, callback \\ fn(_, _) -> true end) do - source = IO.chardata_to_string(source) - destination = IO.chardata_to_string(destination) - case cp(source, destination, callback) do :ok -> :ok {:error, reason} -> - raise File.CopyError, reason: reason, action: "copy recursively", - source: source, destination: destination + raise File.CopyError, reason: reason, action: "copy", + source: IO.chardata_to_string(source), destination: IO.chardata_to_string(destination) end end @@ -438,23 +640,22 @@ defmodule File do `destination`. If the source is a directory, it copies the contents inside source into the destination. - If a file already exists in the destination, - it invokes a callback which should return - `true` if the existing file should be overwritten, - `false` otherwise. It defaults to return `true`. + If a file already exists in the destination, it invokes `callback`. + `callback` must be a function that takes two arguments: `source` and `destination`. + The callback should return `true` if the existing file should be overwritten and `false` otherwise. If a directory already exists in the destination - where a file is meant to be (or otherwise), this + where a file is meant to be (or vice versa), this function will fail. This function may fail while copying files, in such cases, it will leave the destination - directory in a dirty state, where already - copied files won't be removed. + directory in a dirty state, where file which have already been copied + won't be removed. - It returns `{:ok, files_and_directories}` in case of - success with all files and directories copied in no - specific order, `{:error, reason, file}` otherwise. + The function returns `{:ok, files_and_directories}` in case of + success, `files_and_directories` lists all files and directories copied in no + specific order. It returns `{:error, reason, file}` otherwise. Note: The command `cp` in Unix systems behaves differently depending if `destination` is an existing directory or not. @@ -462,20 +663,20 @@ defmodule File do ## Examples - # Copies "a.txt" to "tmp" - File.cp_r "a.txt", "tmp.txt" + # Copies file "a.txt" to "b.txt" + File.cp_r "a.txt", "b.txt" # Copies all files in "samples" to "tmp" File.cp_r "samples", "tmp" # Same as before, but asks the user how to proceed in case of conflicts File.cp_r "samples", "tmp", fn(source, destination) -> - IO.gets("Overwriting #{destination} by #{source}. Type y to confirm.") == "y" + IO.gets("Overwriting #{destination} by #{source}. Type y to confirm. 
") == "y\n" end """ @spec cp_r(Path.t, Path.t, (Path.t, Path.t -> boolean)) :: {:ok, [binary]} | {:error, posix, binary} - def cp_r(source, destination, callback \\ fn(_, _) -> true end) when is_function(callback) do + def cp_r(source, destination, callback \\ fn(_, _) -> true end) when is_function(callback, 2) do source = IO.chardata_to_string(source) destination = IO.chardata_to_string(destination) @@ -491,14 +692,11 @@ defmodule File do """ @spec cp_r!(Path.t, Path.t, (Path.t, Path.t -> boolean)) :: [binary] | no_return def cp_r!(source, destination, callback \\ fn(_, _) -> true end) do - source = IO.chardata_to_string(source) - destination = IO.chardata_to_string(destination) - case cp_r(source, destination, callback) do {:ok, files} -> files {:error, reason, file} -> - raise File.CopyError, reason: reason, action: "copy recursively", - source: source, destination: destination, on: file + raise File.CopyError, reason: reason, action: "copy recursively", on: file, + source: IO.chardata_to_string(source), destination: IO.chardata_to_string(destination) end end @@ -509,16 +707,16 @@ defmodule File do {:ok, :regular} -> do_cp_file(src, dest, callback, acc) {:ok, :symlink} -> - case F.read_link(src) do + case :file.read_link(src) do {:ok, link} -> do_cp_link(link, src, dest, callback, acc) {:error, reason} -> {:error, reason, src} end {:ok, :directory} -> - case F.list_dir(src) do + case :file.list_dir(src) do {:ok, files} -> case mkdir(dest) do success when success in [:ok, {:error, :eexist}] -> - Enum.reduce(files, [dest|acc], fn(x, acc) -> + Enum.reduce(files, [dest | acc], fn(x, acc) -> do_cp_r(Path.join(src, x), Path.join(dest, x), callback, acc) end) {:error, reason} -> {:error, reason, dest} @@ -542,17 +740,16 @@ defmodule File do # Both src and dest are files. defp do_cp_file(src, dest, callback, acc) do - case F.copy(src, {dest, [:exclusive]}) do + case :file.copy(src, {dest, [:exclusive]}) do {:ok, _} -> copy_file_mode!(src, dest) - [dest|acc] + [dest | acc] {:error, :eexist} -> - if callback.(src, dest) do - rm(dest) + if path_differs?(src, dest) and callback.(src, dest) do case copy(src, dest) do {:ok, _} -> copy_file_mode!(src, dest) - [dest|acc] + [dest | acc] {:error, reason} -> {:error, reason, src} end else @@ -564,14 +761,15 @@ defmodule File do # Both src and dest are files. defp do_cp_link(link, src, dest, callback, acc) do - case F.make_symlink(link, dest) do + case :file.make_symlink(link, dest) do :ok -> - [dest|acc] + [dest | acc] {:error, :eexist} -> - if callback.(src, dest) do - rm(dest) - case F.make_symlink(link, dest) do - :ok -> [dest|acc] + if path_differs?(src, dest) and callback.(src, dest) do + # If rm/1 fails, :file.make_symlink/2 will fail + _ = rm(dest) + case :file.make_symlink(link, dest) do + :ok -> [dest | acc] {:error, reason} -> {:error, reason, src} end else @@ -588,39 +786,44 @@ defmodule File do contents are overwritten. Returns `:ok` if successful, or `{:error, reason}` if an error occurs. + `content` must be `iodata` (a list of bytes or a binary). Setting the + encoding for this function has no effect. + **Warning:** Every time this function is invoked, a file descriptor is opened and a new process is spawned to write to the file. For this reason, if you are doing multiple writes in a loop, opening the file via `File.open/2` and using the functions in `IO` to write to the file will yield much better performance - then calling this function multiple times. + than calling this function multiple times. 
Typical error reasons are: * `:enoent` - a component of the file name does not exist * `:enotdir` - a component of the file name is not a directory; - on some platforms, enoent is returned instead - * `:enospc` - there is a no space left on the device + on some platforms, `:enoent` is returned instead + * `:enospc` - there is no space left on the device * `:eacces` - missing permission for writing the file or searching one of - the parent directories + the parent directories * `:eisdir` - the named file is a directory Check `File.open/2` for other available options. """ - @spec write(Path.t, iodata, list) :: :ok | {:error, posix} + @spec write(Path.t, iodata, [mode]) :: :ok | {:error, posix} def write(path, content, modes \\ []) do - F.write_file(IO.chardata_to_string(path), content, modes) + modes = normalize_modes(modes, false) + :file.write_file(IO.chardata_to_string(path), content, modes) end @doc """ Same as `write/3` but raises an exception if it fails, returns `:ok` otherwise. """ - @spec write!(Path.t, iodata, list) :: :ok | no_return + @spec write!(Path.t, iodata, [mode]) :: :ok | no_return def write!(path, content, modes \\ []) do - path = IO.chardata_to_string(path) - case F.write_file(path, content, modes) do + modes = normalize_modes(modes, false) + case :file.write_file(path, content, modes) do :ok -> :ok {:error, reason} -> - raise File.Error, reason: reason, action: "write to file", path: path + raise File.Error, reason: reason, action: "write to file", + path: IO.chardata_to_string(path) end end @@ -628,6 +831,7 @@ defmodule File do Tries to delete the file `path`. Returns `:ok` if successful, or `{:error, reason}` if an error occurs. + Note the file is deleted even if in read-only mode. Typical error reasons are: @@ -636,22 +840,22 @@ defmodule File do * `:eacces` - missing permission for the file or one of its parents * `:eperm` - the file is a directory and user is not super-user * `:enotdir` - a component of the file name is not a directory; - on some platforms, enoent is returned instead + on some platforms, `:enoent` is returned instead * `:einval` - filename had an improper type, such as tuple ## Examples - File.rm('file.txt') + File.rm("file.txt") #=> :ok - File.rm('tmp_dir/') + File.rm("tmp_dir/") #=> {:error, :eperm} """ @spec rm(Path.t) :: :ok | {:error, posix} def rm(path) do path = IO.chardata_to_string(path) - case F.delete(path) do + case :file.delete(path) do :ok -> :ok {:error, :eacces} = e -> @@ -663,26 +867,32 @@ defmodule File do defp change_mode_windows(path) do if match? {:win32, _}, :os.type do - case F.read_file_info(IO.chardata_to_string(path)) do + case :file.read_file_info(path) do {:ok, file_info} when elem(file_info, 3) in [:read, :none] -> - File.chmod(path, (elem(file_info, 7) + 0200)) - F.delete(path) + change_mode_windows(path, file_info) _ -> nil end end end + defp change_mode_windows(path, file_info) do + case chmod(path, (elem(file_info, 7) + 0o200)) do + :ok -> :file.delete(path) + {:error, _reason} = error -> error + end + end + @doc """ Same as `rm/1`, but raises an exception in case of failure. Otherwise `:ok`. 
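The performance note above can be illustrated with a small sketch (the path "log.txt" is hypothetical): opening the file once and writing through `IO` avoids spawning a new file process on every iteration, unlike calling `write/3` inside the loop.

    File.open("log.txt", [:write], fn device ->
      Enum.each(1..100, fn i ->
        IO.binwrite(device, "entry #{i}\n")
      end)
    end)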
""" @spec rm!(Path.t) :: :ok | no_return def rm!(path) do - path = IO.chardata_to_string(path) case rm(path) do :ok -> :ok {:error, reason} -> - raise File.Error, reason: reason, action: "remove file", path: path + raise File.Error, reason: reason, action: "remove file", + path: IO.chardata_to_string(path) end end @@ -701,7 +911,7 @@ defmodule File do """ @spec rmdir(Path.t) :: :ok | {:error, posix} def rmdir(path) do - F.del_dir(IO.chardata_to_string(path)) + :file.del_dir(IO.chardata_to_string(path)) end @doc """ @@ -709,16 +919,16 @@ defmodule File do """ @spec rmdir!(Path.t) :: :ok | {:error, posix} def rmdir!(path) do - path = IO.chardata_to_string(path) case rmdir(path) do :ok -> :ok {:error, reason} -> - raise File.Error, reason: reason, action: "remove directory", path: path + raise File.Error, reason: reason, action: "remove directory", + path: IO.chardata_to_string(path) end end @doc """ - Remove files and directories recursively at the given `path`. + Removes files and directories recursively at the given `path`. Symlinks are not followed but simply removed, non-existing files are simply ignored (i.e. doesn't make this function fail). @@ -751,7 +961,7 @@ defmodule File do case res do {:ok, acc} -> case rmdir(path) do - :ok -> {:ok, [path|acc]} + :ok -> {:ok, [path | acc]} {:error, :enoent} -> res {:error, reason} -> {:error, reason, path} end @@ -771,19 +981,19 @@ defmodule File do defp do_rm_regular(path, {:ok, acc} = entry) do case rm(path) do - :ok -> {:ok, [path|acc]} + :ok -> {:ok, [path | acc]} {:error, :enoent} -> entry {:error, reason} -> {:error, reason, path} end end - # On windows, symlinks are treated as directory and must be removed + # On Windows, symlinks are treated as directory and must be removed # with rmdir/1. But on Unix, we remove them via rm/1. So we first try # to remove it as a directory and, if we get :enotdir, we fallback to # a file removal. defp do_rm_directory(path, {:ok, acc} = entry) do case rmdir(path) do - :ok -> {:ok, [path|acc]} + :ok -> {:ok, [path | acc]} {:error, :enotdir} -> do_rm_regular(path, entry) {:error, :enoent} -> entry {:error, reason} -> {:error, reason, path} @@ -798,7 +1008,7 @@ defmodule File do _ -> {:ok, :regular} end {:ok, :directory} -> - F.list_dir(path) + :file.list_dir(path) {:ok, _} -> {:ok, :regular} {:error, reason} -> @@ -812,27 +1022,35 @@ defmodule File do """ @spec rm_rf!(Path.t) :: [binary] | no_return def rm_rf!(path) do - path = IO.chardata_to_string(path) case rm_rf(path) do {:ok, files} -> files {:error, reason, _} -> - raise File.Error, reason: reason, path: path, + raise File.Error, reason: reason, path: IO.chardata_to_string(path), action: "remove files and directories recursively from" end end @doc ~S""" - Opens the given `path` according to the given list of modes. + Opens the given `path`. In order to write and read files, one must use the functions - in the `IO` module. By default, a file is opened in binary mode + in the `IO` module. By default, a file is opened in `:binary` mode, which requires the functions `IO.binread/2` and `IO.binwrite/2` to interact with the file. A developer may pass `:utf8` as an option when opening the file and then all other functions from `IO` are available, since they work directly with Unicode data. + `modes_or_function` can either be a list of modes or a function. If it's a + list, it's considered to be a list of modes (that are documented below). If + it's a function, then it's equivalent to calling `open(path, [], + modes_or_function)`. 
See the documentation for `open/3` for more information + on this function. + The allowed modes: + * `:binary` - opens the file in binary mode, disabling special handling of unicode sequences + (default mode). + * `:read` - the file, which must exist, is opened for reading. * `:write` - the file is opened for writing. It is created if it does not @@ -848,8 +1066,8 @@ defmodule File do * `:exclusive` - the file, when opened for writing, is created if it does not exist. If the file exists, open will return `{:error, :eexist}`. - * `:char_list` - when this term is given, read operations on the file will - return char lists rather than binaries. + * `:charlist` - when this term is given, read operations on the file will + return charlists rather than binaries. * `:compressed` - makes it possible to read or write gzip compressed files. @@ -859,21 +1077,22 @@ defmodule File do * `:utf8` - this option denotes how data is actually stored in the disk file and makes the file perform automatic translation of characters to - and from utf-8. + and from UTF-8. If data is sent to a file in a format that cannot be converted to the - utf-8 or if data is read by a function that returns data in a format that + UTF-8 or if data is read by a function that returns data in a format that cannot cope with the character range of the data, an error occurs and the file will be closed. - Check http://www.erlang.org/doc/man/file.html#open-2 for more information about - other options like `:read_ahead` and `:delayed_write`. + * `:delayed_write`, `:raw`, `:ram`, `:read_ahead`, `:sync`, `{:encoding, ...}`, + `{:read_ahead, pos_integer}`, `{:delayed_write, non_neg_integer, non_neg_integer}` - + for more information about these options see [`:file.open/2`](http://www.erlang.org/doc/man/file.html#open-2). This function returns: * `{:ok, io_device}` - the file has been opened in the requested mode. - `io_device` is actually the pid of the process which handles the file. + `io_device` is actually the PID of the process which handles the file. This process is linked to the process which originally opened the file. If any process to which the `io_device` is linked terminates, the file will be closed and the process itself will be terminated. @@ -890,25 +1109,26 @@ defmodule File do File.close(file) """ - @spec open(Path.t, list) :: {:ok, io_device} | {:error, posix} - def open(path, modes \\ []) + @spec open(Path.t, [mode | :ram]) :: {:ok, io_device} | {:error, posix} + @spec open(Path.t, (io_device -> res)) :: {:ok, res} | {:error, posix} when res: var + def open(path, modes_or_function \\ []) def open(path, modes) when is_list(modes) do - F.open(IO.chardata_to_string(path), open_defaults(modes, true)) + :file.open(IO.chardata_to_string(path), normalize_modes(modes, true)) end - def open(path, function) when is_function(function) do + def open(path, function) when is_function(function, 1) do open(path, [], function) end @doc """ - Similar to `open/2` but expects a function as last argument. + Similar to `open/2` but expects a function as its last argument. - The file is opened, given to the function as argument and + The file is opened, given to the function as an argument and automatically closed after the function returns, regardless if there was an error when executing the function. - It returns `{:ok, function_result}` in case of success, + Returns `{:ok, function_result}` in case of success, `{:error, reason}` otherwise. 
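As a rough sketch contrasting the default binary mode with the `:utf8` mode listed above (the file name is hypothetical):

    # Default mode: work with raw bytes via IO.binread/2 and IO.binwrite/2.
    {:ok, device} = File.open("notes.txt", [:write])
    IO.binwrite(device, "hello\n")
    File.close(device)

    # :utf8 mode: the Unicode-aware IO.read/2 and IO.write/2 become available.
    {:ok, device} = File.open("notes.txt", [:utf8, :write])
    IO.write(device, "héllo\n")
    File.close(device)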
This function expects the file to be closed with success, @@ -922,47 +1142,53 @@ defmodule File do IO.read(file, :line) end) + See `open/2` for the list of available `modes`. """ - @spec open(Path.t, list, (io_device -> res)) :: {:ok, res} | {:error, posix} when res: var - def open(path, modes, function) do + @spec open(Path.t, [mode | :ram], (io_device -> res)) :: {:ok, res} | {:error, posix} when res: var + def open(path, modes, function) when is_list(modes) and is_function(function, 1) do case open(path, modes) do - {:ok, device} -> + {:ok, io_device} -> try do - {:ok, function.(device)} + {:ok, function.(io_device)} after - :ok = close(device) + :ok = close(io_device) end other -> other end end @doc """ - Same as `open/2` but raises an error if file could not be opened. + Similar to `open/2` but raises an error if file could not be opened. - Returns the `io_device` otherwise. + Returns the IO device otherwise. + + See `open/2` for the list of available modes. """ - @spec open!(Path.t, list) :: io_device | no_return - def open!(path, modes \\ []) do - path = IO.chardata_to_string(path) - case open(path, modes) do - {:ok, device} -> device + @spec open!(Path.t, [mode | :ram]) :: io_device | no_return + @spec open!(Path.t, (io_device -> res)) :: res | no_return when res: var + def open!(path, modes_or_function \\ []) do + case open(path, modes_or_function) do + {:ok, io_device_or_function_result} -> + io_device_or_function_result {:error, reason} -> - raise File.Error, reason: reason, action: "open", path: path + raise File.Error, reason: reason, action: "open", path: IO.chardata_to_string(path) end end @doc """ - Same as `open/3` but raises an error if file could not be opened. + Similar to `open/3` but raises an error if file could not be opened. + + If it succeeds opening the file, it returns the `function` result on the IO device. - Returns the function result otherwise. + See `open/2` for the list of available `modes`. """ - @spec open!(Path.t, list, (io_device -> res)) :: res | no_return when res: var + @spec open!(Path.t, [mode | :ram], (io_device -> res)) :: res | no_return when res: var def open!(path, modes, function) do - path = IO.chardata_to_string(path) case open(path, modes, function) do - {:ok, device} -> device + {:ok, function_result} -> + function_result {:error, reason} -> - raise File.Error, reason: reason, action: "open", path: path + raise File.Error, reason: reason, action: "open", path: IO.chardata_to_string(path) end end @@ -970,27 +1196,36 @@ defmodule File do Gets the current working directory. In rare circumstances, this function can fail on Unix. It may happen - if read permission does not exist for the parent directories of the + if read permissions do not exist for the parent directories of the current directory. For this reason, returns `{:ok, cwd}` in case of success, `{:error, reason}` otherwise. """ @spec cwd() :: {:ok, binary} | {:error, posix} def cwd() do - case F.get_cwd do - {:ok, base} -> {:ok, IO.chardata_to_string(base)} + case :file.get_cwd do + {:ok, base} -> {:ok, IO.chardata_to_string(fix_drive_letter(base))} {:error, _} = error -> error end end + defp fix_drive_letter([l, ?:, ?/ | rest] = original) when l in ?A..?Z do + case :os.type() do + {:win32, _} -> [l + ?a - ?A, ?:, ?/ | rest] + _ -> original + end + end + + defp fix_drive_letter(original), do: original + @doc """ The same as `cwd/0`, but raises an exception if it fails. 
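A small sketch of handling the `cwd/0` result; note that, per `fix_drive_letter/1` above, the drive letter on Windows is normalized to lowercase (for example "c:/...").

    case File.cwd() do
      {:ok, cwd} -> IO.puts("working in #{cwd}")
      {:error, reason} -> IO.puts("cannot read cwd: #{inspect(reason)}")
    end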
""" @spec cwd!() :: binary | no_return def cwd!() do - case F.get_cwd do - {:ok, cwd} -> IO.chardata_to_string(cwd) + case cwd() do + {:ok, cwd} -> cwd {:error, reason} -> - raise File.Error, reason: reason, action: "get current working directory" + raise File.Error, reason: reason, action: "get current working directory" end end @@ -1001,7 +1236,7 @@ defmodule File do """ @spec cd(Path.t) :: :ok | {:error, posix} def cd(path) do - F.set_cwd(IO.chardata_to_string(path)) + :file.set_cwd(IO.chardata_to_string(path)) end @doc """ @@ -1009,25 +1244,25 @@ defmodule File do """ @spec cd!(Path.t) :: :ok | no_return def cd!(path) do - path = IO.chardata_to_string(path) - case F.set_cwd(path) do + case cd(path) do :ok -> :ok {:error, reason} -> - raise File.Error, reason: reason, action: "set current working directory to", path: path + raise File.Error, reason: reason, action: "set current working directory to", + path: IO.chardata_to_string(path) end end @doc """ Changes the current directory to the given `path`, - executes the given function and then revert back - to the previous path regardless if there is an exception. + executes the given function and then reverts back + to the previous path regardless of whether there is an exception. Raises an error if retrieving or changing the current directory fails. """ @spec cd!(Path.t, (() -> res)) :: res | no_return when res: var def cd!(path, function) do - old = cwd! + old = cwd!() cd!(path) try do function.() @@ -1037,14 +1272,14 @@ defmodule File do end @doc """ - Returns list of files in the given directory. + Returns the list of files in the given directory. - It returns `{:ok, [files]}` in case of success, + Returns `{:ok, [files]}` in case of success, `{:error, reason}` otherwise. """ @spec ls(Path.t) :: {:ok, [binary]} | {:error, posix} def ls(path \\ ".") do - case F.list_dir(IO.chardata_to_string(path)) do + case :file.list_dir(IO.chardata_to_string(path)) do {:ok, file_list} -> {:ok, Enum.map(file_list, &IO.chardata_to_string/1)} {:error, _} = error -> error end @@ -1056,11 +1291,11 @@ defmodule File do """ @spec ls!(Path.t) :: [binary] | no_return def ls!(path \\ ".") do - path = IO.chardata_to_string(path) case ls(path) do {:ok, value} -> value {:error, reason} -> - raise File.Error, reason: reason, action: "list directory", path: path + raise File.Error, reason: reason, action: "list directory", + path: IO.chardata_to_string(path) end end @@ -1070,11 +1305,11 @@ defmodule File do Note that if the option `:delayed_write` was used when opening the file, `close/1` might return an old write error and not even try to close the file. - See `open/2`. + See `open/2` for more information. """ @spec close(io_device) :: :ok | {:error, posix | :badarg | :terminated} def close(io_device) do - F.close(io_device) + :file.close(io_device) end @doc """ @@ -1083,13 +1318,13 @@ defmodule File do The stream implements both `Enumerable` and `Collectable` protocols, which means it can be used both for read and write. - The `line_or_byte` argument configures how the file is read when + The `line_or_bytes` argument configures how the file is read when streaming, by `:line` (default) or by a given number of bytes. Operating the stream can fail on open for the same reasons as - `File.open!/2`. Note that the file is automatically opened only and - every time streaming begins. There is no need to pass `:read` and - `:write` modes, as those are automatically set by Elixir. + `File.open!/2`. Note that the file is automatically opened each time streaming + begins. 
There is no need to pass `:read` and `:write` modes, as those are + automatically set by Elixir. ## Raw files @@ -1097,59 +1332,98 @@ defmodule File do device cannot be shared and as such it is convenient to open the file in raw mode for performance reasons. Therefore, Elixir **will** open streams in `:raw` mode with the `:read_ahead` option unless an encoding - is specified. + is specified. This means any data streamed into the file must be + converted to `t:iodata/0` type. If you pass `[:utf8]` in the modes parameter, + the underlying stream will use `IO.write/2` and the `String.Chars` protocol + to convert the data. See `IO.binwrite/2` and `IO.write/2` . One may also consider passing the `:delayed_write` option if the stream is meant to be written to under a tight loop. + + ## Byte order marks + + If you pass `:trim_bom` in the modes parameter, the stream will + trim UTF-8, UTF-16 and UTF-32 byte order marks when reading from file. + + ## Examples + + # Read in 2048 byte chunks rather than lines + File.stream!("./test/test.data", [], 2048) + #=> %File.Stream{line_or_bytes: 2048, modes: [:raw, :read_ahead, :binary], + #=> path: "./test/test.data", raw: true} + + See `Stream.run/1` for an example of streaming into a file. + """ def stream!(path, modes \\ [], line_or_bytes \\ :line) do - modes = open_defaults(modes, true) + modes = normalize_modes(modes, true) File.Stream.__build__(IO.chardata_to_string(path), modes, line_or_bytes) end @doc """ - Changes the unix file `mode` for a given `file`. - Returns `:ok` on success, or `{:error, reason}` - on failure. + Changes the `mode` for a given `file`. + + Returns `:ok` on success, or `{:error, reason}` on failure. + + ## Permissions + + File permissions are specified by adding together the following octal flags: + + * `0o400` - read permission: owner + * `0o200` - write permission: owner + * `0o100` - execute permission: owner + + * `0o040` - read permission: group + * `0o020` - write permission: group + * `0o010` - execute permission: group + + * `0o004` - read permission: other + * `0o002` - write permission: other + * `0o001` - execute permission: other + + For example, setting the mode `0o755` gives it + write, read and execute permission to the owner + and both read and execute permission to group + and others. """ - @spec chmod(Path.t, integer) :: :ok | {:error, posix} + @spec chmod(Path.t, non_neg_integer) :: :ok | {:error, posix} def chmod(path, mode) do - F.change_mode(IO.chardata_to_string(path), mode) + :file.change_mode(IO.chardata_to_string(path), mode) end @doc """ Same as `chmod/2`, but raises an exception in case of failure. Otherwise `:ok`. """ - @spec chmod!(Path.t, integer) :: :ok | no_return + @spec chmod!(Path.t, non_neg_integer) :: :ok | no_return def chmod!(path, mode) do - path = IO.chardata_to_string(path) case chmod(path, mode) do :ok -> :ok {:error, reason} -> - raise File.Error, reason: reason, action: "change mode for", path: path + raise File.Error, reason: reason, action: "change mode for", + path: IO.chardata_to_string(path) end end @doc """ - Changes the user group given by the group id `gid` + Changes the group given by the group id `gid` for a given `file`. Returns `:ok` on success, or `{:error, reason}` on failure. 
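Two short sketches of the features documented above, using hypothetical paths: trimming a byte order mark while streaming, and the octal notation accepted by `chmod/2`.

    # Skip a leading UTF-8/UTF-16/UTF-32 BOM when reading line by line.
    File.stream!("report.csv", [:trim_bom]) |> Enum.take(1)

    # 0o755: owner may read/write/execute, group and others may read/execute.
    File.chmod("priv/run.sh", 0o755)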
""" - @spec chgrp(Path.t, integer) :: :ok | {:error, posix} + @spec chgrp(Path.t, non_neg_integer) :: :ok | {:error, posix} def chgrp(path, gid) do - F.change_group(IO.chardata_to_string(path), gid) + :file.change_group(IO.chardata_to_string(path), gid) end @doc """ Same as `chgrp/2`, but raises an exception in case of failure. Otherwise `:ok`. """ - @spec chgrp!(Path.t, integer) :: :ok | no_return + @spec chgrp!(Path.t, non_neg_integer) :: :ok | no_return def chgrp!(path, gid) do - path = IO.chardata_to_string(path) case chgrp(path, gid) do :ok -> :ok {:error, reason} -> - raise File.Error, reason: reason, action: "change group for", path: path + raise File.Error, reason: reason, action: "change group for", + path: IO.chardata_to_string(path) end end @@ -1158,44 +1432,51 @@ defmodule File do for a given `file`. Returns `:ok` on success, or `{:error, reason}` on failure. """ - @spec chown(Path.t, integer) :: :ok | {:error, posix} + @spec chown(Path.t, non_neg_integer) :: :ok | {:error, posix} def chown(path, uid) do - F.change_owner(IO.chardata_to_string(path), uid) + :file.change_owner(IO.chardata_to_string(path), uid) end @doc """ Same as `chown/2`, but raises an exception in case of failure. Otherwise `:ok`. """ - @spec chown!(Path.t, integer) :: :ok | no_return + @spec chown!(Path.t, non_neg_integer) :: :ok | no_return def chown!(path, uid) do - path = IO.chardata_to_string(path) case chown(path, uid) do :ok -> :ok {:error, reason} -> - raise File.Error, reason: reason, action: "change owner for", path: path + raise File.Error, reason: reason, action: "change owner for", + path: IO.chardata_to_string(path) end end ## Helpers - @read_ahead 64*1024 + @read_ahead_size 64 * 1024 - defp open_defaults([:char_list|t], _add_binary) do - open_defaults(t, false) + defp normalize_modes([:utf8 | rest], binary?) do + [encoding: :utf8] ++ normalize_modes(rest, binary?) end - - defp open_defaults([:utf8|t], add_binary) do - open_defaults([{:encoding, :utf8}|t], add_binary) + defp normalize_modes([:read_ahead | rest], binary?) do + [read_ahead: @read_ahead_size] ++ normalize_modes(rest, binary?) end - - defp open_defaults([:read_ahead|t], add_binary) do - open_defaults([{:read_ahead, @read_ahead}|t], add_binary) + # TODO: Remove :char_list mode by 2.0 + defp normalize_modes([mode | rest], _binary?) when mode in [:charlist, :char_list] do + if mode == :char_list do + IO.warn "the :char_list mode is deprecated, use :charlist" + end + normalize_modes(rest, false) end - - defp open_defaults([h|t], add_binary) do - [h|open_defaults(t, add_binary)] + defp normalize_modes([mode | rest], binary?) do + [mode | normalize_modes(rest, binary?)] end + defp normalize_modes([], true), do: [:binary] + defp normalize_modes([], false), do: [] - defp open_defaults([], true), do: [:binary] - defp open_defaults([], false), do: [] + defp maybe_to_string(path) when is_list(path), + do: IO.chardata_to_string(path) + defp maybe_to_string(path) when is_binary(path), + do: path + defp maybe_to_string(path), + do: path end diff --git a/lib/elixir/lib/file/stat.ex b/lib/elixir/lib/file/stat.ex index 5a684ff58ca..c395f245680 100644 --- a/lib/elixir/lib/file/stat.ex +++ b/lib/elixir/lib/file/stat.ex @@ -2,11 +2,11 @@ require Record defmodule File.Stat do @moduledoc """ - A struct responsible to hold file information. + A struct that holds file information. In Erlang, this struct is represented by a `:file_info` record. Therefore this module also provides functions for converting - in between the Erlang record and the Elixir struct. 
+ between the Erlang record and the Elixir struct. Its fields are: @@ -32,7 +32,7 @@ defmodule File.Stat do systems which have no concept of links. * `major_device` - identifies the file system where the file is located. - In windows, the number indicates a drive as follows: 0 means A:, 1 means + In Windows, the number indicates a drive as follows: 0 means A:, 1 means B:, and so on. * `minor_device` - only valid for character devices on Unix. In all other @@ -41,14 +41,15 @@ defmodule File.Stat do * `inode` - gives the inode number. On non-Unix file systems, this field will be zero. - * `uid` - indicates the owner of the file. + * `uid` - indicates the owner of the file. Will be zero for non-Unix file + systems. - * `gid` - gives the group that the owner of the file belongs to. Will be - zero for non-Unix file systems. + * `gid` - indicates the group that owns the file. Will be zero for + non-Unix file systems. The time type returned in `atime`, `mtime`, and `ctime` is dependent on the time type set in options. `{:time, type}` where type can be `:local`, - `:universal`, or `:posix`. Default is `:local`. + `:universal`, or `:posix`. Default is `:universal`. """ record = Record.extract(:file_info, from_lib: "kernel/include/file.hrl") @@ -57,6 +58,7 @@ defmodule File.Stat do pairs = :lists.zip(keys, vals) defstruct keys + @type t :: %__MODULE__{} @doc """ Converts a `File.Stat` struct to a `:file_info` record. @@ -68,6 +70,7 @@ defmodule File.Stat do @doc """ Converts a `:file_info` record into a `File.Stat`. """ + def from_record(file_info) def from_record({:file_info, unquote_splicing(vals)}) do %File.Stat{unquote_splicing(pairs)} end diff --git a/lib/elixir/lib/file/stream.ex b/lib/elixir/lib/file/stream.ex index 3c7ffed9190..5b50f1bb635 100644 --- a/lib/elixir/lib/file/stream.ex +++ b/lib/elixir/lib/file/stream.ex @@ -13,33 +13,32 @@ defmodule File.Stream do defstruct path: nil, modes: [], line_or_bytes: :line, raw: true + @type t :: %__MODULE__{} + @doc false def __build__(path, modes, line_or_bytes) do raw = :lists.keyfind(:encoding, 1, modes) == false modes = - if raw do - if :lists.keyfind(:read_ahead, 1, modes) == {:read_ahead, false} do - [:raw|modes] - else - [:raw, :read_ahead|modes] - end - else - modes + case raw do + true -> + if :lists.keyfind(:read_ahead, 1, modes) == {:read_ahead, false} do + [:raw | modes] + else + [:raw, :read_ahead | modes] + end + false -> + modes end %File.Stream{path: path, modes: modes, raw: raw, line_or_bytes: line_or_bytes} end defimpl Collectable do - def empty(stream) do - stream - end - def into(%{path: path, modes: modes, raw: raw} = stream) do - modes = for mode <- modes, not mode in [:read], do: mode + modes = for mode <- modes, mode not in [:read], do: mode - case :file.open(path, [:write|modes]) do + case :file.open(path, [:write | modes]) do {:ok, device} -> {:ok, into(device, stream, raw)} {:error, reason} -> @@ -55,22 +54,27 @@ defmodule File.Stream do false -> IO.write(device, x) end :ok, :done -> - :file.close(device) + # If delayed_write option is used and the last write failed will + # MatchError here as {:error, _} is returned. + :ok = :file.close(device) stream :ok, :halt -> - :file.close(device) + # If delayed_write option is used and the last write failed will + # MatchError here as {:error, _} is returned. 
+ :ok = :file.close(device) end end end defimpl Enumerable do - def reduce(%{path: path, modes: modes, line_or_bytes: line_or_bytes, raw: raw}, acc, fun) do - modes = for mode <- modes, not mode in [:write, :append], do: mode + @read_ahead_size 64 * 1024 + def reduce(%{path: path, modes: modes, line_or_bytes: line_or_bytes, raw: raw}, acc, fun) do start_fun = fn -> - case :file.open(path, modes) do - {:ok, device} -> device + case :file.open(path, read_modes(modes)) do + {:ok, device} -> + if :trim_bom in modes, do: trim_bom(device), else: device {:error, reason} -> raise File.Error, reason: reason, action: "stream", path: path end @@ -85,12 +89,70 @@ defmodule File.Stream do Stream.resource(start_fun, next_fun, &:file.close/1).(acc, fun) end - def count(_stream) do - {:error, __MODULE__} + def count(%{path: path, modes: modes, line_or_bytes: :line} = stream) do + pattern = :binary.compile_pattern("\n") + counter = &count_lines(&1, path, pattern, read_function(stream), 0) + + case File.open(path, read_modes(modes), counter) do + {:ok, count} -> + {:ok, count} + {:error, reason} -> + raise File.Error, reason: reason, action: "stream", path: path + end + end + + def count(%{path: path, line_or_bytes: bytes}) do + case File.stat(path) do + {:ok, %{size: 0}} -> + {:error, __MODULE__} + {:ok, %{size: size}} -> + {:ok, div(size, bytes) + if(rem(size, bytes) == 0, do: 0, else: 1)} + {:error, reason} -> + raise File.Error, reason: reason, action: "stream", path: path + end end def member?(_stream, _term) do {:error, __MODULE__} end + + defp trim_bom(device) do + header = IO.binread(device, 4) + {:ok, _new_pos} = :file.position(device, bom_length(header)) + device + end + + defp bom_length(<<239, 187, 191, _rest::binary>>), + do: 3 + defp bom_length(<<254, 255, _rest::binary>>), + do: 2 + defp bom_length(<<255, 254, _rest::binary>>), + do: 2 + defp bom_length(<<0, 0, 254, 255, _rest::binary>>), + do: 4 + defp bom_length(<<254, 255, 0, 0, _rest::binary>>), + do: 4 + defp bom_length(_binary), + do: 0 + + defp read_modes(modes) do + for mode <- modes, mode not in [:write, :append, :trim_bom], do: mode + end + + defp count_lines(device, path, pattern, read, count) do + case read.(device) do + data when is_binary(data) -> + count_lines(device, path, pattern, read, count + count_lines(data, pattern)) + :eof -> + count + {:error, reason} -> + raise File.Error, reason: reason, action: "stream", path: path + end + end + + defp count_lines(data, pattern), do: length(:binary.matches(data, pattern)) + + defp read_function(%{raw: true}), do: &IO.binread(&1, @read_ahead_size) + defp read_function(%{raw: false}), do: &IO.read(&1, @read_ahead_size) end end diff --git a/lib/elixir/lib/float.ex b/lib/elixir/lib/float.ex index 720d043958f..b46bc1d78fe 100644 --- a/lib/elixir/lib/float.ex +++ b/lib/elixir/lib/float.ex @@ -1,24 +1,37 @@ +import Kernel, except: [round: 1] + defmodule Float do @moduledoc """ - Functions for working with floating point numbers. + Functions for working with floating-point numbers. """ + import Bitwise + + @power_of_2_to_52 4503599627370496 + @precision_range 0..15 + @type precision_range :: 0..15 + @doc """ Parses a binary into a float. - If successful, returns a tuple of the form `{float, remainder_of_binary}`. - Otherwise `:error`. + If successful, returns a tuple in the form of `{float, remainder_of_binary}`; + when the binary cannot be coerced into a valid float, the atom `:error` is + returned. 
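A couple of illustrative calls (values chosen here, not taken from the changelog) showing that `parse/1` also accepts an explicit leading `+` sign:

    iex> Float.parse("+12.5kg")
    {12.5, "kg"}
    iex> Float.parse("+3")
    {3.0, ""}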
+ + If the size of float exceeds the maximum size of `1.7976931348623157e+308`, + the `ArgumentError` exception is raised. + + If you want to convert a string-formatted float directly to a float, + `String.to_float/1` can be used instead. ## Examples iex> Float.parse("34") - {34.0,""} - + {34.0, ""} iex> Float.parse("34.25") - {34.25,""} - + {34.25, ""} iex> Float.parse("56.5xyz") - {56.5,"xyz"} + {56.5, "xyz"} iex> Float.parse("pi") :error @@ -26,229 +39,410 @@ defmodule Float do """ @spec parse(binary) :: {float, binary} | :error def parse("-" <> binary) do - case parse_unsign(binary) do + case parse_unsigned(binary) do :error -> :error {number, remainder} -> {-number, remainder} end end - def parse(binary) do - parse_unsign(binary) + def parse("+" <> binary) do + parse_unsigned(binary) end - defp parse_unsign("-" <> _), do: :error - defp parse_unsign(binary) when is_binary(binary) do - case Integer.parse binary do - :error -> :error - {integer_part, after_integer} -> parse_unsign after_integer, integer_part - end + def parse(binary) do + parse_unsigned(binary) end - # Dot followed by digit is required afterwards or we are done - defp parse_unsign(<< ?., char, rest :: binary >>, int) when char in ?0..?9 do - parse_unsign(rest, char - ?0, 1, int) - end + defp parse_unsigned(<>) when digit in ?0..?9, do: + parse_unsigned(rest, false, false, <>) - defp parse_unsign(rest, int) do - {:erlang.float(int), rest} - end + defp parse_unsigned(binary) when is_binary(binary), do: + :error - # Handle decimal points - defp parse_unsign(<< char, rest :: binary >>, float, decimal, int) when char in ?0..?9 do - parse_unsign rest, 10 * float + (char - ?0), decimal + 1, int - end - - defp parse_unsign(<< ?e, after_e :: binary >>, float, decimal, int) do - case Integer.parse after_e do - :error -> - # Note we rebuild the binary here instead of breaking it apart at - # the function clause because the current approach copies a binary - # just on this branch. If we broke it apart in the function clause, - # the copy would happen when calling Integer.parse/1. 
- {floatify(int, float, decimal), << ?e, after_e :: binary >>} - {exponential, after_exponential} -> - {floatify(int, float, decimal, exponential), after_exponential} - end - end + defp parse_unsigned(<>, dot?, e?, acc) when digit in ?0..?9, do: + parse_unsigned(rest, dot?, e?, <>) - defp parse_unsign(bitstring, float, decimal, int) do - {floatify(int, float, decimal), bitstring} - end + defp parse_unsigned(<>, false, false, acc) when digit in ?0..?9, do: + parse_unsigned(rest, true, false, <>) - defp floatify(int, float, decimal, exponential \\ 0) do - multiplier = if int < 0, do: -1.0, else: 1.0 + defp parse_unsigned(<>, dot?, false, acc) when exp_marker in 'eE' and digit in ?0..?9, do: + parse_unsigned(rest, true, true, <>) - # Try to ensure the minimum amount of rounding errors - result = multiplier * (abs(int) * :math.pow(10, decimal) + float) * :math.pow(10, exponential - decimal) + defp parse_unsigned(<>, dot?, false, acc) when exp_marker in 'eE' and sign in '-+' and digit in ?0..?9, do: + parse_unsigned(rest, true, true, <>) - # Try avoiding stuff like this: - # iex(1)> 0.0001 * 75 - # 0.007500000000000001 - # Due to IEEE 754 floating point standard - # http://docs.oracle.com/cd/E19957-01/806-3568/ncg_goldberg.html + defp parse_unsigned(rest, dot?, _e?, acc), do: + {:erlang.binary_to_float(add_dot(acc, dot?)), rest} - final_decimal_places = decimal - exponential - if final_decimal_places > 0 do - decimal_power_round = :math.pow(10, final_decimal_places) - trunc(result * decimal_power_round) / decimal_power_round - else - result - end - end + defp add_dot(acc, true), do: acc + defp add_dot(acc, false), do: acc <> ".0" @doc """ Rounds a float to the largest integer less than or equal to `num`. - ## Examples + `floor/2` also accepts a precision to round a floating-point value down + to an arbitrary number of fractional digits (between 0 and 15). + The operation is performed on the binary floating point, without a + conversion to decimal. - iex> Float.floor(34) - 34 + The behaviour of `floor/2` for floats can be surprising. For example: - iex> Float.floor(34.25) - 34 + iex> Float.floor(12.52, 2) + 12.51 + + One may have expected it to floor to 12.52. This is not a bug. + Most decimal fractions cannot be represented as a binary floating point + and therefore the number above is internally represented as 12.51999999, + which explains the behaviour above. + + This function always returns a float. `Kernel.trunc/1` may be used instead to + truncate the result to an integer afterwards. + ## Examples + + iex> Float.floor(34.25) + 34.0 iex> Float.floor(-56.5) - -57 + -57.0 + iex> Float.floor(34.259, 2) + 34.25 """ - @spec floor(float | integer) :: integer - def floor(num) when is_integer(num), do: num - def floor(num) when is_float(num) do - truncated = :erlang.trunc(num) - case :erlang.abs(num - truncated) do - x when x > 0 and num < 0 -> truncated - 1 - _ -> truncated - end + @spec floor(float, precision_range) :: float + def floor(number, precision \\ 0) + + def floor(number, precision) when is_float(number) and precision in @precision_range do + round(number, precision, :floor) + end + + def floor(number, precision) when is_float(number) do + raise ArgumentError, invalid_precision_message(precision) end @doc """ - Rounds a float to the largest integer greater than or equal to `num`. + Rounds a float to the smallest integer greater than or equal to `num`. 
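Because both functions now always return floats, a short sketch with exactly representable values:

    iex> Float.floor(2.5)
    2.0
    iex> Float.ceil(2.5)
    3.0
    iex> Float.floor(-2.5)
    -3.0
    iex> Float.ceil(-2.5)
    -2.0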
- ## Examples + `ceil/2` also accepts a precision to round a floating-point value down + to an arbitrary number of fractional digits (between 0 and 15). - iex> Float.ceil(34) - 34 + The operation is performed on the binary floating point, without a + conversion to decimal. - iex> Float.ceil(34.25) - 35 + The behaviour of `ceil/2` for floats can be surprising. For example: + + iex> Float.ceil(-12.52, 2) + -12.51 + + One may have expected it to ceil to -12.52. This is not a bug. + Most decimal fractions cannot be represented as a binary floating point + and therefore the number above is internally represented as -12.51999999, + which explains the behaviour above. + + This function always returns floats. `Kernel.trunc/1` may be used instead to + truncate the result to an integer afterwards. + ## Examples + + iex> Float.ceil(34.25) + 35.0 iex> Float.ceil(-56.5) - -56 + -56.0 + iex> Float.ceil(34.251, 2) + 34.26 """ - @spec ceil(float | integer) :: integer - def ceil(num) when is_integer(num), do: num - def ceil(num) when is_float(num) do - truncated = :erlang.trunc(num) - case :erlang.abs(num - truncated) do - x when x > 0 and num > 0 -> truncated + 1 - _ -> truncated - end + @spec ceil(float, precision_range) :: float + def ceil(number, precision \\ 0) + + def ceil(number, precision) when is_float(number) and precision in @precision_range do + round(number, precision, :ceil) + end + + def ceil(number, precision) when is_float(number) do + raise ArgumentError, invalid_precision_message(precision) end @doc """ - Rounds a floating point value to an arbitrary number of fractional digits - (between 0 and 15). + Rounds a floating-point value to an arbitrary number of fractional + digits (between 0 and 15). + + The rounding direction always ties to half up. The operation is + performed on the binary floating point, without a conversion to decimal. + + This function only accepts floats and always returns a float. Use + `Kernel.round/1` if you want a function that accepts both floats + and integers and always returns an integer. + + The behaviour of `round/2` for floats can be surprising. For example: + + iex> Float.round(5.5675, 3) + 5.567 + + One may have expected it to round to the half up 5.568. This is not a bug. + Most decimal fractions cannot be represented as a binary floating point + and therefore the number above is internally represented as 5.567499999, + which explains the behaviour above. If you want exact rounding for decimals, + you must use a decimal library. The behaviour above is also in accordance + to reference implementations, such as "Correctly Rounded Binary-Decimal and + Decimal-Binary Conversions" by David M. Gay. ## Examples + iex> Float.round(12.5) + 13.0 iex> Float.round(5.5674, 3) 5.567 - iex> Float.round(5.5675, 3) - 5.568 - + 5.567 iex> Float.round(-5.5674, 3) -5.567 - - iex> Float.round(-5.5675, 3) - -5.568 + iex> Float.round(-5.5675) + -6.0 + iex> Float.round(12.341444444444441, 15) + 12.341444444444441 """ - @spec round(float, integer) :: float - def round(number, precision) when is_float(number) and is_integer(precision) and precision in 0..15 do - Kernel.round(number * :math.pow(10, precision)) / :math.pow(10, precision) + @spec round(float, precision_range) :: float + # This implementation is slow since it relies on big integers. + # Faster implementations are available on more recent papers + # and could be implemented in the future. 
+ def round(float, precision \\ 0) + + def round(float, precision) when is_float(float) and precision in @precision_range do + round(float, precision, :half_up) end - @doc """ - Returns a char list which corresponds to the text representation of the given float. + def round(number, precision) when is_float(number) do + raise ArgumentError, invalid_precision_message(precision) + end - Inlined by the compiler. + defp round(float, precision, rounding) do + <> = <> + {num, count, _} = decompose(significant) + count = count - exp + 1023 + + cond do + count <= 0 or # There is no decimal precision + (0 == exp and <<0::52>> == significant) -> #zero or minus zero + float + + count >= 104 -> # Precision beyond 15 digits + case rounding do + :ceil when sign === 0 -> 1 / power_of_10(precision) + :floor when sign === 1 -> -1 / power_of_10(precision) + _ -> 0.0 + end + + count <= precision -> # We are asking more precision than we have + float + + true -> + # Difference in precision between float and asked precision + # We subtract 1 because we need to calculate the remainder too + diff = count - precision - 1 + + # Get up to latest so we calculate the remainder + power_of_10 = power_of_10(diff) + + # Convert the numerand to decimal base + num = num * power_of_5(count) + + # Move to the given precision - 1 + num = div(num, power_of_10) + div = div(num, 10) + num = rounding(rounding, sign, num, div) + + # Convert back to float without loss + # http://www.exploringbinary.com/correct-decimal-to-floating-point-using-big-integers/ + den = power_of_10(precision) + boundary = den <<< 52 + + cond do + num == 0 -> + 0.0 + num >= boundary -> + {den, exp} = scale_down(num, boundary, 52) + decimal_to_float(sign, num, den, exp) + true -> + {num, exp} = scale_up(num, boundary, 52) + decimal_to_float(sign, num, den, exp) + end + end + end - ## Examples + defp scale_up(num, boundary, exp) when num >= boundary, do: {num, exp} + defp scale_up(num, boundary, exp), do: scale_up(num <<< 1, boundary, exp - 1) - iex> Float.to_char_list(7.0) - '7.00000000000000000000e+00' + defp scale_down(num, den, exp) do + new_den = den <<< 1 + if num < new_den do + {den >>> 52, exp} + else + scale_down(num, new_den, exp + 1) + end + end - """ - @spec to_char_list(float) :: char_list - def to_char_list(number) do - :erlang.float_to_list(number) + defp decimal_to_float(sign, num, den, exp) do + quo = div(num, den) + rem = num - quo * den + + tmp = + case den >>> 1 do + den when rem > den -> quo + 1 + den when rem < den -> quo + _ when (quo &&& 1) === 1 -> quo + 1 + _ -> quo + end + + tmp = tmp - @power_of_2_to_52 + <> = <> + tmp end - @doc """ - Returns a list which corresponds to the text representation - of `float`. 
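Two ties-to-half-up examples with exactly representable inputs (illustrative values, not from the changelog):

    iex> Float.round(2.5)
    3.0
    iex> Float.round(1.125, 2)
    1.13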
+ defp rounding(:floor, 1, _num, div), do: div + 1 + defp rounding(:ceil, 0, _num, div), do: div + 1 + defp rounding(:half_up, _sign, num, div) do + case rem(num, 10) do + rem when rem < 5 -> div + rem when rem >= 5 -> div + 1 + end + end + defp rounding(_, _, _, div), do: div + + Enum.reduce 0..104, 1, fn x, acc -> + defp power_of_10(unquote(x)), do: unquote(acc) + acc * 10 + end - ## Options + Enum.reduce 0..104, 1, fn x, acc -> + defp power_of_5(unquote(x)), do: unquote(acc) + acc * 5 + end - * `:decimals` — number of decimal points to show - * `:scientific` — number of decimal points to show, in scientific format - * `:compact` — when true, use the most compact representation (ignored - with the `scientific` option) + @doc """ + Returns a pair of integers whose ratio is exactly equal + to the original float and with a positive denominator. ## Examples - iex> Float.to_char_list 7.1, [decimals: 2, compact: true] - '7.1' + iex> Float.ratio(3.14) + {7070651414971679, 2251799813685248} + iex> Float.ratio(-3.14) + {-7070651414971679, 2251799813685248} + iex> Float.ratio(1.5) + {3, 2} + iex> Float.ratio(-1.5) + {-3, 2} + iex> Float.ratio(16.0) + {16, 1} + iex> Float.ratio(-16.0) + {-16, 1} """ - @spec to_char_list(float, list) :: char_list - def to_char_list(float, options) do - :erlang.float_to_list(float, expand_compact(options)) + def ratio(float) when is_float(float) do + <> = <> + {num, _, den} = decompose(significant) + num = sign(sign, num) + case exp - 1023 do + exp when exp > 0 -> + {den, exp} = shift_right(den, exp) + {shift_left(num, exp), den} + exp when exp < 0 -> + {num, shift_left(den, -exp)} + 0 -> + {num, den} + end end + defp decompose(significant) do + decompose(significant, 1, 0, 2, 1, 1) + end + + defp decompose(<<1::1, bits::bitstring>>, count, last_count, power, _last_power, acc) do + decompose(bits, count + 1, count, power <<< 1, power, shift_left(acc, count - last_count) + 1) + end + defp decompose(<<0::1, bits::bitstring>>, count, last_count, power, last_power, acc) do + decompose(bits, count + 1, last_count, power <<< 1, last_power, acc) + end + defp decompose(<<>>, _count, last_count, _power, last_power, acc) do + {acc, last_count, last_power} + end + + defp sign(0, num), do: num + defp sign(1, num), do: -num + + defp shift_left(num, 0), do: num + defp shift_left(num, times), do: shift_left(num <<< 1, times - 1) + + defp shift_right(num, 0), do: {num, 0} + defp shift_right(1, times), do: {1, times} + defp shift_right(num, times), do: shift_right(num >>> 1, times - 1) + @doc """ - Returns a binary which corresponds to the text representation - of `some_float`. + Returns a charlist which corresponds to the text representation + of the given float. - Inlined by the compiler. + It uses the shortest representation according to algorithm described + in "Printing Floating-Point Numbers Quickly and Accurately" in + Proceedings of the SIGPLAN '96 Conference on Programming Language + Design and Implementation. ## Examples - iex> Float.to_string(7.0) - "7.00000000000000000000e+00" + iex> Float.to_charlist(7.0) + '7.0' """ - @spec to_string(float) :: String.t - def to_string(some_float) do - :erlang.float_to_binary(some_float) + @spec to_charlist(float) :: charlist + def to_charlist(float) when is_float(float) do + :io_lib_format.fwrite_g(float) end @doc """ Returns a binary which corresponds to the text representation - of `float`. + of the given float. 
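The shortest round-trippable representation contrasts with the old default shown in the removed examples above, which rendered `7.0` as `"7.00000000000000000000e+00"`; for instance:

    iex> Float.to_string(0.1)
    "0.1"
    iex> Float.to_charlist(100.0)
    '100.0'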
- ## Options - - * `:decimals` — number of decimal points to show - * `:scientific` — number of decimal points to show, in scientific format - * `:compact` — when true, use the most compact representation (ignored - with the `scientific` option) + It uses the shortest representation according to algorithm described + in "Printing Floating-Point Numbers Quickly and Accurately" in + Proceedings of the SIGPLAN '96 Conference on Programming Language + Design and Implementation. ## Examples - iex> Float.to_string 7.1, [decimals: 2, compact: true] - "7.1" + iex> Float.to_string(7.0) + "7.0" """ - @spec to_string(float, list) :: String.t + @spec to_string(float) :: String.t + def to_string(float) when is_float(float) do + IO.iodata_to_binary(:io_lib_format.fwrite_g(float)) + end + + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + @doc false + def to_char_list(float), do: Float.to_charlist(float) + + @doc false + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + def to_char_list(float, options) do + :erlang.float_to_list(float, expand_compact(options)) + end + + @doc false + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) def to_string(float, options) do :erlang.float_to_binary(float, expand_compact(options)) end - defp expand_compact([{:compact, false}|t]), do: expand_compact(t) - defp expand_compact([{:compact, true}|t]), do: [:compact|expand_compact(t)] - defp expand_compact([h|t]), do: [h|expand_compact(t)] - defp expand_compact([]), do: [] + defp invalid_precision_message(precision) do + "precision #{precision} is out of valid range of #{inspect @precision_range}" + end + + defp expand_compact([{:compact, false} | t]), do: expand_compact(t) + defp expand_compact([{:compact, true} | t]), do: [:compact | expand_compact(t)] + defp expand_compact([h | t]), do: [h | expand_compact(t)] + defp expand_compact([]), do: [] end diff --git a/lib/elixir/lib/gen_event.ex b/lib/elixir/lib/gen_event.ex index 99ab1c48388..84ad053db35 100644 --- a/lib/elixir/lib/gen_event.ex +++ b/lib/elixir/lib/gen_event.ex @@ -1,185 +1,90 @@ defmodule GenEvent do - @moduledoc """ - A behaviour module for implementing event handling functionality. - - The event handling model consists of a generic event manager - process with an arbitrary number of event handlers which are - added and deleted dynamically. - - An event manager implemented using this module will have a standard - set of interface functions and include functionality for tracing and - error reporting. It will also fit into an supervision tree. - - ## Example - - There are many use cases for event handlers. For example, a logging - system can be built using event handlers where which log message is - an event and different event handlers can be plugged to handle the - log messages. One handler may print error messages on the terminal, - another can write it to a file, while a third one can keep the - messages in memory (like a buffer) until they are read. - - As an example, let's have a GenEvent that accumulates messages until - they are collected by an explicit call. 
- - defmodule LoggerHandler do - use GenEvent - - # Callbacks - - def handle_event({:log, x}, messages) do - {:ok, [x|messages]} - end - - def handle_call(:messages, messages) do - {:ok, Enum.reverse(messages), []} - end - end - - {:ok, pid} = GenEvent.start_link() - - GenEvent.add_handler(pid, LoggerHandler, []) - #=> :ok - - GenEvent.notify(pid, {:log, 1}) - #=> :ok - - GenEvent.notify(pid, {:log, 2}) - #=> :ok - - GenEvent.call(pid, LoggerHandler, :messages) - #=> [1, 2] - - GenEvent.call(pid, LoggerHandler, :messages) - #=> [] - - We start a new event manager by calling `GenEvent.start_link/0`. - Notifications can be sent to the event manager which will then - invoke `handle_event/0` for each registered handler. - - We can add new handlers with `add_handler/4`. Calls can also - be made to specific handlers by using `call/3`. - - ## Callbacks - - There are 6 callbacks required to be implemented in a `GenEvent`. By - adding `use GenEvent` to your module, Elixir will automatically define - all 6 callbacks for you, leaving it up to you to implement the ones - you want to customize. The callbacks are: - - * `init(args)` - invoked when the event handler is added. - - It must return: - - - `{:ok, state}` - - `{:ok, state, :hibernate}` - - `{:error, reason}` - - * `handle_event(msg, state)` - invoked whenever an event is sent via - `notify/2` or `sync_notify/2`. + # TODO: Remove by 2.0 - It must return: + # Functions from this module are deprecated in elixir_dispatch. - - `{:ok, new_state}` - - `{:ok, new_state, :hibernate}` - - `{:swap_handler, args1, new_state, handler2, args2}` - - `:remove_handler` - - * `handle_call(msg, state)` - invoked when a `call/3` is done to a specific - handler. - - It must return: - - - `{:ok, reply, new_state}` - - `{:ok, reply, new_state, :hibernate}` - - `{:swap_handler, reply, args1, new_state, handler2, args2}` - - `{:remove_handler, reply}` - - * `handle_info(msg, state)` - invoked to handle all other messages which - are received by the process. Must return the same values as - `handle_event/2`. - - It must return: - - - `{:noreply, state}` - - `{:noreply, state, timeout}` - - `{:stop, reason, state}` + @moduledoc """ + WARNING: this module is deprecated. - * `terminate(reason, state)` - called when the event handler is removed or - the event manager is terminating. It can return any term. + If you are interested in implementing an event manager, please read the + "Alternatives" section below. If you have to implement an event handler to + integrate with an existing system, such as Elixir's Logger, please use + `:gen_event` instead. - * `code_change(old_vsn, state, extra)` - called when the application - code is being upgraded live (hot code swapping). + ## Alternatives - It must return: + There are a few suitable alternatives to replace GenEvent. Each of them can be + the most beneficial based on the use case. - - `{:ok, new_state}` + ### Supervisor and GenServers - ## Name Registration + One alternative to GenEvent is a very minimal solution consisting of using a + supervisor and multiple GenServers started under it. The supervisor acts as + the "event manager" and the children GenServers act as the "event handlers". + This approach has some shortcomings (it provides no backpressure for example) + but can still replace GenEvent for low-profile usages of it. [This blog post + by José + Valim](http://blog.plataformatec.com.br/2016/11/replacing-genevent-by-a-supervisor-genserver/) + has more detailed information on this approach. 
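A very rough sketch of the Supervisor-plus-GenServer shape described above; the module name, event shape and supervision options are hypothetical and deliberately minimal.

    defmodule LogHandler do
      use GenServer

      def start_link(opts), do: GenServer.start_link(__MODULE__, opts)
      def init(_opts), do: {:ok, []}

      # Each notification arrives as a cast; this handler simply stores it.
      def handle_cast({:event, event}, state), do: {:noreply, [event | state]}
    end

    # The supervisor plays the role of the event manager.
    import Supervisor.Spec
    children = [worker(LogHandler, [[]])]
    {:ok, manager} = Supervisor.start_link(children, strategy: :one_for_one)

    # "Notifying" means casting the event to every child handler.
    for {_id, pid, _type, _modules} <- Supervisor.which_children(manager) do
      GenServer.cast(pid, {:event, :something_happened})
    end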
- A GenEvent is bound to the same name registration rules as a `GenServer`. - Read more about it in the `GenServer` docs. + ### GenStage - ## Streaming + If the use case where you were using GenEvent requires more complex logic, + [GenStage](https://github.com/elixir-lang/gen_stage) provides a great + alternative. GenStage is an external Elixir library maintained by the Elixir + team; it provides tool to implement systems that exchange events in a + demand-driven way with built-in support for backpressure. See the [GenStage + documentation](https://hexdocs.pm/gen_stage) for more information. - `GenEvent`s can be streamed from and streamed with the help of `stream/2`. - Here are some examples: + ### `:gen_event` - stream = GenEvent.stream(pid) + If your use case requires exactly what GenEvent provided, or you have to + integrate with an existing `:gen_event`-based system, you can still use the + [`:gen_event`](http://erlang.org/doc/man/gen_event.html) Erlang module. + """ - # Take the next 10 events - Enum.take(stream, 10) + @callback init(args :: term) :: + {:ok, state} | + {:ok, state, :hibernate} | + {:error, reason :: any} when state: any - # Print all remaining events - for event <- stream do - IO.inspect event - end + @callback handle_event(event :: term, state :: term) :: + {:ok, new_state} | + {:ok, new_state, :hibernate} | + :remove_handler when new_state: term - A stream may also be given an id, which allows all streams with the given - id to be cancelled at any moment via `cancel_streams/1`. + @callback handle_call(request :: term, state :: term) :: + {:ok, reply, new_state} | + {:ok, reply, new_state, :hibernate} | + {:remove_handler, reply} when reply: term, new_state: term - ## Learn more + @callback handle_info(msg :: term, state :: term) :: + {:ok, new_state} | + {:ok, new_state, :hibernate} | + :remove_handler when new_state: term - If you wish to find out more about gen events, Elixir getting started - guides provide a tutorial-like introduction. The documentation and links - in Erlang can also provide extra insight. + @callback terminate(reason, state :: term) :: + term when reason: :stop | {:stop, term} | :remove_handler | {:error, term} | term - * http://elixir-lang.org/getting_started/mix/1.html - * http://www.erlang.org/doc/man/gen_event.html - * http://learnyousomeerlang.com/event-handlers - """ + @callback code_change(old_vsn, state :: term, extra :: term) :: + {:ok, new_state :: term} when old_vsn: term | {:down, term} - @typedoc "Return values of `start*` functions" @type on_start :: {:ok, pid} | {:error, {:already_started, pid}} - @typedoc "The GenEvent manager name" @type name :: atom | {:global, term} | {:via, module, term} - @typedoc "Options used by the `start*` functions" @type options :: [name: name] - @typedoc "The event manager reference" @type manager :: pid | name | {atom, node} - @typedoc "Supported values for new handlers" - @type handler :: module | {module, term} - - @doc """ - Defines a `GenEvent` stream. - - This is a struct returned by `stream/2`. 
The struct is public and - contains the following fields: - - * `:manager` - the manager reference given to `GenEvent.stream/2` - * `:id` - the event stream id for cancellation - * `:timeout` - the timeout in between events, defaults to `:infinity` - * `:duration` - the duration of the subscription, defaults to `:infinity` - * `:mode` - if the subscription mode is sync or async, defaults to `:sync` - """ - defstruct manager: nil, id: nil, timeout: :infinity, duration: :infinity, mode: :sync + @type handler :: atom | {atom, term} @doc false defmacro __using__(_) do + %{file: file, line: line} = __CALLER__ + deprecation_message = "the GenEvent module is deprecated, see its documentation for alternatives" + :elixir_errors.warn(line, file, deprecation_message) + quote location: :keep do @behaviour :gen_event @@ -194,8 +99,18 @@ defmodule GenEvent do end @doc false - def handle_call(_request, state) do - {:ok, {:error, :bad_call}, state} + def handle_call(msg, state) do + proc = + case Process.info(self(), :registered_name) do + {_, []} -> self() + {_, name} -> name + end + + # We do this to trick Dialyzer to not complain about non-local returns. + case :erlang.phash2(1, 1) do + 0 -> raise "attempted to call GenEvent #{inspect proc} but no handle_call/2 clause was provided" + 1 -> {:remove_handler, {:bad_call, msg}} + end end @doc false @@ -204,7 +119,7 @@ defmodule GenEvent do end @doc false - def terminate(reason, state) do + def terminate(_reason, _state) do :ok end @@ -218,447 +133,678 @@ defmodule GenEvent do end end - @doc """ - Starts an event manager linked to the current process. - - This is often used to start the `GenEvent` as part of a supervision tree. - - It accepts the `:name` option which is described under the `Name Registration` - section in the `GenServer` module docs. - - If the event manager is successfully created and initialized, the function - returns `{:ok, pid}`, where pid is the pid of the server. If there already - exists a process with the specified server name, the function returns - `{:error, {:already_started, pid}}` with the pid of that process. - - Note that a `GenEvent` started with `start_link/1` is linked to the - parent process and will exit not only on crashes but also if the parent - process exits with `:normal` reason. - """ + @doc false @spec start_link(options) :: on_start def start_link(options \\ []) when is_list(options) do do_start(:link, options) end - @doc """ - Starts an event manager process without links (outside of a supervision tree). - - See `start_link/1` for more information. 
- """ + @doc false @spec start(options) :: on_start def start(options \\ []) when is_list(options) do do_start(:nolink, options) end + @no_callback :"no callback module" + defp do_start(mode, options) do case Keyword.get(options, :name) do nil -> - :gen.start(:gen_event, mode, :"no callback module", [], []) + :gen.start(GenEvent, mode, @no_callback, [], []) atom when is_atom(atom) -> - :gen.start(:gen_event, mode, {:local, atom}, :"no callback module", [], []) - other when is_tuple(other) -> - :gen.start(:gen_event, mode, other, :"no callback module", [], []) + :gen.start(GenEvent, mode, {:local, atom}, @no_callback, [], []) + {:global, _term} = tuple -> + :gen.start(GenEvent, mode, tuple, @no_callback, [], []) + {:via, via_module, _term} = tuple when is_atom(via_module) -> + :gen.start(GenEvent, mode, tuple, @no_callback, [], []) + other -> + raise ArgumentError, """ + expected :name option to be one of: + + * nil + * atom + * {:global, term} + * {:via, module, term} + + Got: #{inspect(other)} + """ end end - @doc """ - Returns a stream that consumes and notifies events to the `manager`. + @doc false + @spec stream(manager, Keyword.t) :: GenEvent.Stream.t + def stream(manager, options \\ []) do + %GenEvent.Stream{ + manager: manager, + timeout: Keyword.get(options, :timeout, :infinity)} + end - The stream is a `GenEvent` struct that implements the `Enumerable` - protocol. The supported options are: + @doc false + @spec add_handler(manager, handler, term) :: :ok | {:error, term} + def add_handler(manager, handler, args) do + rpc(manager, {:add_handler, handler, args}) + end - * `:id` - an id to identify all live stream instances; when an `:id` is - given, existing streams can be called with via `cancel_streams`. + @doc false + @spec add_mon_handler(manager, handler, term) :: :ok | {:error, term} + def add_mon_handler(manager, handler, args) do + rpc(manager, {:add_mon_handler, handler, args, self()}) + end - * `:timeout` (Enumerable) - raises if no event arrives in X milliseconds. + @doc false + @spec notify(manager, term) :: :ok + def notify(manager, event) + + def notify({:global, name}, msg) do + try do + :global.send(name, {:notify, msg}) + :ok + catch + _, _ -> :ok + end + end - * `:duration` (Enumerable) - only consume events during the X milliseconds - from the streaming start. + def notify({:via, mod, name}, msg) when is_atom(mod) do + try do + mod.send(name, {:notify, msg}) + :ok + catch + _, _ -> :ok + end + end - * `:mode` - the mode to consume events, can be `:sync` (default) or - `:async`. On sync, the event manager waits for the event to be consumed - before moving on to the next event handler. + def notify(manager, msg) + when is_pid(manager) + when is_atom(manager) + when tuple_size(manager) == 2 and + is_atom(elem(manager, 0)) and is_atom(elem(manager, 1)) do + send(manager, {:notify, msg}) + :ok + end - """ - def stream(manager, options \\ []) do - %GenEvent{manager: manager, - id: Keyword.get(options, :id), - timeout: Keyword.get(options, :timeout, :infinity), - duration: Keyword.get(options, :duration, :infinity), - mode: Keyword.get(options, :mode, :sync)} + @doc false + @spec sync_notify(manager, term) :: :ok + def sync_notify(manager, event) do + rpc(manager, {:sync_notify, event}) end - @doc """ - Adds a new event handler to the event `manager`. 
+ @doc false + @spec ack_notify(manager, term) :: :ok + def ack_notify(manager, event) do + rpc(manager, {:ack_notify, event}) + end - The event manager will call the `init/1` callback with `args` to - initiate the event handler and its internal state. + @doc false + @spec call(manager, handler, term, timeout) :: term | {:error, term} + def call(manager, handler, request, timeout \\ 5000) do + try do + :gen.call(manager, self(), {:call, handler, request}, timeout) + catch + :exit, reason -> + exit({reason, {__MODULE__, :call, [manager, handler, request, timeout]}}) + else + {:ok, res} -> res + end + end - If `init/1` returns a correct value indicating successful completion, - the event manager adds the event handler and this function returns - `:ok`. If the callback fails with `reason` or returns `{:error, reason}`, - the event handler is ignored and this function returns `{:EXIT, reason}` - or `{:error, reason}`, respectively. + @doc false + @spec remove_handler(manager, handler, term) :: term | {:error, term} + def remove_handler(manager, handler, args) do + rpc(manager, {:delete_handler, handler, args}) + end - ## Linked handlers + @doc false + @spec swap_handler(manager, handler, term, handler, term) :: :ok | {:error, term} + def swap_handler(manager, handler1, args1, handler2, args2) do + rpc(manager, {:swap_handler, handler1, args1, handler2, args2}) + end - When adding a handler, a `:link` option with value `true` can be given. - This means the event handler and the calling process are now linked. + @doc false + @spec swap_mon_handler(manager, handler, term, handler, term) :: :ok | {:error, term} + def swap_mon_handler(manager, handler1, args1, handler2, args2) do + rpc(manager, {:swap_mon_handler, handler1, args1, handler2, args2, self()}) + end - If the calling process later terminates with `reason`, the event manager - will delete the event handler by calling the `terminate/2` callback with - `{:stop, reason}` as argument. If the event handler later is deleted, - the event manager sends a message `{:gen_event_EXIT, handler, reason}` - to the calling process. Reason is one of the following: + @doc false + @spec which_handlers(manager) :: [handler] + def which_handlers(manager) do + rpc(manager, :which_handlers) + end - * `:normal` - if the event handler has been removed due to a call to - `remove_handler/3`, or `:remove_handler` has been returned by a callback - function + @doc false + @spec stop(manager, reason :: term, timeout) :: :ok + def stop(manager, reason \\ :normal, timeout \\ :infinity) do + :gen.stop(manager, reason, timeout) + end - * `:shutdown` - if the event handler has been removed because the event - manager is terminating + defp rpc(module, cmd) do + {:ok, reply} = :gen.call(module, self(), cmd, :infinity) + reply + end - * `{:swapped, new_handler, pid}` - if the process pid has replaced the - event handler by another + ## Init callbacks - * a term - if the event handler is removed due to an error. 
Which term - depends on the error + require Record + Record.defrecordp :handler, [:module, :id, :state, :pid, :ref] - """ - @spec add_handler(manager, handler, term, [link: boolean]) :: :ok | {:EXIT, term} | {:error, term} - def add_handler(manager, handler, args, options \\ []) do - case Keyword.get(options, :link, false) do - true -> :gen_event.add_sup_handler(manager, handler, args) - false -> :gen_event.add_handler(manager, handler, args) - end + @doc false + def init_it(starter, :self, name, mod, args, options) do + init_it(starter, self(), name, mod, args, options) end - @doc """ - Sends an event notification to the event `manager`. + def init_it(starter, parent, name, _mod, _args, options) do + Process.put(:"$initial_call", {__MODULE__, :init_it, 6}) + debug = + if function_exported?(:gen, :debug_options, 2) do + :gen.debug_options(name, options) + else + :gen.debug_options(options) + end + :proc_lib.init_ack(starter, {:ok, self()}) + loop(parent, name(name), [], debug, false) + end - The event manager will call `handle_event/2` for each installed event handler. + @doc false + def init_hib(parent, name, handlers, debug) do + fetch_msg(parent, name, handlers, debug, true) + end - `notify` is asynchronous and will return immediately after the notification is - sent. `notify` will not fail even if the specified event manager does not exist, - unless it is specified as `name` (atom). - """ - @spec notify(manager, term) :: :ok - defdelegate notify(manager, event), to: :gen_event + defp name({:local, name}), do: name + defp name({:global, name}), do: name + defp name({:via, _, name}), do: name + defp name(pid) when is_pid(pid), do: pid - @doc """ - Sends a sync event notification to the event `manager`. + ## Loop - In other words, this function only returns `:ok` after the event manager - invokes the `handle_event/2` on each installed event handler. + defp loop(parent, name, handlers, debug, true) do + :proc_lib.hibernate(__MODULE__, :init_hib, [parent, name, handlers, debug]) + end - See `notify/2` for more info. - """ - @spec sync_notify(manager, term) :: :ok - defdelegate sync_notify(manager, event), to: :gen_event + defp loop(parent, name, handlers, debug, false) do + fetch_msg(parent, name, handlers, debug, false) + end - @doc """ - Makes a synchronous call to the event `handler` installed in `manager`. + defp fetch_msg(parent, name, handlers, debug, hib) do + receive do + {:system, from, req} -> + :sys.handle_system_msg(req, from, parent, __MODULE__, + debug, [name, handlers, hib], hib) + {:EXIT, ^parent, reason} -> + server_terminate(reason, parent, handlers, name) + msg when debug == [] -> + handle_msg(msg, parent, name, handlers, []) + msg -> + debug = :sys.handle_debug(debug, &print_event/3, name, {:in, msg}) + handle_msg(msg, parent, name, handlers, debug) + end + end - The given `request` is sent and the caller waits until a reply arrives or - a timeout occurs. The event manager will call `handle_call/2` to handle - the request. 
+ defp handle_msg(msg, parent, name, handlers, debug) do + case msg do + {:notify, event} -> + {hib, handlers} = server_event(:async, event, handlers, name) + loop(parent, name, handlers, debug, hib) + {_from, _tag, {:notify, event}} -> + {hib, handlers} = server_event(:async, event, handlers, name) + loop(parent, name, handlers, debug, hib) + {_from, tag, {:ack_notify, event}} -> + reply(tag, :ok) + {hib, handlers} = server_event(:ack, event, handlers, name) + loop(parent, name, handlers, debug, hib) + {_from, tag, {:sync_notify, event}} -> + {hib, handlers} = server_event(:sync, event, handlers, name) + reply(tag, :ok) + loop(parent, name, handlers, debug, hib) + {:DOWN, ref, :process, _pid, reason} = other -> + case handle_down(ref, reason, handlers, name) do + {:ok, handlers} -> + loop(parent, name, handlers, debug, false) + :error -> + {hib, handlers} = server_info(other, handlers, name) + loop(parent, name, handlers, debug, hib) + end + {_from, tag, {:call, handler, query}} -> + {hib, reply, handlers} = server_call(handler, query, handlers, name) + reply(tag, reply) + loop(parent, name, handlers, debug, hib) + {_from, tag, {:add_handler, handler, args}} -> + {hib, reply, handlers} = server_add_handler(handler, args, handlers) + reply(tag, reply) + loop(parent, name, handlers, debug, hib) + {_from, tag, {:add_mon_handler, handler, args, notify}} -> + {hib, reply, handlers} = server_add_mon_handler(handler, args, handlers, notify) + reply(tag, reply) + loop(parent, name, handlers, debug, hib) + {_from, tag, {:add_process_handler, pid, notify}} -> + {hib, reply, handlers} = server_add_process_handler(pid, handlers, notify) + reply(tag, reply) + loop(parent, name, handlers, debug, hib) + {_from, tag, {:delete_handler, handler, args}} -> + {reply, handlers} = server_remove_handler(handler, args, handlers, name) + reply(tag, reply) + loop(parent, name, handlers, debug, false) + {_from, tag, {:swap_handler, handler1, args1, handler2, args2}} -> + {hib, reply, handlers} = server_swap_handler(handler1, args1, handler2, args2, handlers, nil, name) + reply(tag, reply) + loop(parent, name, handlers, debug, hib) + {_from, tag, {:swap_mon_handler, handler1, args1, handler2, args2, mon}} -> + {hib, reply, handlers} = server_swap_handler(handler1, args1, handler2, args2, handlers, mon, name) + reply(tag, reply) + loop(parent, name, handlers, debug, hib) + {_from, tag, :which_handlers} -> + reply(tag, server_which_handlers(handlers)) + loop(parent, name, handlers, debug, false) + {_from, tag, :get_modules} -> + reply(tag, server_get_modules(handlers)) + loop(parent, name, handlers, debug, false) + other -> + {hib, handlers} = server_info(other, handlers, name) + loop(parent, name, handlers, debug, hib) + end + end - The return value `reply` is defined in the return value of `handle_call/2`. - If the specified event handler is not installed, the function returns - `{:error, :bad_module}`. - """ - @spec call(manager, handler, term, timeout) :: term | {:error, term} - def call(manager, handler, request, timeout \\ 5000) do - :gen_event.call(manager, handler, request, timeout) + ## System callbacks + + @doc false + def system_continue(parent, debug, [name, handlers, hib]) do + loop(parent, name, handlers, debug, hib) end - @doc """ - Cancels all streams currently running with the given `:id`. 
+ @doc false + def system_terminate(reason, parent, _debug, [name, handlers, _hib]) do + server_terminate(reason, parent, handlers, name) + end - In order for a stream to be cancelled, an `:id` must be passed - when the stream is created via `stream/2`. Passing a stream without - an id leads to an argument error. - """ - @spec cancel_streams(t) :: :ok - def cancel_streams(%GenEvent{id: nil}) do - raise ArgumentError, "cannot cancel streams without an id" + @doc false + def system_code_change([name, handlers, hib], module, old_vsn, extra) do + handlers = + for handler <- handlers do + if handler(handler, :module) == module do + {:ok, state} = module.code_change(old_vsn, handler(handler, :state), extra) + handler(handler, state: state) + else + handler + end + end + {:ok, [name, handlers, hib]} + end + + @doc false + def system_get_state([_name, handlers, _hib]) do + tuples = for handler(module: mod, id: id, state: state) <- handlers do + {mod, id, state} + end + {:ok, tuples} + end + + @doc false + def system_replace_state(fun, [name, handlers, hib]) do + {handlers, states} = + :lists.unzip(for handler <- handlers do + handler(module: mod, id: id, state: state) = handler + cur = {mod, id, state} + try do + new = {^mod, ^id, new_state} = fun.(cur) + {handler(handler, state: new_state), new} + catch + _, _ -> + {handler, cur} + end + end) + {:ok, states, [name, handlers, hib]} end - def cancel_streams(%GenEvent{manager: manager, id: id}) do - handlers = :gen_event.which_handlers(manager) + @doc false + def format_status(opt, status_data) do + [pdict, sys_state, parent, _debug, [name, handlers, _hib]] = status_data + header = :gen.format_status_header('Status for event handler', name) - for {Enumerable.GenEvent, {handler_id, _}} = ref <- handlers, - handler_id === id do - :gen_event.delete_handler(manager, ref, :remove_handler) + formatted = for handler <- handlers do + handler(module: module, state: state) = handler + if function_exported?(module, :format_status, 2) do + try do + state = module.format_status(opt, [pdict, state]) + handler(handler, state: state) + catch + _, _ -> handler + end + else + handler + end end - :ok + [header: header, + data: [{'Status', sys_state}, {'Parent', parent}], + items: {'Installed handlers', formatted}] end - @doc """ - Removes an event handler from the event `manager`. + ## Loop helpers - The event manager will call `terminate/2` to terminate the event handler - and return the callback value. If the specified event handler is not - installed, the function returns `{:error, :module_not_found}`. - """ - @spec remove_handler(manager, handler, term) :: term | {:error, term} - def remove_handler(manager, handler, args) do - :gen_event.delete_handler(manager, handler, args) + defp print_event(dev, {:in, msg}, name) do + case msg do + {:notify, event} -> + IO.puts dev, "*DBG* #{inspect name} got event #{inspect event}" + {_, _, {:call, handler, query}} -> + IO.puts dev, "*DBG* #{inspect name} (handler #{inspect handler}) got call #{inspect query}" + _ -> + IO.puts dev, "*DBG* #{inspect name} got #{inspect msg}" + end + end + + defp print_event(dev, dbg, name) do + IO.puts dev, "*DBG* #{inspect name}: #{inspect dbg}" end - @doc """ - Replaces an old event handler with a new one in the event `manager`. 
+ defp server_add_handler({module, id}, args, handlers) do + handler = handler(module: module, id: {module, id}) + do_add_handler(module, handler, args, handlers, :ok) + end - First, the old event handler is deleted by calling `terminate/2` with - the given `args1` and collects the return value. Then the new event handler - is added and initiated by calling `init({args2, term}), where term is the - return value of calling `terminate/2` in the old handler. This makes it - possible to transfer information from one handler to another. + defp server_add_handler(module, args, handlers) do + handler = handler(module: module, id: module) + do_add_handler(module, handler, args, handlers, :ok) + end - The new handler will be added even if the specified old event handler - is not installed in which case `term = :error` or if the handler fails to - terminate with a given reason. + defp server_add_mon_handler({module, id}, args, handlers, notify) do + ref = Process.monitor(notify) + handler = handler(module: module, id: {module, id}, pid: notify, ref: ref) + do_add_handler(module, handler, args, handlers, :ok) + end - If there was a linked connection between handler1 and a process pid, there - will be a link connection between handler2 and pid instead. A new link in - between the caller process and the new handler can also be set with by - giving `link: true` as option. See `add_handler/4` for more information. + defp server_add_mon_handler(module, args, handlers, notify) do + ref = Process.monitor(notify) + handler = handler(module: module, id: module, pid: notify, ref: ref) + do_add_handler(module, handler, args, handlers, :ok) + end - If `init/1` in the second handler returns a correct value, this function - returns `:ok`. - """ - @spec swap_handler(manager, handler, term, handler, term, [link: boolean]) :: :ok | {:error, term} - def swap_handler(manager, handler1, args1, handler2, args2, options \\ []) do - case Keyword.get(options, :link, false) do - true -> :gen_event.swap_sup_handler(manager, {handler1, args1}, {handler2, args2}) - false -> :gen_event.swap_handler(manager, {handler1, args1}, {handler2, args2}) - end + defp server_add_process_handler(pid, handlers, notify) do + ref = Process.monitor(pid) + handler = handler(module: GenEvent.Stream, id: {self(), ref}, + pid: notify, ref: ref) + do_add_handler(GenEvent.Stream, handler, {pid, ref}, handlers, {self(), ref}) end - @doc """ - Returns a list of all event handlers installed in the `manager`. - """ - @spec which_handlers(manager) :: [handler] - defdelegate which_handlers(manager), to: :gen_event + defp server_remove_handler(module, args, handlers, name) do + do_take_handler(module, args, handlers, name, :remove, :normal) + end - @doc """ - Terminates the event `manager`. + defp server_swap_handler(module1, args1, module2, args2, handlers, sup, name) do + {state, handlers} = + do_take_handler(module1, args1, handlers, name, :swapped, {:swapped, module2, sup}) - Before terminating, the event manager will call `terminate(:stop, ...)` - for each installed event handler. 
- """ - @spec stop(manager) :: :ok - defdelegate stop(manager), to: :gen_event -end + if sup do + server_add_mon_handler(module2, {args2, state}, handlers, sup) + else + server_add_handler(module2, {args2, state}, handlers) + end + end -defimpl Enumerable, for: GenEvent do - use GenEvent + defp server_info(event, handlers, name) do + handlers = :lists.reverse(handlers) + server_notify(event, :handle_info, handlers, name, handlers, [], false) + end - @doc false - def init({_mode, mon_pid, _pid, ref} = state) do - # Tell the mon_pid we are good to go, and send self() so that this handler - # can be removed later without using the managers name. - send(mon_pid, {:UP, ref, self()}) - {:ok, state} + defp server_event(mode, event, handlers, name) do + {handlers, streams} = server_split_process_handlers(mode, event, handlers, [], []) + {hib, handlers} = server_notify(event, :handle_event, handlers, name, handlers, [], false) + {hib, server_collect_process_handlers(mode, event, streams, handlers, name)} end - @doc false - def handle_event(event, {:sync, mon_pid, pid, ref} = state) do - sync = Process.monitor(mon_pid) - send pid, {ref, sync, event} - receive do - {^sync, :done} -> - Process.demonitor(sync, [:flush]) - :remove_handler - {^sync, :next} -> - Process.demonitor(sync, [:flush]) - {:ok, state} - {:DOWN, ^sync, _, _, _} -> - {:ok, state} + defp server_split_process_handlers(mode, event, [handler | t], handlers, streams) do + case handler(handler, :id) do + {pid, _ref} when is_pid(pid) -> + server_process_notify(mode, event, handler) + server_split_process_handlers(mode, event, t, handlers, [handler | streams]) + _ -> + server_split_process_handlers(mode, event, t, [handler | handlers], streams) end end - def handle_event(event, {:async, _mon_pid, pid, ref} = state) do - send pid, {ref, nil, event} - {:ok, state} + defp server_split_process_handlers(_mode, _event, [], handlers, streams) do + {handlers, streams} end - def reduce(stream, acc, fun) do - start_fun = fn() -> start(stream) end - next_fun = &next(stream, &1) - stop_fun = &stop(stream, &1) - Stream.resource(start_fun, next_fun, stop_fun).(acc, wrap_reducer(fun)) + defp server_process_notify(mode, event, handler(state: {pid, ref})) do + send pid, {self(), {self(), ref}, {mode_to_tag(mode), event}} end - def count(_stream) do - {:error, __MODULE__} + defp mode_to_tag(:ack), do: :ack_notify + defp mode_to_tag(:sync), do: :sync_notify + defp mode_to_tag(:async), do: :notify + + defp server_notify(event, fun, [handler | t], name, handlers, acc, hib) do + case server_update(handler, fun, event, name, handlers) do + {new_hib, handler} -> + server_notify(event, fun, t, name, handlers, [handler | acc], hib or new_hib) + :error -> + server_notify(event, fun, t, name, handlers, acc, hib) + end end - def member?(_stream, _item) do - {:error, __MODULE__} + defp server_notify(_, _, [], _, _, acc, hib) do + {hib, acc} end - defp wrap_reducer(fun) do - fn - {nil, _manager, event}, acc -> - fun.(event, acc) - {ref, manager, event}, acc -> - try do - fun.(event, acc) - after - send manager, {ref, :next} + defp server_update(handler, fun, event, name, _handlers) do + handler(module: module, state: state) = handler + + case do_handler(module, fun, [event, state]) do + {:ok, res} -> + case res do + {:ok, state} -> + {false, handler(handler, state: state)} + {:ok, state, :hibernate} -> + {true, handler(handler, state: state)} + :remove_handler -> + do_terminate(handler, :remove_handler, event, name, :normal) + :error + other -> + reason = 
{:bad_return_value, other} + do_terminate(handler, {:error, reason}, event, name, reason) + :error end + {:error, reason} -> + do_terminate(handler, {:error, reason}, event, name, reason) + :error end end - defp start(%{manager: manager, id: id, duration: duration, mode: mode} = stream) do - {mon_pid, mon_ref} = add_handler(mode, manager, id, duration) - send mon_pid, {:UP, mon_ref, self()} + defp server_collect_process_handlers(:async, event, [handler | t], handlers, name) do + server_collect_process_handlers(:async, event, t, [handler | handlers], name) + end + + defp server_collect_process_handlers(mode, event, [handler | t], handlers, name) when mode in [:sync, :ack] do + handler(ref: ref, id: id) = handler receive do - # The subscription process gave us a go. - {:UP, ^mon_ref, manager_pid} -> - {mon_ref, mon_pid, manager_pid} - # The subscription process died due to an abnormal reason. - {:DOWN, ^mon_ref, _, _, reason} -> - exit({reason, {__MODULE__, :start, [stream]}}) + {^ref, :ok} -> + server_collect_process_handlers(mode, event, t, [handler | handlers], name) + {_from, tag, {:delete_handler, ^id, args}} -> + do_terminate(handler, args, :remove, name, :normal) + reply(tag, :ok) + server_collect_process_handlers(mode, event, t, handlers, name) + {:DOWN, ^ref, _, _, reason} -> + do_terminate(handler, {:stop, reason}, :DOWN, name, :shutdown) + server_collect_process_handlers(mode, event, t, handlers, name) end end - defp next(%{timeout: timeout} = stream, {mon_ref, mon_pid, manager_pid} = acc) do - # If :DOWN is received must resend it to self so that stop/2 can receive it - # and know that the handler has been removed. - receive do - {:DOWN, ^mon_ref, _, _, :normal} -> - send(self(), {:DOWN, mon_ref, :process, mon_pid, :normal}) - nil - {:DOWN, ^mon_ref, _, _, reason} -> - send(self(), {:DOWN, mon_ref, :process, mon_pid, :normal}) - exit({reason, {__MODULE__, :next, [stream, acc]}}) - {^mon_ref, sync_ref, event} -> - {{sync_ref, manager_pid, event}, acc} - after - timeout -> - exit({:timeout, {__MODULE__, :next, [stream, acc]}}) + defp server_collect_process_handlers(_mode, _event, [], handlers, _name) do + handlers + end + + defp server_call(module, query, handlers, name) do + case :lists.keyfind(module, handler(:id) + 1, handlers) do + false -> + {false, {:error, :not_found}, handlers} + handler -> + case server_call_update(handler, query, name, handlers) do + {{hib, handler}, reply} -> + {hib, reply, :lists.keyreplace(module, handler(:id) + 1, handlers, handler)} + {:error, reply} -> + {false, reply, :lists.keydelete(module, handler(:id) + 1, handlers)} + end end end - defp stop(%{mode: mode} = stream, {mon_ref, mon_pid, manager_pid} = acc) do - case remove_handler(mon_ref, mon_pid, manager_pid) do - :ok when mode == :async -> - flush_events(mon_ref) - :ok -> - :ok + defp server_call_update(handler, query, name, _handlers) do + handler(module: module, state: state) = handler + case do_handler(module, :handle_call, [query, state]) do + {:ok, res} -> + case res do + {:ok, reply, state} -> + {{false, handler(handler, state: state)}, reply} + {:ok, reply, state, :hibernate} -> + {{true, handler(handler, state: state)}, reply} + {:remove_handler, reply} -> + do_terminate(handler, :remove_handler, query, name, :normal) + {:error, reply} + other -> + reason = {:bad_return_value, other} + do_terminate(handler, {:error, reason}, query, name, reason) + {:error, {:error, reason}} + end {:error, reason} -> - exit({reason, {__MODULE__, :stop, [stream, acc]}}) + do_terminate(handler, {:error, 
reason}, query, name, reason) + {:error, {:error, reason}} end end - defp add_handler(mode, manager, id, duration) do - parent = self() - - # The subscription is managed by another process, that dies if - # the handler dies, and is killed when there is a need to remove - # the subscription. - spawn_monitor(fn -> - # It is possible that the handler could be removed, and then the GenEvent - # could exit before this process has exited normally. Because the removal - # does not cause an unlinking this process would exit with the same - # reason. Trapping exits ensures that no errors is raised in this case. - Process.flag(:trap_exit, true) - parent_ref = Process.monitor(parent) - - # Receive the notification from the parent, unless it died. - mon_ref = receive do - {:UP, ref, ^parent} -> ref - {:DOWN, ^parent_ref, _, _, _} -> exit(:normal) - end + defp server_get_modules(handlers) do + (for handler(module: module) <- handlers, do: module) + |> :ordsets.from_list + |> :ordsets.to_list + end - cancel = cancel_ref(id, mon_ref) - :ok = :gen_event.add_sup_handler(manager, {__MODULE__, cancel}, - {mode, self(), parent, mon_ref}) - - receive do - # This message is already in the mailbox if we got this far. - {:UP, ^mon_ref, manager_pid} -> - send(parent, {:UP, mon_ref, manager_pid}) - receive do - # The stream has finished, remove the handler. - {:DONE, ^mon_ref} -> - exit_handler(manager_pid, parent_ref, cancel) - - # If the parent died, we can exit normally. - {:DOWN, ^parent_ref, _, _, _} -> - exit(:normal) - - # reason should be normal unless the handler is swapped. - {:gen_event_EXIT, {__MODULE__, ^cancel}, reason} -> - exit(reason) - - # Exit if the manager dies, so the streamer is notified. - {:EXIT, ^manager_pid, :noconnection} -> - exit({:nodedown, node(manager_pid)}) - - {:EXIT, ^manager_pid, reason} -> - exit(reason) - after - # Our time is over, notify the parent. - duration -> exit(:normal) - end + defp server_which_handlers(handlers) do + for handler(id: id) <- handlers, do: id + end + + defp server_terminate(reason, _parent, handlers, name) do + _ = + for handler <- handlers do + do_terminate(handler, :stop, :stop, name, :shutdown) end - end) + exit(reason) end - defp cancel_ref(nil, mon_ref), do: mon_ref - defp cancel_ref(id, mon_ref), do: {id, mon_ref} + defp reply({from, ref}, msg) do + send from, {ref, msg} + end - defp exit_handler(manager_pid, parent_ref, cancel) do - # Send exit signal so manager removes handler. - Process.exit(manager_pid, :shutdown) - receive do - # If the parent died, we can exit normally. - {:DOWN, ^parent_ref, _, _, _} -> - exit(:normal) - - # Probably the reason is :shutdown, which occurs when the manager receives - # an exit signal from a handler supervising process. However whatever the - # reason the handler has been removed so it is ok. - {:gen_event_EXIT, {__MODULE__, ^cancel}, _} -> - exit(:normal) - - # The connection broke, perhaps the handler might try to forward events - # before it removes the handler, so must exit abnormally. - {:EXIT, ^manager_pid, :noconnection} -> - exit({:nodedown, node(manager_pid)}) - - # The manager has exited but don't exit abnormally as the handler has died - # with the manager and all expected events have been handled. This is ok. 
- {:EXIT, ^manager_pid, _} -> - exit(:normal) + defp handle_down(ref, reason, handlers, name) do + case :lists.keyfind(ref, handler(:ref) + 1, handlers) do + false -> :error + handler -> + do_terminate(handler, {:stop, reason}, :DOWN, name, :shutdown) + {:ok, :lists.keydelete(ref, handler(:ref) + 1, handlers)} end end - defp remove_handler(mon_ref, mon_pid, manager_pid) do - send(mon_pid, {:DONE, mon_ref}) - receive do - {^mon_ref, sync, _} when sync != nil -> - send(manager_pid, {sync, :done}) - Process.demonitor(mon_ref, [:flush]) - :ok - {:DOWN, ^mon_ref, _, _, :normal} -> - :ok - {:DOWN, ^mon_ref, _, _, reason} -> - {:error, reason} + defp do_add_handler(module, handler, arg, handlers, succ) do + case :lists.keyfind(handler(handler, :id), handler(:id) + 1, handlers) do + false -> + case do_handler(module, :init, [arg]) do + {:ok, res} -> + case res do + {:ok, state} -> + {false, succ, [handler(handler, state: state) | handlers]} + {:ok, state, :hibernate} -> + {true, succ, [handler(handler, state: state) | handlers]} + {:error, _} = error -> + {false, error, handlers} + other -> + {false, {:error, {:bad_return_value, other}}, handlers} + end + {:error, _} = error -> + {false, error, handlers} + end + _ -> + {false, {:error, :already_present}, handlers} end end - defp flush_events(mon_ref) do - receive do - {^mon_ref, _, _} -> - flush_events(mon_ref) - after - 0 -> :ok + defp do_take_handler(module, args, handlers, name, last_in, reason) do + case :lists.keytake(module, handler(:id) + 1, handlers) do + {:value, handler, handlers} -> + {do_terminate(handler, args, last_in, name, reason), handlers} + false -> + {{:error, :not_found}, handlers} + end + end + + defp do_terminate(handler, arg, last_in, name, reason) do + handler(module: module, state: state) = handler + + res = + case do_handler(module, :terminate, [arg, state]) do + {:ok, res} -> res + {:error, _} = error -> error + end + report_terminate(handler, reason, state, last_in, name) + res + end + + defp do_handler(mod, fun, args) do + try do + apply(mod, fun, args) + catch + :throw, val -> {:ok, val} + :error, val -> {:error, {val, System.stacktrace}} + :exit, val -> {:error, val} + else + res -> {:ok, res} + end + end + + defp report_terminate(handler, reason, state, last_in, name) do + report_error(handler, reason, state, last_in, name) + if ref = handler(handler, :ref) do + Process.demonitor(ref, [:flush]) + end + if pid = handler(handler, :pid) do + send pid, {:gen_event_EXIT, handler(handler, :id), reason} + end + end + + defp report_error(_handler, :normal, _, _, _), do: :ok + defp report_error(_handler, :shutdown, _, _, _), do: :ok + defp report_error(_handler, {:swapped, _, _}, _, _, _), do: :ok + defp report_error(handler, reason, state, last_in, name) do + reason = + case reason do + {:undef, [{m, f, a, _} | _] = mfas} -> + cond do + :code.is_loaded(m) === false -> + {:"module could not be loaded", mfas} + function_exported?(m, f, length(a)) -> + reason + true -> + {:"function not exported", mfas} + end + _ -> + reason + end + + formatted = report_status(handler, state) + + :error_logger.error_msg( + '** gen_event handler ~p crashed.~n' ++ + '** Was installed in ~p~n' ++ + '** Last event was: ~p~n' ++ + '** When handler state == ~p~n' ++ + '** Reason == ~p~n', [handler(handler, :id), name, last_in, formatted, reason]) + end + + defp report_status(handler(module: module), state) do + if function_exported?(module, :format_status, 2) do + try do + module.format_status(:terminate, [Process.get(), state]) + catch + _, _ -> 
state + end + else + state end end end diff --git a/lib/elixir/lib/gen_event/stream.ex b/lib/elixir/lib/gen_event/stream.ex new file mode 100644 index 00000000000..14163ab0717 --- /dev/null +++ b/lib/elixir/lib/gen_event/stream.ex @@ -0,0 +1,162 @@ +defmodule GenEvent.Stream do + @moduledoc false + defstruct manager: nil, timeout: :infinity + + @type t :: %__MODULE__{ + manager: GenEvent.manager, + timeout: timeout} + + @doc false + def init({_pid, _ref} = state) do + {:ok, state} + end + + @doc false + def handle_event(event, _state) do + # We do this to trick Dialyzer to not complain about non-local returns. + case :erlang.phash2(1, 1) do + 0 -> exit({:bad_event, event}) + 1 -> :remove_handler + end + end + + @doc false + def handle_call(msg, _state) do + # We do this to trick Dialyzer to not complain about non-local returns. + reason = {:bad_call, msg} + case :erlang.phash2(1, 1) do + 0 -> exit(reason) + 1 -> {:remove_handler, reason} + end + end + + @doc false + def handle_info(_msg, state) do + {:ok, state} + end + + @doc false + def terminate(_reason, _state) do + :ok + end + + @doc false + def code_change(_old, state, _extra) do + {:ok, state} + end +end + +defimpl Enumerable, for: GenEvent.Stream do + def reduce(stream, acc, fun) do + start_fun = fn() -> start(stream) end + next_fun = &next(stream, &1) + stop_fun = &stop(stream, &1) + Stream.resource(start_fun, next_fun, stop_fun).(acc, wrap_reducer(fun)) + end + + def count(_stream) do + {:error, __MODULE__} + end + + def member?(_stream, _item) do + {:error, __MODULE__} + end + + defp wrap_reducer(fun) do + fn + {:ack, manager, ref, event}, acc -> + send manager, {ref, :ok} + fun.(event, acc) + {:async, _manager, _ref, event}, acc -> + fun.(event, acc) + {:sync, manager, ref, event}, acc -> + try do + fun.(event, acc) + after + send manager, {ref, :ok} + end + end + end + + defp start(%{manager: manager} = stream) do + try do + {:ok, {pid, ref}} = :gen.call(manager, self(), + {:add_process_handler, self(), self()}, :infinity) + mon_ref = Process.monitor(pid) + {pid, ref, mon_ref} + catch + :exit, reason -> exit({reason, {__MODULE__, :start, [stream]}}) + end + end + + defp next(%{timeout: timeout} = stream, {pid, ref, mon_ref} = acc) do + self = self() + + receive do + # Got an async event. + {_from, {^pid, ^ref}, {:notify, event}} -> + {[{:async, pid, ref, event}], acc} + + # Got a sync event. + {_from, {^pid, ^ref}, {:sync_notify, event}} -> + {[{:sync, pid, ref, event}], acc} + + # Got an ack event. + {_from, {^pid, ^ref}, {:ack_notify, event}} -> + {[{:ack, pid, ref, event}], acc} + + # The handler was removed. Stop iteration, resolve the + # event later. We need to demonitor now, otherwise DOWN + # appears with higher priority in the shutdown process. + {:gen_event_EXIT, {^pid, ^ref}, _reason} = event -> + Process.demonitor(mon_ref, [:flush]) + send(self, event) + {:halt, {:removed, acc}} + + # The manager died. Stop iteration, resolve the event later. + {:DOWN, ^mon_ref, _, _, _} = event -> + send(self, event) + {:halt, {:removed, acc}} + after + timeout -> + exit({:timeout, {__MODULE__, :next, [stream, acc]}}) + end + end + + # If we reach this branch, we know the handler was already + # removed, so we don't trigger a request for doing so. 
+ defp stop(stream, {:removed, {pid, ref, mon_ref} = acc}) do + case wait_for_handler_removal(pid, ref, mon_ref) do + :ok -> + flush_events(ref) + {:error, reason} -> + exit({reason, {__MODULE__, :stop, [stream, acc]}}) + end + end + + # If we reach this branch, the handler was not removed yet, + # so we trigger a request for doing so. + defp stop(stream, {pid, ref, _} = acc) do + _ = :gen_event.delete_handler(pid, {pid, ref}, :shutdown) + stop(stream, {:removed, acc}) + end + + defp wait_for_handler_removal(pid, ref, mon_ref) do + receive do + {:gen_event_EXIT, {^pid, ^ref}, _reason} -> + Process.demonitor(mon_ref, [:flush]) + :ok + {:DOWN, ^mon_ref, _, _, reason} -> + {:error, reason} + end + end + + defp flush_events(ref) do + receive do + {_from, {_pid, ^ref}, {notify, _event}} when notify in [:notify, :ack_notify, :sync_notify] -> + flush_events(ref) + after + 0 -> :ok + end + end +end diff --git a/lib/elixir/lib/gen_server.ex b/lib/elixir/lib/gen_server.ex index faeb3344aab..1234a8da3ab 100644 --- a/lib/elixir/lib/gen_server.ex +++ b/lib/elixir/lib/gen_server.ex @@ -2,7 +2,7 @@ defmodule GenServer do @moduledoc """ A behaviour module for implementing the server of a client-server relation. - A GenServer is a process as any other Elixir process and it can be used + A GenServer is a process like any other Elixir process and it can be used to keep state, execute code asynchronously and so on. The advantage of using a generic server process (GenServer) implemented using this module is that it will have a standard set of interface functions and include functionality for @@ -11,7 +11,7 @@ defmodule GenServer do ## Example The GenServer behaviour abstracts the common client-server interaction. - Developer are only required to implement the callbacks and functionality they are + Developers are only required to implement the callbacks and functionality they are interested in. Let's start with a code example and then explore the available callbacks. @@ -23,12 +23,12 @@ defmodule GenServer do # Callbacks - def handle_call(:pop, _from, [h|t]) do + def handle_call(:pop, _from, [h | t]) do {:reply, h, t} end def handle_cast({:push, item}, state) do - {:noreply, [item|state]} + {:noreply, [item | state]} end end @@ -53,58 +53,15 @@ defmodule GenServer do while **cast** messages do not. Every time you do a `GenServer.call/3`, the client will send a message - that must be handled by the `handle_call/3` callback in the GenServer. - A `cast/2` message must be handled by `handle_cast/2`. + that must be handled by the `c:handle_call/3` callback in the GenServer. + A `cast/2` message must be handled by `c:handle_cast/2`. ## Callbacks There are 6 callbacks required to be implemented in a `GenServer`. By adding `use GenServer` to your module, Elixir will automatically define all 6 callbacks for you, leaving it up to you to implement the ones - you want to customize. The callbacks are: - - * `init(args)` - invoked when the server is started. - - It must return: - - - `{:ok, state}` - - `{:ok, state, timeout}` - - `:ignore` - - `{:stop, reason}` - - * `handle_call(msg, {from, ref}, state)` and `handle_cast(msg, state)` - - invoked to handle call (sync) and cast (async) messages. 
- - It must return: - - - `{:reply, reply, new_state}` - - `{:reply, reply, new_state, timeout}` - - `{:reply, reply, new_state, :hibernate}` - - `{:noreply, new_state}` - - `{:noreply, new_state, timeout}` - - `{:noreply, new_state, :hibernate}` - - `{:stop, reason, new_state}` - - `{:stop, reason, reply, new_state}` - - * `handle_info(msg, state)` - invoked to handle all other messages which - are received by the process. - - It must return: - - - `{:noreply, state}` - - `{:noreply, state, timeout}` - - `{:stop, reason, state}` - - * `terminate(reason, state)` - called when the server is about to - terminate, useful for cleaning up. It must return `:ok`. - - * `code_change(old_vsn, state, extra)` - called when the application - code is being upgraded live (hot code swapping). - - It must return: - - - `{:ok, new_state}` - - `{:error, reason}` + you want to customize. ## Name Registration @@ -116,13 +73,18 @@ defmodule GenServer do using `Process.register/2`. * `{:global, term}`- the GenServer is registered globally with the given - term using the functions in the `:global` module. + term using the functions in the [`:global` module](http://www.erlang.org/doc/man/global.html). * `{:via, module, term}` - the GenServer is registered with the given - mechanism and name. The `:via` option expects a module name to control - the registration mechanism alongside a name which can be any term. + mechanism and name. The `:via` option expects a module that exports + `register_name/2`, `unregister_name/1`, `whereis_name/1` and `send/2`. + One such example is the [`:global` module](http://www.erlang.org/doc/man/global.html) which uses these functions + for keeping the list of names of processes and their associated PIDs + that are available globally for a network of Elixir nodes. Elixir also + ships with a local, decentralized and scalable registry called `Registry` + for locally storing names that are generated dynamically. - For example, we could start and register our Stack server locally as follows: + For example, we could start and register our `Stack` server locally as follows: # Start the server and register it locally with name MyStack {:ok, _} = GenServer.start_link(Stack, [:hello], name: MyStack) @@ -141,6 +103,11 @@ defmodule GenServer do * `{:via, module, name}` if the server is registered through an alternative registry + If there is an interest to register dynamic names locally, do not use + atoms, as atoms are never garbage collected and therefore dynamically + generated atoms won't be garbage collected. For such cases, you can + set up your own local registry by using the `Registry` module. + ## Client / Server APIs Although in the example above we have used `GenServer.start_link/3` and @@ -169,7 +136,7 @@ defmodule GenServer do # Server (callbacks) - def handle_call(:pop, _from, [h|t]) do + def handle_call(:pop, _from, [h | t]) do {:reply, h, t} end @@ -179,7 +146,7 @@ defmodule GenServer do end def handle_cast({:push, item}, state) do - {:noreply, [item|state]} + {:noreply, [item | state]} end def handle_cast(request, state) do @@ -191,17 +158,363 @@ defmodule GenServer do the same module. If the server and/or client implementations are growing complex, you may want to have them in different modules. + ## Receiving "regular" messages + + The goal of a `GenServer` is to abstract the "receive" loop for developers, + automatically handling system messages, support code change, synchronous + calls and more. 
Therefore, you should never call your own "receive" inside + the GenServer callbacks as doing so will cause the GenServer to misbehave. + + Besides the synchronous and asynchronous communication provided by `call/3` + and `cast/2`, "regular" messages sent by functions such `Kernel.send/2`, + `Process.send_after/4` and similar, can be handled inside the `c:handle_info/2` + callback. + + `c:handle_info/2` can be used in many situations, such as handling monitor + DOWN messages sent by `Process.monitor/1`. Another use case for `c:handle_info/2` + is to perform periodic work, with the help of `Process.send_after/4`: + + defmodule MyApp.Periodically do + use GenServer + + def start_link do + GenServer.start_link(__MODULE__, %{}) + end + + def init(state) do + schedule_work() # Schedule work to be performed on start + {:ok, state} + end + + def handle_info(:work, state) do + # Do the desired work here + schedule_work() # Reschedule once more + {:noreply, state} + end + + defp schedule_work() do + Process.send_after(self(), :work, 2 * 60 * 60 * 1000) # In 2 hours + end + end + + ## Debugging with the :sys module + + GenServers, as [special processes](http://erlang.org/doc/design_principles/spec_proc.html), + can be debugged using the [`:sys` module](http://www.erlang.org/doc/man/sys.html). Through various hooks, this module + allows developers to introspect the state of the process and trace + system events that happen during its execution, such as received messages, + sent replies and state changes. + + Let's explore the basic functions from the [`:sys` module](http://www.erlang.org/doc/man/sys.html) used for debugging: + + * [`:sys.get_state/2`](http://erlang.org/doc/man/sys.html#get_state-2) - + allows retrieval of the state of the process. In the case of + a GenServer process, it will be the callback module state, as + passed into the callback functions as last argument. + * [`:sys.get_status/2`](http://erlang.org/doc/man/sys.html#get_status-2) - + allows retrieval of the status of the process. This status includes + the process dictionary, if the process is running or is suspended, + the parent PID, the debugger state, and the state of the behaviour module, + which includes the callback module state (as returned by `:sys.get_state/2`). + It's possible to change how this status is represented by defining + the optional `c:GenServer.format_status/2` callback. + * [`:sys.trace/3`](http://erlang.org/doc/man/sys.html#trace-3) - + prints all the system events to `:stdio`. + * [`:sys.statistics/3`](http://erlang.org/doc/man/sys.html#statistics-3) - + manages collection of process statistics. + * [`:sys.no_debug/2`](http://erlang.org/doc/man/sys.html#no_debug-2) - + turns off all debug handlers for the given process. It is very important + to switch off debugging once we're done. Excessive debug handlers or + those that should be turned off, but weren't, can seriously damage + the performance of the system. + * [`:sys.suspend/2`](http://erlang.org/doc/man/sys.html#suspend-2) - allows + to suspend a process so that it only replies to system messages but no + other messages. A suspended process can be reactivated via + [`:sys.resume/2`](http://erlang.org/doc/man/sys.html#resume-2). + + Let's see how we could use those functions for debugging the stack server + we defined earlier. 
+ + iex> {:ok, pid} = Stack.start_link([]) + iex> :sys.statistics(pid, true) # turn on collecting process statistics + iex> :sys.trace(pid, true) # turn on event printing + iex> Stack.push(pid, 1) + *DBG* <0.122.0> got cast {push,1} + *DBG* <0.122.0> new state [1] + :ok + iex> :sys.get_state(pid) + [1] + iex> Stack.pop(pid) + *DBG* <0.122.0> got call pop from <0.80.0> + *DBG* <0.122.0> sent 1 to <0.80.0>, new state [] + 1 + iex> :sys.statistics(pid, :get) + {:ok, + [start_time: {{2016, 7, 16}, {12, 29, 41}}, + current_time: {{2016, 7, 16}, {12, 29, 50}}, + reductions: 117, messages_in: 2, messages_out: 0]} + iex> :sys.no_debug(pid) # turn off all debug handlers + :ok + iex> :sys.get_status(pid) + {:status, #PID<0.122.0>, {:module, :gen_server}, + [["$initial_call": {Stack, :init, 1}, # pdict + "$ancestors": [#PID<0.80.0>, #PID<0.51.0>]], + :running, # :running | :suspended + #PID<0.80.0>, # parent + [], # debugger state + [header: 'Status for generic server <0.122.0>', # module status + data: [{'Status', :running}, {'Parent', #PID<0.80.0>}, + {'Logged events', []}], data: [{'State', [1]}]]]} + ## Learn more - If you wish to find out more about gen servers, Elixir getting started - guides provide a tutorial-like introduction. The documentation and links + If you wish to find out more about gen servers, the Elixir Getting Started + guide provides a tutorial-like introduction. The documentation and links in Erlang can also provide extra insight. - * http://elixir-lang.org/getting_started/mix/1.html - * http://www.erlang.org/doc/man/gen_server.html - * http://www.erlang.org/doc/design_principles/gen_server_concepts.html - * http://learnyousomeerlang.com/clients-and-servers + * [GenServer – Elixir's Getting Started Guide](http://elixir-lang.org/getting-started/mix-otp/genserver.html) + * [`:gen_server` module documentation](http://www.erlang.org/doc/man/gen_server.html) + * [gen_server Behaviour – OTP Design Principles](http://www.erlang.org/doc/design_principles/gen_server_concepts.html) + * [Clients and Servers – Learn You Some Erlang for Great Good!](http://learnyousomeerlang.com/clients-and-servers) + """ + + @doc """ + Invoked when the server is started. `start_link/3` or `start/3` will + block until it returns. + + `args` is the argument term (second argument) passed to `start_link/3`. + + Returning `{:ok, state}` will cause `start_link/3` to return + `{:ok, pid}` and the process to enter its loop. + + Returning `{:ok, state, timeout}` is similar to `{:ok, state}` + except `handle_info(:timeout, state)` will be called after `timeout` + milliseconds if no messages are received within the timeout. + + Returning `{:ok, state, :hibernate}` is similar to + `{:ok, state}` except the process is hibernated before entering the loop. See + `c:handle_call/3` for more information on hibernation. + + Returning `:ignore` will cause `start_link/3` to return `:ignore` and the + process will exit normally without entering the loop or calling `c:terminate/2`. + If used when part of a supervision tree the parent supervisor will not fail + to start nor immediately try to restart the `GenServer`. The remainder of the + supervision tree will be (re)started and so the `GenServer` should not be + required by other processes. It can be started later with + `Supervisor.restart_child/2` as the child specification is saved in the parent + supervisor. The main use cases for this are: + + * The `GenServer` is disabled by configuration but might be enabled later. 
+ * An error occurred and it will be handled by a different mechanism than the + `Supervisor`. Likely this approach involves calling `Supervisor.restart_child/2` + after a delay to attempt a restart. + + Returning `{:stop, reason}` will cause `start_link/3` to return + `{:error, reason}` and the process to exit with reason `reason` without + entering the loop or calling `c:terminate/2`. + """ + @callback init(args :: term) :: + {:ok, state} | + {:ok, state, timeout | :hibernate} | + :ignore | + {:stop, reason :: any} when state: any + + @doc """ + Invoked to handle synchronous `call/3` messages. `call/3` will block until a + reply is received (unless the call times out or nodes are disconnected). + + `request` is the request message sent by a `call/3`, `from` is a 2-tuple + containing the caller's PID and a term that uniquely identifies the call, and + `state` is the current state of the `GenServer`. + + Returning `{:reply, reply, new_state}` sends the response `reply` to the + caller and continues the loop with new state `new_state`. + + Returning `{:reply, reply, new_state, timeout}` is similar to + `{:reply, reply, new_state}` except `handle_info(:timeout, new_state)` will be + called after `timeout` milliseconds if no messages are received. + + Returning `{:reply, reply, new_state, :hibernate}` is similar to + `{:reply, reply, new_state}` except the process is hibernated and will + continue the loop once a message is in its message queue. If a message is + already in the message queue this will be immediately. Hibernating a + `GenServer` causes garbage collection and leaves a continuous heap that + minimises the memory used by the process. + + Hibernating should not be used aggressively as too much time could be spent + garbage collecting. Normally it should only be used when a message is not + expected soon and minimising the memory of the process is shown to be + beneficial. + + Returning `{:noreply, new_state}` does not send a response to the caller and + continues the loop with new state `new_state`. The response must be sent with + `reply/2`. + + There are three main use cases for not replying using the return value: + + * To reply before returning from the callback because the response is known + before calling a slow function. + * To reply after returning from the callback because the response is not yet + available. + * To reply from another process, such as a task. + + When replying from another process the `GenServer` should exit if the other + process exits without replying as the caller will be blocking awaiting a + reply. + + Returning `{:noreply, new_state, timeout | :hibernate}` is similar to + `{:noreply, new_state}` except a timeout or hibernation occurs as with a + `:reply` tuple. + + Returning `{:stop, reason, reply, new_state}` stops the loop and `c:terminate/2` + is called with reason `reason` and state `new_state`. Then the `reply` is sent + as the response to call and the process exits with reason `reason`. + + Returning `{:stop, reason, new_state}` is similar to + `{:stop, reason, reply, new_state}` except a reply is not sent. + + If this callback is not implemented, the default implementation by + `use GenServer` will return `{:stop, {:bad_call, request}, state}`. 
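A minimal sketch of the "reply from another process" case, assuming a hypothetical `SlowServer` module with a made-up `do_slow_work/1` helper; the callback returns `{:noreply, state}` and the deferred reply is later sent with `GenServer.reply/2`:

    defmodule SlowServer do
      use GenServer

      def start_link(arg), do: GenServer.start_link(__MODULE__, arg)

      def init(_arg), do: {:ok, %{}}

      def handle_call({:compute, input}, from, state) do
        # Answer later, from a task, using the caller reference in `from`.
        Task.start(fn ->
          result = do_slow_work(input)
          GenServer.reply(from, result)
        end)

        {:noreply, state}
      end

      # Placeholder for the slow computation (hypothetical).
      defp do_slow_work(input) do
        Process.sleep(100)
        {:done, input}
      end
    end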
""" + @callback handle_call(request :: term, from, state :: term) :: + {:reply, reply, new_state} | + {:reply, reply, new_state, timeout | :hibernate} | + {:noreply, new_state} | + {:noreply, new_state, timeout | :hibernate} | + {:stop, reason, reply, new_state} | + {:stop, reason, new_state} when reply: term, new_state: term, reason: term + + @doc """ + Invoked to handle asynchronous `cast/2` messages. + + `request` is the request message sent by a `cast/2` and `state` is the current + state of the `GenServer`. + + Returning `{:noreply, new_state}` continues the loop with new state `new_state`. + + Returning `{:noreply, new_state, timeout}` is similar to + `{:noreply, new_state}` except `handle_info(:timeout, new_state)` will be + called after `timeout` milliseconds if no messages are received. + + Returning `{:noreply, new_state, :hibernate}` is similar to + `{:noreply, new_state}` except the process is hibernated before continuing the + loop. See `c:handle_call/3` for more information. + + Returning `{:stop, reason, new_state}` stops the loop and `c:terminate/2` is + called with the reason `reason` and state `new_state`. The process exits with + reason `reason`. + + If this callback is not implemented, the default implementation by + `use GenServer` will return `{:stop, {:bad_cast, request}, state}`. + """ + @callback handle_cast(request :: term, state :: term) :: + {:noreply, new_state} | + {:noreply, new_state, timeout | :hibernate} | + {:stop, reason :: term, new_state} when new_state: term + + @doc """ + Invoked to handle all other messages. + + `msg` is the message and `state` is the current state of the `GenServer`. When + a timeout occurs the message is `:timeout`. + + Return values are the same as `c:handle_cast/2`. + + If this callback is not implemented, the default implementation by + `use GenServer` will return `{:noreply, state}`. + """ + @callback handle_info(msg :: :timeout | term, state :: term) :: + {:noreply, new_state} | + {:noreply, new_state, timeout | :hibernate} | + {:stop, reason :: term, new_state} when new_state: term + + @doc """ + Invoked when the server is about to exit. It should do any cleanup required. + + `reason` is exit reason and `state` is the current state of the `GenServer`. + The return value is ignored. + + `c:terminate/2` is called if a callback (except `c:init/1`) does one of the + following: + + * returns a `:stop` tuple + * raises + * calls `Kernel.exit/1` + * returns an invalid value + * the `GenServer` traps exits (using `Process.flag/2`) *and* the parent + process sends an exit signal + + If part of a supervision tree, a `GenServer`'s `Supervisor` will send an exit + signal when shutting it down. The exit signal is based on the shutdown + strategy in the child's specification. If it is `:brutal_kill` the `GenServer` + is killed and so `c:terminate/2` is not called. However if it is a timeout the + `Supervisor` will send the exit signal `:shutdown` and the `GenServer` will + have the duration of the timeout to call `c:terminate/2` - if the process is + still alive after the timeout it is killed. + + If the `GenServer` receives an exit signal (that is not `:normal`) from any + process when it is not trapping exits it will exit abruptly with the same + reason and so not call `c:terminate/2`. Note that a process does *NOT* trap + exits by default and an exit signal is sent when a linked process exits or its + node is disconnected. + + Therefore it is not guaranteed that `c:terminate/2` is called when a `GenServer` + exits. 
For such reasons, we usually recommend important clean-up rules to + happen in separated processes either by use of monitoring or by links + themselves. For example if the `GenServer` controls a `port` (e.g. + `:gen_tcp.socket`) or `t:File.io_device/0`, they will be closed on receiving a + `GenServer`'s exit signal and do not need to be closed in `c:terminate/2`. + + If `reason` is not `:normal`, `:shutdown`, nor `{:shutdown, term}` an error is + logged. + """ + @callback terminate(reason, state :: term) :: + term when reason: :normal | :shutdown | {:shutdown, term} | term + + @doc """ + Invoked to change the state of the `GenServer` when a different version of a + module is loaded (hot code swapping) and the state's term structure should be + changed. + + `old_vsn` is the previous version of the module (defined by the `@vsn` + attribute) when upgrading. When downgrading the previous version is wrapped in + a 2-tuple with first element `:down`. `state` is the current state of the + `GenServer` and `extra` is any extra data required to change the state. + + Returning `{:ok, new_state}` changes the state to `new_state` and the code + change is successful. + + Returning `{:error, reason}` fails the code change with reason `reason` and + the state remains as the previous state. + + If `c:code_change/3` raises the code change fails and the loop will continue + with its previous state. Therefore this callback does not usually contain side effects. + """ + @callback code_change(old_vsn, state :: term, extra :: term) :: + {:ok, new_state :: term} | + {:error, reason :: term} when old_vsn: term | {:down, term} + + @doc """ + Invoked in some cases to retrieve a formatted version of the `GenServer` status. + + This callback can be useful to control the *appearance* of the status of the + `GenServer`. For example, it can be used to return a compact representation of + the `GenServer`'s state to avoid having large state terms printed. + + * one of `:sys.get_status/1` or `:sys.get_status/2` is invoked to get the + status of the `GenServer`; in such cases, `reason` is `:normal` + + * the `GenServer` terminates abnormally and logs an error; in such cases, + `reason` is `:terminate` + + `pdict_and_state` is a two-elements list `[pdict, state]` where `pdict` is a + list of `{key, value}` tuples representing the current process dictionary of + the `GenServer` and `state` is the current state of the `GenServer`. + """ + @callback format_status(reason, pdict_and_state :: list) :: + term when reason: :normal | :terminate + + @optional_callbacks format_status: 2 @typedoc "Return values of `start*` functions" @type on_start :: {:ok, pid} | :ignore | {:error, {:already_started, pid} | term} @@ -210,21 +523,32 @@ defmodule GenServer do @type name :: atom | {:global, term} | {:via, module, term} @typedoc "Options used by the `start*` functions" - @type options :: [debug: debug, - name: name, - timeout: timeout, - spawn_opt: Process.spawn_opt] + @type options :: [option] + + @typedoc "Option values used by the `start*` functions" + @type option :: {:debug, debug} | + {:name, name} | + {:timeout, timeout} | + {:spawn_opt, Process.spawn_opt} - @typedoc "debug options supported by the `start*` functions" + @typedoc "Debug options supported by the `start*` functions" @type debug :: [:trace | :log | :statistics | {:log_to_file, Path.t}] @typedoc "The server reference" @type server :: pid | name | {atom, node} + @typedoc """ + Tuple describing the client of a call request. 
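As an illustration of the `c:code_change/3` contract documented above, here is a small editorial sketch (not part of the diff) that upgrades a keyword-list state to a map during a hot code upgrade; the two state shapes are assumptions made for the example:

    def code_change(_old_vsn, state, _extra) when is_list(state) do
      {:ok, Map.new(state)}    # convert the old keyword-list state to a map
    end

    def code_change(_old_vsn, state, _extra) do
      {:ok, state}             # already in the new shape; keep it unchanged
    end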
+ + `pid` is the PID of the caller and `tag` is a unique term used to identify the + call. + """ + @type from :: {pid, tag :: term} + @doc false defmacro __using__(_) do quote location: :keep do - @behaviour :gen_server + @behaviour GenServer @doc false def init(args) do @@ -233,17 +557,44 @@ defmodule GenServer do @doc false def handle_call(msg, _from, state) do - {:stop, {:bad_call, msg}, state} + proc = + case Process.info(self(), :registered_name) do + {_, []} -> self() + {_, name} -> name + end + + # We do this to trick Dialyzer to not complain about non-local returns. + case :erlang.phash2(1, 1) do + 0 -> raise "attempted to call GenServer #{inspect proc} but no handle_call/3 clause was provided" + 1 -> {:stop, {:bad_call, msg}, state} + end end @doc false - def handle_info(_msg, state) do + def handle_info(msg, state) do + proc = + case Process.info(self(), :registered_name) do + {_, []} -> self() + {_, name} -> name + end + :error_logger.error_msg('~p ~p received unexpected message in handle_info/2: ~p~n', + [__MODULE__, proc, msg]) {:noreply, state} end @doc false def handle_cast(msg, state) do - {:stop, {:bad_cast, msg}, state} + proc = + case Process.info(self(), :registered_name) do + {_, []} -> self() + {_, name} -> name + end + + # We do this to trick Dialyzer to not complain about non-local returns. + case :erlang.phash2(1, 1) do + 0 -> raise "attempted to cast GenServer #{inspect proc} but no handle_cast/2 clause was provided" + 1 -> {:stop, {:bad_cast, msg}, state} + end end @doc false @@ -256,8 +607,7 @@ defmodule GenServer do {:ok, state} end - defoverridable [init: 1, handle_call: 3, handle_info: 2, - handle_cast: 2, terminate: 2, code_change: 3] + defoverridable GenServer end end @@ -266,38 +616,40 @@ defmodule GenServer do This is often used to start the `GenServer` as part of a supervision tree. - Once the server is started, it calls the `init/1` function in the given `module` - passing the given `args` to initialize it. To ensure a synchronized start-up - procedure, this function does not return until `init/1` has returned. + Once the server is started, the `c:init/1` function of the given `module` is + called with `args` as its arguments to initialize the server. To ensure a + synchronized start-up procedure, this function does not return until `c:init/1` + has returned. Note that a `GenServer` started with `start_link/3` is linked to the - parent process and will exit in case of crashes. The GenServer will also - exit due to the `:normal` reasons in case it is configured to trap exits - in the `init/1` callback. + parent process and will exit in case of crashes from the parent. The GenServer + will also exit due to the `:normal` reasons in case it is configured to trap + exits in the `c:init/1` callback. ## Options - The `:name` option is used for name registration as described in the module - documentation. If the option `:timeout` option is present, the server is - allowed to spend the given milliseconds initializing or it will be - terminated and the start function will return `{:error, :timeout}`. + * `:name` - used for name registration as described in the "Name + registration" section of the module documentation + + * `:timeout` - if present, the server is allowed to spend the given amount of + milliseconds initializing or it will be terminated and the start function + will return `{:error, :timeout}` - If the `:debug` option is present, the corresponding function in the - [`:sys` module](http://www.erlang.org/doc/man/sys.html) will be invoked. 
+ * `:debug` - if present, the corresponding function in the [`:sys` module](http://www.erlang.org/doc/man/sys.html) is invoked - If the `:spawn_opt` option is present, its value will be passed as options - to the underlying process as in `Process.spawn/4`. + * `:spawn_opt` - if present, its value is passed as options to the + underlying process as in `Process.spawn/4` ## Return values - If the server is successfully created and initialized, the function returns - `{:ok, pid}`, where pid is the pid of the server. If there already exists a - process with the specified server name, the function returns - `{:error, {:already_started, pid}}` with the pid of that process. + If the server is successfully created and initialized, this function returns + `{:ok, pid}`, where `pid` is the PID of the server. If a process with the + specified server name already exists, this function returns + `{:error, {:already_started, pid}}` with the PID of that process. - If the `init/1` callback fails with `reason`, the function returns + If the `c:init/1` callback fails with `reason`, this function returns `{:error, reason}`. Otherwise, if it returns `{:stop, reason}` - or `:ignore`, the process is terminated and the function returns + or `:ignore`, the process is terminated and this function returns `{:error, reason}` or `:ignore`, respectively. """ @spec start_link(module, any, options) :: on_start @@ -321,99 +673,221 @@ defmodule GenServer do :gen.start(:gen_server, link, module, args, opts) {atom, opts} when is_atom(atom) -> :gen.start(:gen_server, link, {:local, atom}, module, args, opts) - {other, opts} when is_tuple(other) -> - :gen.start(:gen_server, link, other, module, args, opts) + {{:global, _term} = tuple, opts} -> + :gen.start(:gen_server, link, tuple, module, args, opts) + {{:via, via_module, _term} = tuple, opts} when is_atom(via_module) -> + :gen.start(:gen_server, link, tuple, module, args, opts) + {other, _} -> + raise ArgumentError, """ + expected :name option to be one of: + + * nil + * atom + * {:global, term} + * {:via, module, term} + + Got: #{inspect(other)} + """ end end + @doc """ + Synchronously stops the server with the given `reason`. + + The `c:terminate/2` callback of the given `server` will be invoked before + exiting. This function returns `:ok` if the server terminates with the + given reason; if it terminates with another reason, the call exits. + + This function keeps OTP semantics regarding error reporting. + If the reason is any other than `:normal`, `:shutdown` or + `{:shutdown, _}`, an error report is logged. + """ + @spec stop(server, reason :: term, timeout) :: :ok + def stop(server, reason \\ :normal, timeout \\ :infinity) do + :gen.stop(server, reason, timeout) + end + @doc """ Makes a synchronous call to the `server` and waits for its reply. The client sends the given `request` to the server and waits until a reply - arrives or a timeout occurs. `handle_call/3` will be called on the server + arrives or a timeout occurs. `c:handle_call/3` will be called on the server to handle the request. - The server can be any of the values described in the `Name Registration` - section of the module documentation. + `server` can be any of the values described in the "Name registration" + section of the documentation for this module. ## Timeouts - The `timeout` is an integer greater than zero which specifies how many + `timeout` is an integer greater than zero which specifies how many milliseconds to wait for a reply, or the atom `:infinity` to wait - indefinitely. 
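The name registration code above accepts `nil`, an atom, `{:global, term}` or `{:via, module, term}` as the `:name` option. A short editorial sketch (not part of the diff) of the accepted forms; `MyApp.Worker` and `MyApp.Registry` are hypothetical names and each call is meant independently:

    GenServer.start_link(MyApp.Worker, [], name: MyApp.Worker)         # locally registered atom
    GenServer.start_link(MyApp.Worker, [], name: {:global, :worker})   # registered via :global
    GenServer.start_link(MyApp.Worker, [],
      name: {:via, Registry, {MyApp.Registry, "worker"}})              # requires a Registry named MyApp.Registry

    # stop/3 invokes c:terminate/2 before the process exits
    GenServer.stop(MyApp.Worker)
    #=> :ok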
The default value is 5000. If no reply is received within - the specified time, the function call fails. If the caller catches the - failure and continues running, and the server is just late with the reply, - it may arrive at any time later into the caller's message queue. The caller - must in this case be prepared for this and discard any such garbage messages - that are two element tuples with a reference as the first element. + indefinitely. The default value is `5000`. If no reply is received within + the specified time, the function call fails and the caller exits. If the + caller catches the failure and continues running, and the server is just late + with the reply, it may arrive at any time later into the caller's message + queue. The caller must in this case be prepared for this and discard any such + garbage messages that are two-element tuples with a reference as the first + element. """ @spec call(server, term, timeout) :: term def call(server, request, timeout \\ 5000) do - :gen_server.call(server, request, timeout) + case whereis(server) do + nil -> + exit({:noproc, {__MODULE__, :call, [server, request, timeout]}}) + pid when pid == self() -> + exit({:calling_self, {__MODULE__, :call, [server, request, timeout]}}) + pid -> + try do + :gen.call(pid, :"$gen_call", request, timeout) + catch + :exit, reason -> + exit({reason, {__MODULE__, :call, [server, request, timeout]}}) + else + {:ok, res} -> res + end + end end @doc """ Sends an asynchronous request to the `server`. - This function returns `:ok` immediately, regardless of whether the - destination node or server does exists. `handle_cast/2` will be called on the - server to handle the request. + This function always returns `:ok` regardless of whether + the destination `server` (or node) exists. Therefore it + is unknown whether the destination `server` successfully + handled the message. + + `c:handle_cast/2` will be called on the server to handle + the request. In case the `server` is on a node which is + not yet connected to the caller one, the call is going to + block until a connection happens. This is different than + the behaviour in OTP's `:gen_server` where the message + is sent by another process in this case, which could cause + messages to other nodes to arrive out of order. """ @spec cast(server, term) :: :ok - defdelegate cast(server, request), to: :gen_server + def cast(server, request) + + def cast({:global, name}, request) do + try do + :global.send(name, cast_msg(request)) + :ok + catch + _, _ -> :ok + end + end + + def cast({:via, mod, name}, request) do + try do + mod.send(name, cast_msg(request)) + :ok + catch + _, _ -> :ok + end + end + + def cast({name, node}, request) when is_atom(name) and is_atom(node), + do: do_send({name, node}, cast_msg(request)) + + def cast(dest, request) when is_atom(dest) or is_pid(dest), + do: do_send(dest, cast_msg(request)) @doc """ Casts all servers locally registered as `name` at the specified nodes. - The function returns immediately and ignores nodes that do not exist, or where the + This function returns immediately and ignores nodes that do not exist, or where the server name does not exist. See `multi_call/4` for more information. 
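To contrast the blocking `call/3` with the fire-and-forget `cast/2` implemented above, a brief editorial usage sketch (not part of the patch), assuming the `Stack` server from the module documentation started with an initial state of `[:hello]`:

    {:ok, pid} = Stack.start_link([:hello])

    GenServer.call(pid, :pop)           #=> :hello (blocks, default timeout of 5000 ms)
    GenServer.cast(pid, {:push, 1})     #=> :ok (returns immediately, no reply)

    # Casting to a name that is not registered still returns :ok,
    # matching the try/catch around do_send/2 above.
    GenServer.cast(:no_such_server, {:push, 1})
    #=> :ok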
""" @spec abcast([node], name :: atom, term) :: :abcast - def abcast(nodes \\ nodes(), name, request) do - :gen_server.abcast(nodes, name, request) + def abcast(nodes \\ [node() | Node.list()], name, request) when is_list(nodes) and is_atom(name) do + msg = cast_msg(request) + _ = for node <- nodes, do: do_send({name, node}, msg) + :abcast + end + + defp cast_msg(req) do + {:"$gen_cast", req} + end + + defp do_send(dest, msg) do + try do + send(dest, msg) + :ok + catch + _, _ -> :ok + end end @doc """ Calls all servers locally registered as `name` at the specified `nodes`. - The `request` is first sent to every node and then we wait for the - replies. This function returns a tuple containing the node and its reply - as first element and all bad nodes as second element. The bad nodes is a - list of nodes that either did not exist, or where a server with the given - `name` did not exist or did not reply. + First, the `request` is sent to every node in `nodes`; then, the caller waits + for the replies. This function returns a two-element tuple `{replies, + bad_nodes}` where: + + * `replies` - is a list of `{node, reply}` tuples where `node` is the node + that replied and `reply` is its reply + * `bad_nodes` - is a list of nodes that either did not exist or where a + server with the given `name` did not exist or did not reply - Nodes is a list of node names to which the request is sent. The default - value is the list of all known nodes. + `nodes` is a list of node names to which the request is sent. The default + value is the list of all known nodes (including this node). To avoid that late answers (after the timeout) pollute the caller's message queue, a middleman process is used to do the actual calls. Late answers will then be discarded when they arrive to a terminated process. + + ## Examples + + Assuming the `Stack` GenServer mentioned in the docs for the `GenServer` + module is registered as `Stack` in the `:"foo@my-machine"` and + `:"bar@my-machine"` nodes: + + GenServer.multi_call(Stack, :pop) + #=> {[{:"foo@my-machine", :hello}, {:"bar@my-machine", :world}], []} + """ @spec multi_call([node], name :: atom, term, timeout) :: {replies :: [{node, term}], bad_nodes :: [node]} - def multi_call(nodes \\ nodes(), name, request, timeout \\ :infinity) do + def multi_call(nodes \\ [node() | Node.list()], name, request, timeout \\ :infinity) do :gen_server.multi_call(nodes, name, request, timeout) end @doc """ Replies to a client. - This function can be used by a server to explicitly send a reply to a - client that called `call/3` or `multi_call/4`. When the reply cannot be - defined in the return value of `handle_call/3`. + This function can be used to explicitly send a reply to a client that called + `call/3` or `multi_call/4` when the reply cannot be specified in the return + value of `c:handle_call/3`. + + `client` must be the `from` argument (the second argument) accepted by + `c:handle_call/3` callbacks. `reply` is an arbitrary term which will be given + back to the client as the return value of the call. - The `client` must be the `from` argument (the second argument) received - in `handle_call/3` callbacks. Reply is an arbitrary term which will be - given back to the client as the return value of the call. + Note that `reply/2` can be called from any process, not just the GenServer + that originally received the call (as long as that GenServer communicated the + `from` argument somehow). This function always returns `:ok`. 
+ + ## Examples + + def handle_call(:reply_in_one_second, from, state) do + Process.send_after(self(), {:reply, from}, 1_000) + {:noreply, state} + end + + def handle_info({:reply, from}, state) do + GenServer.reply(from, :one_second_has_passed) + {:noreply, state} + end + """ - @spec reply({pid, reference}, term) :: :ok + @spec reply(from, term) :: :ok def reply(client, reply) - def reply({to, tag}, reply) do + def reply({to, tag}, reply) when is_pid(to) do try do send(to, {tag, reply}) :ok @@ -422,9 +896,47 @@ defmodule GenServer do end end - @compile {:inline, [nodes: 0]} + @doc """ + Returns the `pid` or `{name, node}` of a GenServer process, or `nil` if + no process is associated with the given `server`. + + ## Examples + + For example, to lookup a server process, monitor it and send a cast to it: + + process = GenServer.whereis(server) + monitor = Process.monitor(process) + GenServer.cast(process, :hello) + + """ + @spec whereis(server) :: pid | {atom, node} | nil + def whereis(server) + + def whereis(pid) when is_pid(pid), do: pid + + def whereis(name) when is_atom(name) do + Process.whereis(name) + end + + def whereis({:global, name}) do + case :global.whereis_name(name) do + pid when is_pid(pid) -> pid + :undefined -> nil + end + end + + def whereis({:via, mod, name}) do + case apply(mod, :whereis_name, [name]) do + pid when is_pid(pid) -> pid + :undefined -> nil + end + end + + def whereis({name, local}) when is_atom(name) and local == node() do + Process.whereis(name) + end - defp nodes do - [node()|:erlang.nodes()] + def whereis({name, node} = server) when is_atom(name) and is_atom(node) do + server end end diff --git a/lib/elixir/lib/hash_dict.ex b/lib/elixir/lib/hash_dict.ex index edcb8e86329..f676c8bb64b 100644 --- a/lib/elixir/lib/hash_dict.ex +++ b/lib/elixir/lib/hash_dict.ex @@ -1,19 +1,13 @@ defmodule HashDict do @moduledoc """ - A key-value store. - - The `HashDict` is represented internally as a struct, therefore - `%HashDict{}` can be used whenever there is a need to match - on any `HashDict`. Note though the struct fields are private and - must not be accessed directly. Instead, use the functions on this - or in the `Dict` module. - - Implementation-wise, `HashDict` is implemented using tries, which - grows in space as the number of keys grows, working well with both - small and large set of keys. For more information about the - functions and their APIs, please consult the `Dict` module. + WARNING: this module is deprecated. + + Use the `Map` module instead. 
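Since the deprecation notice above points to `Map`, a short editorial migration sketch (not part of the diff) showing the equivalent calls:

    dict = HashDict.new |> HashDict.put(:a, 1)   # deprecated
    map  = Map.new      |> Map.put(:a, 1)        # preferred replacement

    HashDict.get(dict, :a)   #=> 1
    Map.get(map, :a)         #=> 1
    map[:a]                  #=> 1 (maps also work with the Access syntax)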
""" + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + use Dict @node_bitmap 0b111 @@ -21,7 +15,7 @@ defmodule HashDict do @node_size 8 @node_template :erlang.make_tuple(@node_size, []) - @opaque t :: map + @opaque t :: %__MODULE__{size: non_neg_integer, root: term} @doc false defstruct size: 0, root: @node_template @@ -99,7 +93,7 @@ defmodule HashDict do defp do_fetch(node, key, hash) do index = key_mask(hash) case elem(node, index) do - [^key|v] -> {:ok, v} + [^key | v] -> {:ok, v} {^key, v, _} -> {:ok, v} {_, _, n} -> do_fetch(n, key, key_shift(hash)) _ -> :error @@ -110,11 +104,11 @@ defmodule HashDict do index = key_mask(hash) case elem(node, index) do [] -> - {put_elem(node, index, [key|value]), 1} - [^key|_] -> - {put_elem(node, index, [key|value]), 0} - [k|v] -> - n = put_elem(@node_template, key_mask(key_shift(hash)), [key|value]) + {put_elem(node, index, [key | value]), 1} + [^key | _] -> + {put_elem(node, index, [key | value]), 0} + [k | v] -> + n = put_elem(@node_template, key_mask(key_shift(hash)), [key | value]) {put_elem(node, index, {k, v, n}), 1} {^key, _, n} -> {put_elem(node, index, {key, value, n}), 0} @@ -128,11 +122,11 @@ defmodule HashDict do index = key_mask(hash) case elem(node, index) do [] -> - {put_elem(node, index, [key|initial.()]), 1} - [^key|value] -> - {put_elem(node, index, [key|fun.(value)]), 0} - [k|v] -> - n = put_elem(@node_template, key_mask(key_shift(hash)), [key|initial.()]) + {put_elem(node, index, [key | initial.()]), 1} + [^key | value] -> + {put_elem(node, index, [key | fun.(value)]), 0} + [k | v] -> + n = put_elem(@node_template, key_mask(key_shift(hash)), [key | initial.()]) {put_elem(node, index, {k, v, n}), 1} {^key, value, n} -> {put_elem(node, index, {key, fun.(value), n}), 0} @@ -147,16 +141,16 @@ defmodule HashDict do case elem(node, index) do [] -> :error - [^key|value] -> + [^key | value] -> {put_elem(node, index, []), value} - [_|_] -> + [_ | _] -> :error {^key, value, n} -> {put_elem(node, index, do_compact_node(n)), value} {k, v, n} -> case do_delete(n, key, key_shift(hash)) do {@node_template, value} -> - {put_elem(node, index, [k|v]), value} + {put_elem(node, index, [k | v]), value} {n, value} -> {put_elem(node, index, {k, v, n}), value} :error -> @@ -168,9 +162,9 @@ defmodule HashDict do Enum.each 0..(@node_size - 1), fn index -> defp do_compact_node(node) when elem(node, unquote(index)) != [] do case elem(node, unquote(index)) do - [k|v] -> + [k | v] -> case put_elem(node, unquote(index), []) do - @node_template -> [k|v] + @node_template -> [k | v] n -> {k, v, n} end {k, v, n} -> @@ -193,8 +187,8 @@ defmodule HashDict do next.(acc) end - defp do_reduce_each([k|v], {:cont, acc}, fun, next) do - next.(fun.({k,v}, acc)) + defp do_reduce_each([k | v], {:cont, acc}, fun, next) do + next.(fun.({k, v}, acc)) end defp do_reduce_each({k, v, n}, {:cont, acc}, fun, next) do @@ -227,31 +221,35 @@ defmodule HashDict do end defimpl Enumerable, for: HashDict do - def reduce(dict, acc, fun), do: HashDict.reduce(dict, acc, fun) - def member?(dict, {k, v}), do: {:ok, match?({:ok, ^v}, HashDict.fetch(dict, k))} - def member?(_dict, _), do: {:ok, false} - def count(dict), do: {:ok, HashDict.size(dict)} -end + def reduce(dict, acc, fun) do + # Avoid warnings about HashDict being deprecated. + module = HashDict + module.reduce(dict, acc, fun) + end -defimpl Access, for: HashDict do - def get(dict, key) do - HashDict.get(dict, key, nil) + def member?(dict, {key, value}) do + # Avoid warnings about HashDict being deprecated. 
+ module = HashDict + {:ok, match?({:ok, ^value}, module.fetch(dict, key))} end - def get_and_update(dict, key, fun) do - {get, update} = fun.(HashDict.get(dict, key, nil)) - {get, HashDict.put(dict, key, update)} + def member?(_dict, _) do + {:ok, false} end -end -defimpl Collectable, for: HashDict do - def empty(_dict) do - HashDict.new + def count(dict) do + # Avoid warnings about HashDict being deprecated. + module = HashDict + {:ok, module.size(dict)} end +end +defimpl Collectable, for: HashDict do def into(original) do + # Avoid warnings about HashDict being deprecated. + module = HashDict {original, fn - dict, {:cont, {k, v}} -> Dict.put(dict, k, v) + dict, {:cont, {key, value}} -> module.put(dict, key, value) dict, :done -> dict _, :halt -> :ok end} @@ -262,6 +260,8 @@ defimpl Inspect, for: HashDict do import Inspect.Algebra def inspect(dict, opts) do - concat ["#HashDict<", Inspect.List.inspect(HashDict.to_list(dict), opts), ">"] + # Avoid warnings about HashDict being deprecated. + module = HashDict + concat ["#HashDict<", Inspect.List.inspect(module.to_list(dict), opts), ">"] end end diff --git a/lib/elixir/lib/hash_set.ex b/lib/elixir/lib/hash_set.ex index acbc21ae943..d553df33a0a 100644 --- a/lib/elixir/lib/hash_set.ex +++ b/lib/elixir/lib/hash_set.ex @@ -1,27 +1,19 @@ defmodule HashSet do @moduledoc """ - A set store. - - The `HashSet` is represented internally as a struct, therefore - `%HashSet{}` can be used whenever there is a need to match - on any `HashSet`. Note though the struct fields are private and - must not be accessed directly. Instead, use the functions on this - or in the `Set` module. - - The `HashSet` is implemented using tries, which grows in - space as the number of keys grows, working well with both - small and large set of keys. For more information about the - functions and their APIs, please consult the `Set` module. + WARNING: this module is deprecated. + + Use the `MapSet` module instead. """ - @behaviour Set + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) @node_bitmap 0b111 @node_shift 3 @node_size 8 @node_template :erlang.make_tuple(@node_size, []) - @opaque t :: map + @opaque t :: %__MODULE__{size: non_neg_integer, root: term} @doc false defstruct size: 0, root: @node_template @@ -29,9 +21,6 @@ defmodule HashSet do @compile :inline_list_funcs @compile {:inline, key_hash: 1, key_mask: 1, key_shift: 1} - @doc """ - Creates a new empty set. 
- """ @spec new :: Set.t def new do %HashSet{} @@ -56,7 +45,7 @@ defmodule HashSet do end def to_list(set) do - set_fold(set, [], &[&1|&2]) |> :lists.reverse + set_fold(set, [], &[&1 | &2]) |> :lists.reverse end def equal?(%HashSet{size: size1} = set1, %HashSet{size: size2} = set2) do @@ -124,10 +113,10 @@ defmodule HashSet do defp do_member?(node, term, hash) do index = key_mask(hash) case elem(node, index) do - [] -> false - [^term|_] -> true - [_] -> false - [_|n] -> do_member?(n, term, key_shift(hash)) + [] -> false + [^term | _] -> true + [_] -> false + [_ | n] -> do_member?(n, term, key_shift(hash)) end end @@ -136,14 +125,14 @@ defmodule HashSet do case elem(node, index) do [] -> {put_elem(node, index, [term]), 1} - [^term|_] -> + [^term | _] -> {node, 0} [t] -> n = put_elem(@node_template, key_mask(key_shift(hash)), [term]) - {put_elem(node, index, [t|n]), 1} - [t|n] -> + {put_elem(node, index, [t | n]), 1} + [t | n] -> {n, counter} = do_put(n, term, key_shift(hash)) - {put_elem(node, index, [t|n]), counter} + {put_elem(node, index, [t | n]), counter} end end @@ -156,14 +145,14 @@ defmodule HashSet do {:ok, put_elem(node, index, [])} [_] -> :error - [^term|n] -> + [^term | n] -> {:ok, put_elem(node, index, do_compact_node(n))} - [t|n] -> + [t | n] -> case do_delete(n, term, key_shift(hash)) do {:ok, @node_template} -> {:ok, put_elem(node, index, [t])} {:ok, n} -> - {:ok, put_elem(node, index, [t|n])} + {:ok, put_elem(node, index, [t | n])} :error -> :error end @@ -176,19 +165,19 @@ defmodule HashSet do [t] -> case put_elem(node, unquote(index), []) do @node_template -> [t] - n -> [t|n] + n -> [t | n] end - [t|n] -> - [t|put_elem(node, unquote(index), do_compact_node(n))] + [t | n] -> + [t | put_elem(node, unquote(index), do_compact_node(n))] end end end ## Set fold - defp do_fold_each([], acc, _fun), do: acc - defp do_fold_each([t], acc, fun), do: fun.(t, acc) - defp do_fold_each([t|n], acc, fun), do: do_fold(n, fun.(t, acc), fun, @node_size) + defp do_fold_each([], acc, _fun), do: acc + defp do_fold_each([t], acc, fun), do: fun.(t, acc) + defp do_fold_each([t | n], acc, fun), do: do_fold(n, fun.(t, acc), fun, @node_size) defp do_fold(node, acc, fun, count) when count > 0 do acc = do_fold_each(:erlang.element(count, node), acc, fun) @@ -217,7 +206,7 @@ defmodule HashSet do next.(fun.(t, acc)) end - defp do_reduce_each([t|n], {:cont, acc}, fun, next) do + defp do_reduce_each([t | n], {:cont, acc}, fun, next) do do_reduce(n, fun.(t, acc), fun, @node_size, next) end @@ -247,19 +236,31 @@ defmodule HashSet do end defimpl Enumerable, for: HashSet do - def reduce(set, acc, fun), do: HashSet.reduce(set, acc, fun) - def member?(set, v), do: {:ok, HashSet.member?(set, v)} - def count(set), do: {:ok, HashSet.size(set)} -end + def reduce(set, acc, fun) do + # Avoid warnings about HashSet being deprecated. + module = HashSet + module.reduce(set, acc, fun) + end -defimpl Collectable, for: HashSet do - def empty(_dict) do - HashSet.new + def member?(set, term) do + # Avoid warnings about HashSet being deprecated. + module = HashSet + {:ok, module.member?(set, term)} end + def count(set) do + # Avoid warnings about HashSet being deprecated. + module = HashSet + {:ok, module.size(set)} + end +end + +defimpl Collectable, for: HashSet do def into(original) do + # Avoid warnings about HashSet being deprecated. 
+ module = HashSet {original, fn - set, {:cont, x} -> HashSet.put(set, x) + set, {:cont, term} -> module.put(set, term) set, :done -> set _, :halt -> :ok end} @@ -270,6 +271,8 @@ defimpl Inspect, for: HashSet do import Inspect.Algebra def inspect(set, opts) do - concat ["#HashSet<", Inspect.List.inspect(HashSet.to_list(set), opts), ">"] + # Avoid warnings about HashSet being deprecated. + module = HashSet + concat ["#HashSet<", Inspect.List.inspect(module.to_list(set), opts), ">"] end end diff --git a/lib/elixir/lib/inspect.ex b/lib/elixir/lib/inspect.ex index 26f1dbd5977..eed18521aef 100644 --- a/lib/elixir/lib/inspect.ex +++ b/lib/elixir/lib/inspect.ex @@ -16,20 +16,20 @@ defprotocol Inspect do ## Examples Many times, inspecting a structure can be implemented in function - of existing entities. For example, here is `HashSet`'s `inspect` + of existing entities. For example, here is `MapSet`'s `inspect` implementation: - defimpl Inspect, for: HashSet do + defimpl Inspect, for: MapSet do import Inspect.Algebra def inspect(dict, opts) do - concat ["#HashSet<", to_doc(HashSet.to_list(dict), opts), ">"] + concat ["#MapSet<", to_doc(MapSet.to_list(dict), opts), ">"] end end - The `concat` function comes from `Inspect.Algebra` and it + The `concat/1` function comes from `Inspect.Algebra` and it concatenates algebra documents together. In the example above, - it is concatenating the string `"HashSet<"` (all strings are + it is concatenating the string `"MapSet<"` (all strings are valid algebra documents that keep their formatting when pretty printed), the document returned by `Inspect.Algebra.to_doc/2` and the other string `">"`. @@ -41,278 +41,263 @@ defprotocol Inspect do ## Error handling In case there is an error while your structure is being inspected, - Elixir will automatically fall back to a raw representation. + Elixir will raise an `ArgumentError` error and will automatically fall back + to a raw representation for printing the structure. You can however access the underlying error by invoking the Inspect - implementation directly. For example, to test Inspect.HashSet above, + implementation directly. For example, to test Inspect.MapSet above, you can invoke it as: - Inspect.HashSet.inspect(HashSet.new, Inspect.Opts.new) + Inspect.MapSet.inspect(MapSet.new, %Inspect.Opts{}) """ # Handle structs in Any @fallback_to_any true - def inspect(thing, opts) + def inspect(term, opts) end defimpl Inspect, for: Atom do require Macro - def inspect(atom, _opts) do - inspect(atom) + def inspect(atom, opts) do + color(inspect(atom), color_key(atom), opts) end - def inspect(false), do: "false" - def inspect(true), do: "true" - def inspect(nil), do: "nil" - def inspect(:""), do: ":\"\"" + defp color_key(atom) when is_boolean(atom), do: :boolean + defp color_key(nil), do: :nil + defp color_key(_), do: :atom - def inspect(atom) do + def inspect(atom) when is_nil(atom) or is_boolean(atom) do + Atom.to_string(atom) + end + + def inspect(atom) when is_atom(atom) do binary = Atom.to_string(atom) - cond do - valid_ref_identifier?(binary) -> - if only_elixir?(binary) do - binary - else - "Elixir." <> rest = binary - rest + case Macro.classify_identifier(atom) do + :alias -> + case binary do + binary when binary in ["Elixir", "Elixir.Elixir"] -> + binary + "Elixir.Elixir." <> _rest -> + binary + "Elixir." 
<> rest -> + rest end - valid_atom_identifier?(binary) -> - ":" <> binary - atom in [:%{}, :{}, :<<>>, :..., :%] -> - ":" <> binary - atom in Macro.binary_ops or atom in Macro.unary_ops -> + type when type in [:callable, :not_callable] -> ":" <> binary - true -> - << ?:, ?", Inspect.BitString.escape(binary, ?") :: binary, ?" >> + :other -> + {escaped, _} = Inspect.BitString.escape(binary, ?") + IO.iodata_to_binary [?:, ?", escaped, ?"] end end - - defp only_elixir?("Elixir." <> rest), do: only_elixir?(rest) - defp only_elixir?("Elixir"), do: true - defp only_elixir?(_), do: false - - # Detect if atom is an atom alias (Elixir.Foo.Bar.Baz) - - defp valid_ref_identifier?("Elixir" <> rest) do - valid_ref_piece?(rest) - end - - defp valid_ref_identifier?(_), do: false - - defp valid_ref_piece?(<>) when h in ?A..?Z do - valid_ref_piece? valid_identifier?(t) - end - - defp valid_ref_piece?(<<>>), do: true - defp valid_ref_piece?(_), do: false - - # Detect if atom - - defp valid_atom_identifier?(<>) when h in ?a..?z or h in ?A..?Z or h == ?_ do - valid_atom_piece?(t) - end - - defp valid_atom_identifier?(_), do: false - - defp valid_atom_piece?(t) do - case valid_identifier?(t) do - <<>> -> true - <> -> true - <> -> true - <> -> valid_atom_piece?(t) - _ -> false - end - end - - defp valid_identifier?(<>) - when h in ?a..?z - when h in ?A..?Z - when h in ?0..?9 - when h == ?_ do - valid_identifier? t - end - - defp valid_identifier?(other), do: other end defimpl Inspect, for: BitString do - def inspect(thing, %Inspect.Opts{binaries: bins} = opts) when is_binary(thing) do - if bins == :as_strings or (bins == :infer and String.printable?(thing)) do - <> + def inspect(term, opts) when is_binary(term) do + %Inspect.Opts{binaries: bins, base: base, printable_limit: printable_limit} = opts + + if base == :decimal and + (bins == :as_strings or (bins == :infer and String.printable?(term, printable_limit))) do + inspected = + case escape(term, ?", printable_limit) do + {escaped, ""} -> [?", escaped, ?"] + {escaped, _} -> [?", escaped, ?", " <> ..."] + end + color(IO.iodata_to_binary(inspected), :string, opts) else - inspect_bitstring(thing, opts) + inspect_bitstring(term, opts) end end - def inspect(thing, opts) do - inspect_bitstring(thing, opts) + def inspect(term, opts) do + inspect_bitstring(term, opts) end ## Escaping @doc false def escape(other, char) do - escape(other, char, <<>>) + escape(other, char, :infinity, []) end - defp escape(<< char, t :: binary >>, char, binary) do - escape(t, char, << binary :: binary, ?\\, char >>) + @doc false + def escape(other, char, count) do + escape(other, char, count, []) end - defp escape(<>, char, binary) do - escape(t, char, << binary :: binary, ?\\, ?#, ?{>>) + + defp escape(<<_, _::binary>> = binary, _char, 0, acc) do + {acc, binary} end - defp escape(<>, char, binary) do - escape(t, char, << binary :: binary, ?\\, ?a >>) + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | [?\\, char]]) end - defp escape(<>, char, binary) do - escape(t, char, << binary :: binary, ?\\, ?b >>) + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | '\\\#{']) end - defp escape(<>, char, binary) do - escape(t, char, << binary :: binary, ?\\, ?d >>) + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | '\\a']) end - defp escape(<>, char, binary) do - escape(t, char, << binary :: binary, ?\\, ?e >>) + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | '\\b']) end - defp 
escape(<>, char, binary) do - escape(t, char, << binary :: binary, ?\\, ?f >>) + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | '\\d']) end - defp escape(<>, char, binary) do - escape(t, char, << binary :: binary, ?\\, ?n >>) + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | '\\e']) end - defp escape(<>, char, binary) do - escape(t, char, << binary :: binary, ?\\, ?r >>) + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | '\\f']) end - defp escape(<>, char, binary) do - escape(t, char, << binary :: binary, ?\\, ?\\ >>) + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | '\\n']) end - defp escape(<>, char, binary) do - escape(t, char, << binary :: binary, ?\\, ?t >>) + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | '\\r']) end - defp escape(<>, char, binary) do - escape(t, char, << binary :: binary, ?\\, ?v >>) + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | '\\\\']) end - defp escape(<>, char, binary) do - head = << h :: utf8 >> - if String.printable?(head) do - escape(t, char, append(head, binary)) - else - << byte :: size(8), h :: binary >> = head - t = << h :: binary, t :: binary >> - escape(t, char, << binary :: binary, escape_char(byte) :: binary >>) - end + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | '\\t']) + end + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | '\\v']) end - defp escape(<>, char, binary) do - escape(t, char, << binary :: binary, escape_char(h) :: binary >>) + defp escape(<>, char, count, acc) + when h in 0x20..0x7E + when h in 0xA0..0xD7FF + when h in 0xE000..0xFFFD + when h in 0x10000..0x10FFFF do + escape(t, char, decrement(count), [acc | <>]) + end + defp escape(<>, char, count, acc) do + escape(t, char, decrement(count), [acc | escape_char(h)]) + end + defp escape(<<>>, _char, _count, acc) do + {acc, <<>>} end - defp escape(<<>>, _char, binary), do: binary @doc false # Also used by Regex - def escape_char(char) when char in ?\000..?\377, - do: octify(char) - - def escape_char(char), do: hexify(char) - - defp octify(byte) do - << hi :: size(2), mi :: size(3), lo :: size(3) >> = << byte >> - << ?\\, ?0 + hi, ?0 + mi, ?0 + lo >> + def escape_char(0) do + '\\0' end - defp hexify(char) when char < 0x10000 do - <> = <> - <> + def escape_char(char) when char < 0x100 do + <> = <> + ['\\x', to_hex(a), to_hex(b)] end - defp hexify(char) when char < 0x1000000 do - <> = <> - <> + def escape_char(char) when char < 0x10000 do + <> = <> + ['\\x{', to_hex(a), to_hex(b), to_hex(c), to_hex(d), ?}] end - defp to_hex(c) when c in 0..9, do: ?0+c - defp to_hex(c) when c in 10..15, do: ?a+c-10 + def escape_char(char) when char < 0x1000000 do + <> = <> + ['\\x{', to_hex(a), to_hex(b), to_hex(c), + to_hex(d), to_hex(e), to_hex(f), ?}] + end - defp append(<>, binary), do: append(t, << binary :: binary, h >>) - defp append(<<>>, binary), do: binary + defp to_hex(c) when c in 0..9, do: ?0 + c + defp to_hex(c) when c in 10..15, do: ?A + c - 10 ## Bitstrings + defp inspect_bitstring("", opts) do + color("<<>>", :binary, opts) + end + defp inspect_bitstring(bitstring, opts) do - each_bit(bitstring, opts.limit, "<<") <> ">>" + left = color("<<", :binary, opts) + right = color(">>", :binary, opts) + nest surround(left, each_bit(bitstring, opts.limit, opts), right), 1 end - defp each_bit(_, 0, acc) do - acc <> "..." 
+ defp each_bit(_, 0, _) do + "..." end - defp each_bit(<>, counter, acc) when t != <<>> do - each_bit(t, decrement(counter), acc <> Integer.to_string(h) <> ", ") + defp each_bit(<<>>, _counter, _opts) do + :doc_nil end - defp each_bit(<>, _counter, acc) do - acc <> Integer.to_string(h) + defp each_bit(<>, _counter, opts) do + Inspect.Integer.inspect(h, opts) end - defp each_bit(<<>>, _counter, acc) do - acc + defp each_bit(<>, counter, opts) do + glue(concat(Inspect.Integer.inspect(h, opts), ","), + each_bit(t, decrement(counter), opts)) end - defp each_bit(bitstring, _counter, acc) do + defp each_bit(bitstring, _counter, opts) do size = bit_size(bitstring) - <> = bitstring - acc <> Integer.to_string(h) <> "::size(" <> Integer.to_string(size) <> ")" + <> = bitstring + Inspect.Integer.inspect(h, opts) <> "::size(" <> Integer.to_string(size) <> ")" end + @compile {:inline, decrement: 1} defp decrement(:infinity), do: :infinity defp decrement(counter), do: counter - 1 end defimpl Inspect, for: List do - @doc ~S""" - Represents a list, checking if it can be printed or not. - If so, a single-quoted representation is returned, - otherwise the brackets syntax is used. Keywords are - printed in keywords syntax. - - ## Examples - - iex> inspect('bar') - "'bar'" - - iex> inspect([0|'bar']) - "[0, 98, 97, 114]" - - iex> inspect([:foo,:bar]) - "[:foo, :bar]" - - """ + def inspect([], opts) do + color("[]", :list, opts) + end + + # TODO: Remove :char_list and :as_char_lists handling in 2.0 + def inspect(term, opts) do + %Inspect.Opts{charlists: lists, char_lists: lists_deprecated, printable_limit: printable_limit} = opts + lists = + if lists == :infer and lists_deprecated != :infer do + case lists_deprecated do + :as_char_lists -> + IO.warn "the :char_lists inspect option and its :as_char_lists " <> + "value are deprecated, use the :charlists option and its " <> + ":as_charlists value instead" + :as_charlists + _ -> + IO.warn "the :char_lists inspect option is deprecated, use :charlists instead" + lists_deprecated + end + else + lists + end - def inspect([], _opts), do: "[]" + open = color("[", :list, opts) + sep = color(",", :list, opts) + close = color("]", :list, opts) - def inspect(thing, %Inspect.Opts{char_lists: lists} = opts) do cond do - lists == :as_char_lists or (lists == :infer and printable?(thing)) -> - << ?', Inspect.BitString.escape(IO.chardata_to_string(thing), ?') :: binary, ?' >> - keyword?(thing) -> - surround_many("[", thing, "]", opts.limit, &keyword(&1, opts)) + lists == :as_charlists or (lists == :infer and printable?(term, printable_limit)) -> + inspected = + case Inspect.BitString.escape(IO.chardata_to_string(term), ?', printable_limit) do + {escaped, ""} -> [?', escaped, ?'] + {escaped, _} -> [?', escaped, ?', " ++ ..."] + end + IO.iodata_to_binary inspected + keyword?(term) -> + surround_many(open, term, close, opts, &keyword/2, sep) true -> - surround_many("[", thing, "]", opts.limit, &to_doc(&1, opts)) + surround_many(open, term, close, opts, &to_doc/2, sep) end end + @doc false def keyword({key, value}, opts) do - concat( - key_to_binary(key) <> ": ", - to_doc(value, opts) - ) + key = color(key_to_binary(key) <> ": ", :atom, opts) + concat(key, to_doc(value, opts)) end + @doc false def keyword?([{key, _value} | rest]) when is_atom(key) do - case Atom.to_char_list(key) do + case Atom.to_charlist(key) do 'Elixir.' 
++ _ -> false _ -> keyword?(rest) end @@ -321,6 +306,27 @@ defimpl Inspect, for: List do def keyword?([]), do: true def keyword?(_other), do: false + @doc false + def printable?(list), do: printable?(list, :infinity) + + @doc false + def printable?(_, 0), do: true + def printable?([char | rest], counter) when char in 32..126, do: printable?(rest, decrement(counter)) + def printable?([?\n | rest], counter), do: printable?(rest, decrement(counter)) + def printable?([?\r | rest], counter), do: printable?(rest, decrement(counter)) + def printable?([?\t | rest], counter), do: printable?(rest, decrement(counter)) + def printable?([?\v | rest], counter), do: printable?(rest, decrement(counter)) + def printable?([?\b | rest], counter), do: printable?(rest, decrement(counter)) + def printable?([?\f | rest], counter), do: printable?(rest, decrement(counter)) + def printable?([?\e | rest], counter), do: printable?(rest, decrement(counter)) + def printable?([?\a | rest], counter), do: printable?(rest, decrement(counter)) + def printable?([], _counter), do: true + def printable?(_, _counter), do: false + + @compile {:inline, decrement: 1} + defp decrement(:infinity), do: :infinity + defp decrement(counter), do: counter - 1 + ## Private defp key_to_binary(key) do @@ -329,25 +335,14 @@ defimpl Inspect, for: List do other -> other end end - - defp printable?([c|cs]) when is_integer(c) and c in 32..126, do: printable?(cs) - defp printable?([?\n|cs]), do: printable?(cs) - defp printable?([?\r|cs]), do: printable?(cs) - defp printable?([?\t|cs]), do: printable?(cs) - defp printable?([?\v|cs]), do: printable?(cs) - defp printable?([?\b|cs]), do: printable?(cs) - defp printable?([?\f|cs]), do: printable?(cs) - defp printable?([?\e|cs]), do: printable?(cs) - defp printable?([?\a|cs]), do: printable?(cs) - defp printable?([]), do: true - defp printable?(_), do: false end defimpl Inspect, for: Tuple do - def inspect({}, _opts), do: "{}" - def inspect(tuple, opts) do - surround_many("{", Tuple.to_list(tuple), "}", opts.limit, &to_doc(&1, opts)) + open = color("{", :tuple, opts) + sep = color(",", :tuple, opts) + close = color("}", :tuple, opts) + surround_many(open, Tuple.to_list(tuple), close, opts, &to_doc/2, sep) end end @@ -358,14 +353,17 @@ defimpl Inspect, for: Map do def inspect(map, name, opts) do map = :maps.to_list(map) - surround_many("%" <> name <> "{", map, "}", opts.limit, traverse_fun(map, opts)) + open = color("%" <> name <> "{", :map, opts) + sep = color(",", :map, opts) + close = color("}", :map, opts) + surround_many(open, map, close, opts, traverse_fun(map), sep) end - defp traverse_fun(list, opts) do + defp traverse_fun(list) do if Inspect.List.keyword?(list) do - &Inspect.List.keyword(&1, opts) + &Inspect.List.keyword/2 else - &to_map(&1, opts) + &to_map/2 end end @@ -378,66 +376,83 @@ defimpl Inspect, for: Map do end defimpl Inspect, for: Integer do - def inspect(thing, _opts) do - Integer.to_string(thing) + def inspect(term, %Inspect.Opts{base: base} = opts) do + inspected = Integer.to_string(term, base_to_value(base)) |> prepend_prefix(base) + color(inspected, :number, opts) + end + + defp base_to_value(base) do + case base do + :binary -> 2 + :decimal -> 10 + :octal -> 8 + :hex -> 16 + end + end + + defp prepend_prefix(value, :decimal), do: value + defp prepend_prefix(value, base) do + prefix = case base do + :binary -> "0b" + :octal -> "0o" + :hex -> "0x" + end + prefix <> value end end defimpl Inspect, for: Float do - def inspect(thing, _opts) do - 
IO.iodata_to_binary(:io_lib_format.fwrite_g(thing)) + def inspect(term, opts) do + inspected = IO.iodata_to_binary(:io_lib_format.fwrite_g(term)) + color(inspected, :number, opts) end end defimpl Inspect, for: Regex do - def inspect(regex, _opts) do - delim = ?/ - concat ["~r", - <>, - regex.opts] + def inspect(regex, opts) do + source = IO.iodata_to_binary(['~r/', escape(regex.source, ?/), ?/, regex.opts]) + color(source, :regex, opts) end defp escape(bin, term), - do: escape(bin, <<>>, term) + do: escape(bin, [], term) defp escape(<> <> rest, buf, term), - do: escape(rest, buf <> <>, term) + do: escape(rest, [buf | [?\\, term]], term) defp escape(<> <> rest, buf, term), - do: escape(rest, buf <> <>, term) + do: escape(rest, [buf | [?\\, term]], term) - # the list of characters is from `String.printable?` impl + # The list of characters is from 'String.printable?' implementation # minus characters treated specially by regex: \s, \d, \b, \e defp escape(<> <> rest, buf, term), - do: escape(rest, <>, term) + do: escape(rest, [buf | '\\n'], term) defp escape(<> <> rest, buf, term), - do: escape(rest, <>, term) + do: escape(rest, [buf | '\\r'], term) defp escape(<> <> rest, buf, term), - do: escape(rest, <>, term) + do: escape(rest, [buf | '\\t'], term) defp escape(<> <> rest, buf, term), - do: escape(rest, <>, term) + do: escape(rest, [buf | '\\v'], term) defp escape(<> <> rest, buf, term), - do: escape(rest, <>, term) + do: escape(rest, [buf | '\\f'], term) defp escape(<> <> rest, buf, term), - do: escape(rest, <>, term) + do: escape(rest, [buf | '\\a'], term) - defp escape(<> <> rest, buf, term) do - charstr = <> - if String.printable?(charstr) and not c in [?\d, ?\b, ?\e] do - escape(rest, buf <> charstr, term) - else - escape(rest, buf <> Inspect.BitString.escape_char(c), term) - end - end + defp escape(<>, buf, term) + when char in 0x20..0x7E + when char in 0xA0..0xD7FF + when char in 0xE000..0xFFFD + when char in 0x10000..0x10FFFF, + do: escape(rest, [buf | <>], term) - defp escape(<> <> rest, buf, term), - do: escape(rest, <>, term) + defp escape(<>, buf, term), + do: escape(rest, [buf | Inspect.BitString.escape_char(char)], term) defp escape(<<>>, buf, _), do: buf end @@ -446,11 +461,12 @@ defimpl Inspect, for: Function do def inspect(function, _opts) do fun_info = :erlang.fun_info(function) mod = fun_info[:module] + name = fun_info[:name] if fun_info[:type] == :external and fun_info[:env] == [] do - "&#{Inspect.Atom.inspect(mod)}.#{fun_info[:name]}/#{fun_info[:arity]}" + "&#{Inspect.Atom.inspect(mod)}.#{escape_name(name)}/#{fun_info[:arity]}" else - case Atom.to_char_list(mod) do + case Atom.to_charlist(mod) do 'elixir_compiler_' ++ _ -> if function_exported?(mod, :__RELATIVE__, 0) do "#Function<#{uniq(fun_info)} in file:#{mod.__RELATIVE__}>" @@ -463,6 +479,41 @@ defimpl Inspect, for: Function do end end + def escape_name(atom) when is_atom(atom) do + string = Atom.to_string(atom) + + case Macro.classify_identifier(atom) do + :callable -> + string + type when type in [:not_callable, :alias] -> + "\"" <> string <> "\"" + :other -> + {escaped, _} = Inspect.BitString.escape(string, ?") + IO.iodata_to_binary [?", escaped, ?"] + end + end + + # Example of this format: -NAME/ARITY-fun-COUNT- + def extract_anonymous_fun_parent(atom) when is_atom(atom) do + extract_anonymous_fun_parent(Atom.to_string(atom)) + end + + def extract_anonymous_fun_parent("-" <> rest) do + [trailing | reversed] = + rest + |> String.split("/") + |> Enum.reverse() + + case String.split(trailing, "-") do + [arity, 
_inner, _count, ""] -> + {reversed |> Enum.reverse |> Enum.join("/") |> String.to_atom(), arity} + _other -> + :error + end + end + + def extract_anonymous_fun_parent(other) when is_binary(other), do: :error + defp default_inspect(mod, fun_info) do "#Function<#{uniq(fun_info)}/#{fun_info[:arity]} in " <> "#{Inspect.Atom.inspect(mod)}#{extract_name(fun_info[:name])}>" @@ -473,10 +524,11 @@ defimpl Inspect, for: Function do end defp extract_name(name) do - name = Atom.to_string(name) - case :binary.split(name, "-", [:global]) do - ["", name | _] -> "." <> name - _ -> "." <> name + case extract_anonymous_fun_parent(name) do + {name, arity} -> + "." <> escape_name(name) <> "/" <> arity + :error -> + "." <> escape_name(name) end end @@ -494,7 +546,7 @@ end defimpl Inspect, for: Port do def inspect(port, _opts) do - IO.iodata_to_binary :erlang.port_to_list(port) + IO.iodata_to_binary(:erlang.port_to_list(port)) end end @@ -515,7 +567,8 @@ defimpl Inspect, for: Any do dunder -> if :maps.keys(dunder) == :maps.keys(map) do pruned = :maps.remove(:__exception__, :maps.remove(:__struct__, map)) - Inspect.Map.inspect(pruned, Inspect.Atom.inspect(struct, opts), opts) + colorless_opts = %{opts | syntax_colors: []} + Inspect.Map.inspect(pruned, Inspect.Atom.inspect(struct, colorless_opts), opts) else Inspect.Map.inspect(map, opts) end diff --git a/lib/elixir/lib/inspect/algebra.ex b/lib/elixir/lib/inspect/algebra.ex index 3a3b915d786..d57e5624535 100644 --- a/lib/elixir/lib/inspect/algebra.ex +++ b/lib/elixir/lib/inspect/algebra.ex @@ -4,8 +4,8 @@ defmodule Inspect.Opts do The following fields are available: - * `:structs` - when false, structs are not formatted by the inspect - protocol, they are instead printed as maps, defaults to true. + * `:structs` - when `false`, structs are not formatted by the inspect + protocol, they are instead printed as maps, defaults to `true`. * `:binaries` - when `:as_strings` all binaries will be printed as strings, non-printable bytes will be escaped. @@ -15,38 +15,90 @@ defmodule Inspect.Opts do When the default `:infer`, the binary will be printed as a string if it is printable, otherwise in bit syntax. - * `:char_lists` - when `:as_char_lists` all lists will be printed as char + * `:charlists` - when `:as_charlists` all lists will be printed as char lists, non-printable elements will be escaped. When `:as_lists` all lists will be printed as lists. - When the default `:infer`, the list will be printed as a char list if it + When the default `:infer`, the list will be printed as a charlist if it is printable, otherwise as list. * `:limit` - limits the number of items that are printed for tuples, - bitstrings, and lists, does not apply to strings nor char lists, defaults - to 50. + bitstrings, maps, lists and any other collection of items. It does not + apply to strings nor charlists and defaults to 50. - * `:pretty` - if set to true enables pretty printing, defaults to false. + * `:printable_limit` - limits the number of bytes that are printed for strings + and char lists. Defaults to 1024. - * `:width` - defaults to the 80 characters. + * `:pretty` - if set to `true` enables pretty printing, defaults to `false`. + + * `:width` - defaults to 80 characters, used when pretty is `true` or when + printing to IO devices. Set to 0 to force each item to be printed on its + own line. + + * `:base` - prints integers as `:binary`, `:octal`, `:decimal`, or `:hex`, + defaults to `:decimal`. 
When inspecting binaries any `:base` other than + `:decimal` implies `binaries: :as_binaries`. + + * `:safe` - when `false`, failures while inspecting structs will be raised + as errors instead of being wrapped in the `Inspect.Error` exception. This + is useful when debugging failures and crashes for custom inspect + implementations + + * `:syntax_colors` - when set to a keyword list of colors the output will + be colorized. The keys are types and the values are the colors to use for + each type. e.g. `[number: :red, atom: :blue]`. Types can include + `:number`, `:atom`, `regex`, `:tuple`, `:map`, `:list`, and `:reset`. + Colors can be any `t:IO.ANSI.ansidata/0` as accepted by `IO.ANSI.format/1`. """ - defstruct structs: true :: boolean, - binaries: :infer :: :infer | :as_binaries | :as_strings, - char_lists: :infer :: :infer | :as_lists | :as_char_lists, - limit: 50 :: pos_integer, - width: 80 :: pos_integer | :infinity, - pretty: false :: boolean + # TODO: Remove :char_lists key by 2.0 + defstruct structs: true, + binaries: :infer, + charlists: :infer, + char_lists: :infer, + limit: 50, + printable_limit: 1024, + width: 80, + base: :decimal, + pretty: false, + safe: true, + syntax_colors: [] + + @type color_key :: atom + + # TODO: Remove :char_lists key and :as_char_lists value by 2.0 + @type t :: %__MODULE__{ + structs: boolean, + binaries: :infer | :as_binaries | :as_strings, + charlists: :infer | :as_lists | :as_charlists, + char_lists: :infer | :as_lists | :as_char_lists, + limit: pos_integer | :infinity, + printable_limit: pos_integer | :infinity, + width: pos_integer | :infinity, + base: :decimal | :binary | :hex | :octal, + pretty: boolean, + safe: boolean, + syntax_colors: [{color_key, IO.ANSI.ansidata}] + } +end + +defmodule Inspect.Error do + @moduledoc """ + Raised when a struct cannot be inspected. + """ + defexception [:message] end defmodule Inspect.Algebra do @moduledoc ~S""" A set of functions for creating and manipulating algebra - documents, as described in ["Strictly Pretty" (2000) by Christian Lindig][0]. + documents. - An algebra document is represented by an `Inspect.Algebra` node - or a regular string. + This module implements the functionality described in + ["Strictly Pretty" (2000) by Christian Lindig][0] with small + additions, like support for String nodes, and a custom + rendering function that maximises horizontal space use. iex> Inspect.Algebra.empty :doc_nil @@ -58,26 +110,26 @@ defmodule Inspect.Algebra do elements together and render them: iex> doc = Inspect.Algebra.concat(Inspect.Algebra.empty, "foo") - iex> Inspect.Algebra.pretty(doc, 80) - "foo" + iex> Inspect.Algebra.format(doc, 80) + ["foo"] The functions `nest/2`, `space/2` and `line/2` help you put the document together into a rigid structure. However, the document - algebra gets interesting when using functions like `break/2`, which + algebra gets interesting when using functions like `break/1`, which converts the given string into a line break depending on how much space there is to print. Let's glue two docs together with a break and then render it: iex> doc = Inspect.Algebra.glue("a", " ", "b") - iex> Inspect.Algebra.pretty(doc, 80) - "a b" + iex> Inspect.Algebra.format(doc, 80) + ["a", " ", "b"] Notice the break was represented as is, because we haven't reached a line limit. 
Once we do, it is replaced by a newline: iex> doc = Inspect.Algebra.glue(String.duplicate("a", 20), " ", "b") - iex> Inspect.Algebra.pretty(doc, 10) - "aaaaaaaaaaaaaaaaaaaa\nb" + iex> Inspect.Algebra.format(doc, 10) + ["aaaaaaaaaaaaaaaaaaaa", "\n", "b"] Finally, this module also contains Elixir related functions, a bit tied to Elixir formatting, namely `surround/3` and `surround_many/5`. @@ -89,7 +141,7 @@ defmodule Inspect.Algebra do `:flat` (breaks as spaces) and `:break` (breaks as newlines). Implementing the same logic in a strict language such as Elixir leads to an exponential growth of possible documents, unless document groups - are encoded explictly as `:flat` or `:break`. Those groups are then reduced + are encoded explicitly as `:flat` or `:break`. Those groups are then reduced to a simple document, where the layout is already decided, per [Lindig][0]. This implementation slightly changes the semantic of Lindig's algorithm @@ -112,11 +164,11 @@ defmodule Inspect.Algebra do @tail_separator " |" @newline "\n" @nesting 1 - @break " " + @space " " - # Functional interface to `doc` records + # Functional interface to "doc" records - @type t :: :doc_nil | :doc_line | doc_cons | doc_nest | doc_break | doc_group | binary + @type t :: :doc_nil | :doc_line | doc_cons | doc_nest | doc_break | doc_group | doc_color | binary @typep doc_cons :: {:doc_cons, t, t} defmacrop doc_cons(left, right) do @@ -125,7 +177,7 @@ defmodule Inspect.Algebra do @typep doc_nest :: {:doc_nest, t, non_neg_integer} defmacrop doc_nest(doc, indent) do - quote do: {:doc_nest, unquote(doc), unquote(indent) } + quote do: {:doc_nest, unquote(doc), unquote(indent)} end @typep doc_break :: {:doc_break, binary} @@ -138,6 +190,11 @@ defmodule Inspect.Algebra do quote do: {:doc_group, unquote(group)} end + @typep doc_color :: {:doc_color, t, IO.ANSI.ansidata} + defmacrop doc_color(doc, color) do + quote do: {:doc_color, unquote(doc), unquote(color)} + end + defmacrop is_doc(doc) do if Macro.Env.in_guard?(__CALLER__) do do_is_doc(doc) @@ -155,25 +212,54 @@ defmodule Inspect.Algebra do is_binary(unquote(doc)) or unquote(doc) in [:doc_nil, :doc_line] or (is_tuple(unquote(doc)) and - elem(unquote(doc), 0) in [:doc_cons, :doc_nest, :doc_break, :doc_group]) + elem(unquote(doc), 0) in [:doc_cons, :doc_nest, :doc_break, :doc_group, :doc_color]) end end @doc """ - Converts an Elixir structure to an algebra document - according to the inspect protocol. + Converts an Elixir term to an algebra document + according to the `Inspect` protocol. """ @spec to_doc(any, Inspect.Opts.t) :: t + def to_doc(term, opts) + def to_doc(%{__struct__: struct} = map, %Inspect.Opts{} = opts) when is_atom(struct) do if opts.structs do try do Inspect.inspect(map, opts) rescue e -> - res = Inspect.Map.inspect(map, opts) - raise ArgumentError, - "Got #{inspect e.__struct__} with message " <> - "\"#{Exception.message(e)}\" while inspecting #{pretty(res, opts.width)}" + stacktrace = System.stacktrace + + # Because we try to raise a nice error message in case + # we can't inspect a struct, there is a chance the error + # message itself relies on the struct being printed, so + # we need to trap the inspected messages to guarantee + # we won't try to render any failed instruct when building + # the error message. 
+ if Process.get(:inspect_trap) do + Inspect.Map.inspect(map, opts) + else + try do + Process.put(:inspect_trap, true) + + res = Inspect.Map.inspect(map, opts) + res = IO.iodata_to_binary(format(res, :infinity)) + + exception = Inspect.Error.exception( + message: "got #{inspect e.__struct__} with message " <> + "#{inspect Exception.message(e)} while inspecting #{res}" + ) + + if opts.safe do + Inspect.inspect(exception, opts) + else + reraise(exception, stacktrace) + end + after + Process.delete(:inspect_trap) + end + end end else Inspect.Map.inspect(map, opts) @@ -196,94 +282,147 @@ defmodule Inspect.Algebra do @spec empty() :: :doc_nil def empty, do: :doc_nil - @doc """ - Concatenates two document entities. + @doc ~S""" + Concatenates two document entities returning a new document. ## Examples - iex> doc = Inspect.Algebra.concat "Tasteless", "Artosis" - iex> Inspect.Algebra.pretty(doc, 80) - "TastelessArtosis" + iex> doc = Inspect.Algebra.concat("hello", "world") + iex> Inspect.Algebra.format(doc, 80) + ["hello", "world"] """ - @spec concat(t, t) :: doc_cons - def concat(x, y) when is_doc(x) and is_doc(y) do - doc_cons(x, y) + @spec concat(t, t) :: t + def concat(doc1, doc2) when is_doc(doc1) and is_doc(doc2) do + doc_cons(doc1, doc2) end - @doc """ - Concatenates a list of documents. + @doc ~S""" + Concatenates a list of documents returning a new document. + + ## Examples + + iex> doc = Inspect.Algebra.concat(["a", "b", "c"]) + iex> Inspect.Algebra.format(doc, 80) + ["a", "b", "c"] + """ - @spec concat([t]) :: doc_cons - def concat(docs) do - folddoc(docs, &concat(&1, &2)) + @spec concat([t]) :: t + def concat(docs) when is_list(docs) do + fold_doc(docs, &concat(&1, &2)) end @doc ~S""" - Nests document entity `x` positions deep. + Colors a document if the `color_key` has a color in the options. + """ + @spec color(t, Inspect.Opts.color_key, Inspect.Opts.t) :: doc_color + def color(doc, color_key, %Inspect.Opts{syntax_colors: syntax_colors}) when is_doc(doc) do + if precolor = Keyword.get(syntax_colors, color_key) do + postcolor = Keyword.get(syntax_colors, :reset, :reset) + concat(doc_color(doc, precolor), doc_color(empty(), postcolor)) + else + doc + end + end + + @doc ~S""" + Nests the given document at the given `level`. Nesting will be appended to the line breaks. ## Examples iex> doc = Inspect.Algebra.nest(Inspect.Algebra.glue("hello", "world"), 5) - iex> Inspect.Algebra.pretty(doc, 5) - "hello\n world" + iex> Inspect.Algebra.format(doc, 5) + ["hello", "\n ", "world"] """ @spec nest(t, non_neg_integer) :: doc_nest - def nest(x, 0) when is_doc(x) do - x + def nest(doc, level) + + def nest(doc, 0) when is_doc(doc) do + doc end - def nest(x, i) when is_doc(x) and is_integer(i) do - doc_nest(x, i) + def nest(doc, level) when is_doc(doc) and is_integer(level) and level > 0 do + doc_nest(doc, level) end @doc ~S""" - Document entity representing a break. + Returns a document entity representing a break based on the given + `string`. - This break can be rendered as a linebreak or as spaces, + This break can be rendered as a linebreak or as the given `string`, depending on the `mode` of the chosen layout or the provided separator. 
## Examples - Let's glue two docs together with a break and then render it: + Let's create a document by concatenating two strings with a break between + them: - iex> doc = Inspect.Algebra.glue("a", " ", "b") - iex> Inspect.Algebra.pretty(doc, 80) - "a b" + iex> doc = Inspect.Algebra.concat(["a", Inspect.Algebra.break("\t"), "b"]) + iex> Inspect.Algebra.format(doc, 80) + ["a", "\t", "b"] - Notice the break was represented as is, because we haven't reached - a line limit. Once we do, it is replaced by a newline: + Notice the break was represented with the given string, because we didn't + reach a line limit. Once we do, it is replaced by a newline: - iex> doc = Inspect.Algebra.glue(String.duplicate("a", 20), " ", "b") - iex> Inspect.Algebra.pretty(doc, 10) - "aaaaaaaaaaaaaaaaaaaa\nb" + iex> break = Inspect.Algebra.break("\t") + iex> doc = Inspect.Algebra.concat([String.duplicate("a", 20), break, "b"]) + iex> Inspect.Algebra.format(doc, 10) + ["aaaaaaaaaaaaaaaaaaaa", "\n", "b"] """ @spec break(binary) :: doc_break - def break(s) when is_binary(s), do: doc_break(s) + def break(string) when is_binary(string), do: doc_break(string) + @doc ~S""" + Returns a document entity with the `" "` string as break. + + See `break/1` for more information. + """ @spec break() :: doc_break - def break(), do: doc_break(@break) + def break(), do: doc_break(@space) + + @doc ~S""" + Glues two documents together inserting `" "` as a break between them. + + This means the two documents will be separated by `" "` in case they + fit in the same line. Otherwise a line break is used. + + ## Examples + + iex> doc = Inspect.Algebra.glue("hello", "world") + iex> Inspect.Algebra.format(doc, 80) + ["hello", " ", "world"] - @doc """ - Inserts a break between two docs. See `break/1` for more info. """ - @spec glue(t, t) :: doc_cons - def glue(x, y), do: concat(x, concat(break, y)) + @spec glue(t, t) :: t + def glue(doc1, doc2), do: concat(doc1, concat(break(), doc2)) + + @doc ~S""" + Glues two documents (`doc1` and `doc2`) together inserting the given + break `break_string` between them. + + For more information on how the break is inserted, see `break/1`. + + ## Examples + + iex> doc = Inspect.Algebra.glue("hello", "\t", "world") + iex> Inspect.Algebra.format(doc, 80) + ["hello", "\t", "world"] - @doc """ - Inserts a break, passed as the second argument, between two docs, - the first and the third arguments. """ - @spec glue(t, binary, t) :: doc_cons - def glue(x, g, y) when is_binary(g), do: concat(x, concat(break(g), y)) + @spec glue(t, binary, t) :: t + def glue(doc1, break_string, doc2) when is_binary(break_string), + do: concat(doc1, concat(break(break_string), doc2)) @doc ~S""" - Returns a group containing the specified document. + Returns a group containing the specified document `doc`. + + Documents in a group are attempted to be rendered together + to the best of the renderer ability. ## Examples @@ -303,184 +442,216 @@ defmodule Inspect.Algebra do ...> "B" ...> ) ...> )) - iex> Inspect.Algebra.pretty(doc, 80) - "Hello, A B" - iex> Inspect.Algebra.pretty(doc, 6) - "Hello,\nA B" + iex> Inspect.Algebra.format(doc, 80) + ["Hello,", " ", "A", " ", "B"] + iex> Inspect.Algebra.format(doc, 6) + ["Hello,", "\n", "A", " ", "B"] """ @spec group(t) :: doc_group - def group(d) when is_doc(d) do - doc_group(d) + def group(doc) when is_doc(doc) do + doc_group(doc) end - @doc """ - Inserts a mandatory single space between two document entities. + @doc ~S""" + Inserts a mandatory single space between two documents. 
## Examples - iex> doc = Inspect.Algebra.space "Hughes", "Wadler" - iex> Inspect.Algebra.pretty(doc, 80) - "Hughes Wadler" + iex> doc = Inspect.Algebra.space("Hughes", "Wadler") + iex> Inspect.Algebra.format(doc, 5) + ["Hughes", " ", "Wadler"] """ - @spec space(t, t) :: doc_cons - def space(x, y), do: concat(x, concat(" ", y)) + @spec space(t, t) :: t + def space(doc1, doc2), do: concat(doc1, concat(" ", doc2)) @doc ~S""" - Inserts a mandatory linebreak between two document entities. + Inserts a mandatory linebreak between two documents. ## Examples - iex> doc = Inspect.Algebra.line "Hughes", "Wadler" - iex> Inspect.Algebra.pretty(doc, 80) - "Hughes\nWadler" + iex> doc = Inspect.Algebra.line("Hughes", "Wadler") + iex> Inspect.Algebra.format(doc, 80) + ["Hughes", "\n", "Wadler"] """ - @spec line(t, t) :: doc_cons - def line(x, y), do: concat(x, concat(:doc_line, y)) + @spec line(t, t) :: t + def line(doc1, doc2), do: concat(doc1, concat(:doc_line, doc2)) - @doc """ - Folds a list of document entities into a document entity - using a function that is passed as the first argument. + @doc ~S""" + Folds a list of documents into a document using the given folder function. + + The list of documents is folded "from the right"; in that, this function is + similar to `List.foldr/3`, except that it doesn't expect an initial + accumulator and uses the last element of `docs` as the initial accumulator. ## Examples - iex> doc = ["A", "B"] - iex> doc = Inspect.Algebra.folddoc(doc, fn(x,y) -> - ...> Inspect.Algebra.concat [x, "!", y] + iex> docs = ["A", "B", "C"] + iex> docs = Inspect.Algebra.fold_doc(docs, fn(doc, acc) -> + ...> Inspect.Algebra.concat([doc, "!", acc]) ...> end) - iex> Inspect.Algebra.pretty(doc, 80) - "A!B" + iex> Inspect.Algebra.format(docs, 80) + ["A", "!", "B", "!", "C"] """ - @spec folddoc([t], ((t, t) -> t)) :: t - def folddoc([], _), do: empty - def folddoc([doc], _), do: doc - def folddoc([d|ds], f), do: f.(d, folddoc(ds, f)) + @spec fold_doc([t], ((t, t) -> t)) :: t + def fold_doc(docs, folder_fun) + + def fold_doc([], _folder_fun), + do: empty() + def fold_doc([doc], _folder_fun), + do: doc + def fold_doc([doc | docs], folder_fun) when is_function(folder_fun, 2), + do: folder_fun.(doc, fold_doc(docs, folder_fun)) # Elixir conveniences @doc ~S""" Surrounds a document with characters. - Puts the document between left and right enclosing and nesting it. - The document is marked as a group, to show the maximum as possible - concisely together. + Puts the given document `doc` between the `left` and `right` documents enclosing + and nesting it. The document is marked as a group, to show the maximum as + possible concisely together. ## Examples - iex> doc = Inspect.Algebra.surround "[", Inspect.Algebra.glue("a", "b"), "]" - iex> Inspect.Algebra.pretty(doc, 3) - "[a\n b]" + iex> doc = Inspect.Algebra.surround("[", Inspect.Algebra.glue("a", "b"), "]") + iex> Inspect.Algebra.format(doc, 3) + ["[", "a", "\n ", "b", "]"] """ - @spec surround(binary, t, binary) :: t - def surround(left, doc, right) do - group concat left, concat(nest(doc, @nesting), right) + @spec surround(t, t, t) :: t + def surround(left, doc, right) when is_doc(left) and is_doc(doc) and is_doc(right) do + group(concat(left, concat(nest(doc, @nesting), right))) end @doc ~S""" - Maps and glues a collection of items together using the given separator - and surrounds them. A limit can be passed which, once reached, stops - gluing and outputs "..." instead. + Maps and glues a collection of items. 
+ + It uses the given `left` and `right` documents as surrounding and the + separator document `separator` to separate items in `docs`. A limit can be + passed: when this limit is reached, this function stops gluing and outputs + `"..."` instead. ## Examples - iex> doc = Inspect.Algebra.surround_many("[", Enum.to_list(1..5), "]", :infinity, &Integer.to_string(&1)) - iex> Inspect.Algebra.pretty(doc, 5) + iex> doc = Inspect.Algebra.surround_many("[", Enum.to_list(1..5), "]", + ...> %Inspect.Opts{limit: :infinity}, fn i, _opts -> to_string(i) end) + iex> Inspect.Algebra.format(doc, 5) |> IO.iodata_to_binary "[1,\n 2,\n 3,\n 4,\n 5]" - iex> doc = Inspect.Algebra.surround_many("[", Enum.to_list(1..5), "]", 3, &Integer.to_string(&1)) - iex> Inspect.Algebra.pretty(doc, 20) + iex> doc = Inspect.Algebra.surround_many("[", Enum.to_list(1..5), "]", + ...> %Inspect.Opts{limit: 3}, fn i, _opts -> to_string(i) end) + iex> Inspect.Algebra.format(doc, 20) |> IO.iodata_to_binary "[1, 2, 3, ...]" - iex> doc = Inspect.Algebra.surround_many("[", Enum.to_list(1..5), "]", 3, &Integer.to_string(&1), "!") - iex> Inspect.Algebra.pretty(doc, 20) + iex> doc = Inspect.Algebra.surround_many("[", Enum.to_list(1..5), "]", + ...> %Inspect.Opts{limit: 3}, fn i, _opts -> to_string(i) end, "!") + iex> Inspect.Algebra.format(doc, 20) |> IO.iodata_to_binary "[1! 2! 3! ...]" """ - @spec surround_many(binary, [any], binary, integer | :infinity, (term -> t), binary) :: t - def surround_many(left, docs, right, limit, fun, separator \\ @surround_separator) + @spec surround_many(t, [any], t, Inspect.Opts.t, (term, Inspect.Opts.t -> t), t) :: t + def surround_many(left, docs, right, %Inspect.Opts{} = opts, fun, separator \\ @surround_separator) + when is_doc(left) and is_list(docs) and is_doc(right) and is_function(fun, 2) and is_doc(separator) do + do_surround_many(left, docs, right, opts.limit, opts, fun, separator) + end - def surround_many(left, [], right, _, _fun, _) do + defp do_surround_many(left, [], right, _, _opts, _fun, _) do concat(left, right) end - def surround_many(left, docs, right, limit, fun, sep) do - surround(left, surround_many(docs, limit, fun, sep), right) + defp do_surround_many(left, docs, right, limit, opts, fun, sep) do + surround(left, do_surround_many(docs, limit, opts, fun, sep), right) end - defp surround_many(_, 0, _fun, _sep) do + defp do_surround_many(_, 0, _opts, _fun, _sep) do "..." 
end - defp surround_many([h], _limit, fun, _sep) do - fun.(h) + defp do_surround_many([], _limit, _opts, _fun, _sep) do + :doc_nil + end + + defp do_surround_many([h], limit, opts, fun, _sep) do + fun.(h, %{opts | limit: limit}) end - defp surround_many([h|t], limit, fun, sep) when is_list(t) do - glue( - concat(fun.(h), sep), - surround_many(t, decrement(limit), fun, sep) - ) + defp do_surround_many([h | t], limit, opts, fun, sep) when is_list(t) do + limit = decrement(limit) + h = fun.(h, %{opts | limit: limit}) + t = do_surround_many(t, limit, opts, fun, sep) + do_join(h, t, sep) end - defp surround_many([h|t], _limit, fun, _sep) do - glue( - concat(fun.(h), @tail_separator), - fun.(t) - ) + defp do_surround_many([h | t], limit, opts, fun, _sep) do + limit = decrement(limit) + h = fun.(h, %{opts | limit: limit}) + t = fun.(t, %{opts | limit: limit}) + do_join(h, t, @tail_separator) end + defp do_join(:doc_nil, :doc_nil, _), do: :doc_nil + defp do_join(h, :doc_nil, _), do: h + defp do_join(:doc_nil, t, _), do: t + defp do_join(h, t, sep), do: glue(concat(h, sep), t) + defp decrement(:infinity), do: :infinity defp decrement(counter), do: counter - 1 - @doc """ - The pretty printing function. + @doc ~S""" + Formats a given document for a given width. Takes the maximum width and a document to print as its arguments - and returns the string representation of the best layout for the + and returns an IO data representation of the best layout for the document to fit in the given width. + + ## Examples + + iex> doc = Inspect.Algebra.glue("hello", " ", "world") + iex> Inspect.Algebra.format(doc, 30) |> IO.iodata_to_binary() + "hello world" + iex> Inspect.Algebra.format(doc, 10) |> IO.iodata_to_binary() + "hello\nworld" + """ - @spec pretty(t, non_neg_integer | :infinity) :: binary - def pretty(d, w) do - sdoc = format w, 0, [{0, default_mode(w), doc_group(d)}] - render(sdoc) + @spec format(t, non_neg_integer | :infinity) :: iodata + def format(doc, width) when is_doc(doc) and (width == :infinity or width >= 0) do + format(width, 0, [{0, default_mode(width), doc_group(doc)}]) end defp default_mode(:infinity), do: :flat defp default_mode(_), do: :break - # Rendering and internal helpers - # Record representing the document mode to be rendered: flat or broken @typep mode :: :flat | :break - @doc false @spec fits?(integer, [{integer, mode, t}]) :: boolean - def fits?(w, _) when w < 0, do: false - def fits?(_, []), do: true - def fits?(_, [{_, _, :doc_line} | _]), do: true - def fits?(w, [{_, _, :doc_nil} | t]), do: fits?(w, t) - def fits?(w, [{i, m, doc_cons(x, y)} | t]), do: fits?(w, [{i, m, x} | [{i, m, y} | t]]) - def fits?(w, [{i, m, doc_nest(x, j)} | t]), do: fits?(w, [{i + j, m, x} | t]) - def fits?(w, [{i, _, doc_group(x)} | t]), do: fits?(w, [{i, :flat, x} | t]) - def fits?(w, [{_, _, s} | t]) when is_binary(s), do: fits?((w - byte_size s), t) - def fits?(w, [{_, :flat, doc_break(s)} | t]), do: fits?((w - byte_size s), t) - def fits?(_, [{_, :break, doc_break(_)} | _]), do: true - - @doc false + defp fits?(w, _) when w < 0, do: false + defp fits?(_, []), do: true + defp fits?(_, [{_, _, :doc_line} | _]), do: true + defp fits?(w, [{_, _, :doc_nil} | t]), do: fits?(w, t) + defp fits?(w, [{i, m, doc_cons(x, y)} | t]), do: fits?(w, [{i, m, x} | [{i, m, y} | t]]) + defp fits?(w, [{i, m, doc_color(x, _)} | t]), do: fits?(w, [{i, m, x} | t]) + defp fits?(w, [{i, m, doc_nest(x, j)} | t]), do: fits?(w, [{i + j, m, x} | t]) + defp fits?(w, [{i, _, doc_group(x)} | t]), do: fits?(w, [{i, :flat, x} | t]) 
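# Annotation (not part of the patch): fits?/2 is the classic "does the rest
# of the document fit?" predicate from Lindig's algorithm. It walks the
# pending entries, flattening groups, subtracting the byte size of strings
# and flat breaks from the width budget `w`, and succeeding early on a line
# or on a break already in :break mode. format/3 below consults it whenever
# it reaches a break in :break mode, to choose between emitting the break
# string and emitting a newline plus indentation.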
+ defp fits?(w, [{_, _, s} | t]) when is_binary(s), do: fits?((w - byte_size(s)), t) + defp fits?(w, [{_, :flat, doc_break(s)} | t]), do: fits?((w - byte_size(s)), t) + defp fits?(_, [{_, :break, doc_break(_)} | _]), do: true + @spec format(integer | :infinity, integer, [{integer, mode, t}]) :: [binary] - def format(_, _, []), do: [] - def format(w, _, [{i, _, :doc_line} | t]), do: [indent(i) | format(w, i, t)] - def format(w, k, [{_, _, :doc_nil} | t]), do: format(w, k, t) - def format(w, k, [{i, m, doc_cons(x, y)} | t]), do: format(w, k, [{i, m, x} | [{i, m, y} | t]]) - def format(w, k, [{i, m, doc_nest(x, j)} | t]), do: format(w, k, [{i + j, m, x} | t]) - def format(w, k, [{i, m, doc_group(x)} | t]), do: format(w, k, [{i, m, x} | t]) - def format(w, k, [{_, _, s} | t]) when is_binary(s), do: [s | format(w, (k + byte_size s), t)] - def format(w, k, [{_, :flat, doc_break(s)} | t]), do: [s | format(w, (k + byte_size s), t)] - def format(w, k, [{i, :break, doc_break(s)} | t]) do + defp format(_, _, []), do: [] + defp format(w, _, [{i, _, :doc_line} | t]), do: [indent(i) | format(w, i, t)] + defp format(w, k, [{_, _, :doc_nil} | t]), do: format(w, k, t) + defp format(w, k, [{i, m, doc_cons(x, y)} | t]), do: format(w, k, [{i, m, x} | [{i, m, y} | t]]) + defp format(w, k, [{i, m, doc_nest(x, j)} | t]), do: format(w, k, [{i + j, m, x} | t]) + defp format(w, k, [{i, m, doc_group(x)} | t]), do: format(w, k, [{i, m, x} | t]) + defp format(w, k, [{i, m, doc_color(x, c)} | t]), do: [ansi(c) | format(w, k, [{i, m, x} | t])] + defp format(w, k, [{_, _, s} | t]) when is_binary(s), do: [s | format(w, (k + byte_size(s)), t)] + defp format(w, k, [{_, :flat, doc_break(s)} | t]), do: [s | format(w, (k + byte_size(s)), t)] + defp format(w, k, [{i, :break, doc_break(s)} | t]) do k = k + byte_size(s) if w == :infinity or fits?(w - k, t) do @@ -490,12 +661,10 @@ defmodule Inspect.Algebra do end end + defp ansi(color) do + IO.ANSI.format_fragment(color, true) + end + defp indent(0), do: @newline defp indent(i), do: @newline <> :binary.copy(" ", i) - - @doc false - @spec render([binary]) :: binary - def render(sdoc) do - IO.iodata_to_binary sdoc - end end diff --git a/lib/elixir/lib/integer.ex b/lib/elixir/lib/integer.ex index 85177480fae..710087a46c4 100644 --- a/lib/elixir/lib/integer.ex +++ b/lib/elixir/lib/integer.ex @@ -6,73 +6,276 @@ defmodule Integer do import Bitwise @doc """ - Determines if an integer is odd. + Determines if `integer` is odd. + + Returns `true` if the given `integer` is an odd number, + otherwise it returns `false`. + + Allowed in guard clauses. + + ## Examples + + iex> Integer.is_odd(5) + true + + iex> Integer.is_odd(6) + false + + iex> Integer.is_odd(-5) + true + + iex> Integer.is_odd(0) + false + + """ + defmacro is_odd(integer) do + quote do: (unquote(integer) &&& 1) == 1 + end + + @doc """ + Determines if an `integer` is even. + + Returns `true` if the given `integer` is an even number, + otherwise it returns `false`. + + Allowed in guard clauses. + + ## Examples + + iex> Integer.is_even(10) + true + + iex> Integer.is_even(5) + false + + iex> Integer.is_even(-10) + true + + iex> Integer.is_even(0) + true + + """ + defmacro is_even(integer) do + quote do: (unquote(integer) &&& 1) == 0 + end + + @doc """ + Computes the modulo remainder of an integer division. + + `Integer.mod/2` uses floored division, which means that + the result will always have the sign of the `divisor`. 
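To make the floored-versus-truncated distinction concrete, here is a small illustrative comparison (an annotation, not part of the patch) of `Integer.mod/2`/`Integer.floor_div/2` against `Kernel.rem/2`/`Kernel.div/2` for a negative dividend:

    iex> {div(-9, 2), Integer.floor_div(-9, 2)}
    {-4, -5}
    iex> {rem(-9, 2), Integer.mod(-9, 2)}
    {-1, 1}

In both pairs the identity `dividend == divisor * quotient + remainder` holds; they differ only in how the quotient is rounded.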
+ + Raises an `ArithmeticError` exception if one of the arguments is not an + integer, or when the `divisor` is `0`. + + ## Examples + + iex> Integer.mod(5, 2) + 1 + iex> Integer.mod(6, -4) + -2 + + """ + @spec mod(integer, neg_integer | pos_integer) :: integer + def mod(dividend, divisor) do + remainder = rem(dividend, divisor) + if remainder * divisor < 0 do + remainder + divisor + else + remainder + end + end + + @doc """ + Performs a floored integer division. + + Raises an `ArithmeticError` exception if one of the arguments is not an + integer, or when the `divisor` is `0`. + + `Integer.floor_div/2` performs *floored* integer division. This means that + the result is always rounded towards negative infinity. + + If you want to perform truncated integer division (rounding towards zero), + use `Kernel.div/2` instead. + + ## Examples + + iex> Integer.floor_div(5, 2) + 2 + iex> Integer.floor_div(6, -4) + -2 + iex> Integer.floor_div(-99, 2) + -50 + + """ + @spec floor_div(integer, neg_integer | pos_integer) :: integer + def floor_div(dividend, divisor) do + if (dividend * divisor < 0) and rem(dividend, divisor) != 0 do + div(dividend, divisor) - 1 + else + div(dividend, divisor) + end + end + + @doc """ + Returns the ordered digits for the given `integer`. + + An optional `base` value may be provided representing the radix for the returned + digits. This one must be an integer >= 2. + + ## Examples + + iex> Integer.digits(123) + [1, 2, 3] + + iex> Integer.digits(170, 2) + [1, 0, 1, 0, 1, 0, 1, 0] + + iex> Integer.digits(-170, 2) + [-1, 0, -1, 0, -1, 0, -1, 0] - Returns `true` if `n` is an odd number, otherwise `false`. - Implemented as a macro so it is allowed in guard clauses. """ - defmacro odd?(n) do - quote do: (unquote(n) &&& 1) == 1 + @spec digits(integer, pos_integer) :: [integer, ...] + def digits(integer, base \\ 10) + when is_integer(integer) and is_integer(base) and base >= 2 do + do_digits(integer, base, []) end + defp do_digits(digit, base, []) when abs(digit) < base, + do: [digit] + defp do_digits(digit, base, []) when digit == -base, + do: [-1, 0] + defp do_digits(base, base, []), + do: [1, 0] + defp do_digits(0, _base, acc), + do: acc + defp do_digits(integer, base, acc), + do: do_digits(div(integer, base), base, [rem(integer, base) | acc]) + @doc """ - Determines if an integer is even. + Returns the integer represented by the ordered `digits`. + + An optional `base` value may be provided representing the radix for the `digits`. + This one can be an integer >= 2. + + ## Examples + + iex> Integer.undigits([1, 2, 3]) + 123 + + iex> Integer.undigits([1, 4], 16) + 20 + + iex> Integer.undigits([]) + 0 - Returns `true` if `n` is an even number, otherwise `false`. - Implemented as a macro so it is allowed in guard clauses. 
""" - defmacro even?(n) do - quote do: (unquote(n) &&& 1) == 0 + @spec undigits([integer], integer) :: integer + def undigits(digits, base \\ 10) when is_list(digits) and is_integer(base) and base >= 2 do + do_undigits(digits, base, 0) end + defp do_undigits([], _base, 0), + do: 0 + defp do_undigits([digit], base, 0) when is_integer(digit) and digit < base, + do: digit + defp do_undigits([1, 0], base, 0), + do: base + defp do_undigits([0 | tail], base, 0), + do: do_undigits(tail, base, 0) + + defp do_undigits([], _base, acc), + do: acc + defp do_undigits([digit | _], base, _) when is_integer(digit) and digit >= base, + do: raise ArgumentError, "invalid digit #{digit} in base #{base}" + defp do_undigits([digit | tail], base, acc) when is_integer(digit), + do: do_undigits(tail, base, acc * base + digit) + @doc """ - Converts a binary to an integer. + Parses a text representation of an integer. - If successful, returns a tuple of the form `{integer, remainder_of_binary}`. + An optional `base` to the corresponding integer can be provided. + If `base` is not given, 10 will be used. + + If successful, returns a tuple in the form of `{integer, remainder_of_binary}`. Otherwise `:error`. + Raises an error if `base` is less than 2 or more than 36. + + If you want to convert a string-formatted integer directly to a integer, + `String.to_integer/1` or `String.to_integer/2` can be used instead. + ## Examples iex> Integer.parse("34") - {34,""} + {34, ""} iex> Integer.parse("34.5") - {34,".5"} + {34, ".5"} iex> Integer.parse("three") :error + iex> Integer.parse("34", 10) + {34, ""} + + iex> Integer.parse("f4", 16) + {244, ""} + + iex> Integer.parse("Awww++", 36) + {509216, "++"} + + iex> Integer.parse("fab", 10) + :error + + iex> Integer.parse("a2", 38) + ** (ArgumentError) invalid base 38 + """ - @spec parse(binary) :: {integer, binary} | :error - def parse(<< ?-, bin :: binary >>) do - case do_parse(bin) do - :error -> :error - {number, remainder} -> {-number, remainder} + @spec parse(binary, 2..36) :: {integer, binary} | :error + def parse(binary, base \\ 10) + + def parse(_binary, base) when not base in 2..36 do + raise ArgumentError, "invalid base #{inspect base}" + end + + def parse(binary, base) do + case count_digits(binary, base) do + 0 -> + :error + count -> + {digits, rem} = :erlang.split_binary(binary, count) + {:erlang.binary_to_integer(digits, base), rem} end end - def parse(<< ?+, bin :: binary >>) do - do_parse(bin) + defp count_digits(<>, base) when sign in '+-' do + case count_digits_nosign(rest, base, 1) do + 1 -> 0 + count -> count + end end - def parse(bin) when is_binary(bin) do - do_parse(bin) + defp count_digits(<>, base) do + count_digits_nosign(rest, base, 0) end - defp do_parse(<< char, bin :: binary >>) when char in ?0..?9, do: do_parse(bin, char - ?0) - defp do_parse(_), do: :error + digits = [{?0..?9, -?0}, {?A..?Z, 10 - ?A}, {?a..?z, 10 - ?a}] - defp do_parse(<< char, rest :: binary >>, acc) when char in ?0..?9 do - do_parse rest, 10 * acc + (char - ?0) - end + for {chars, diff} <- digits, char <- chars do + digit = char + diff - defp do_parse(bitstring, acc) do - {acc, bitstring} + defp count_digits_nosign(<>, base, count) + when base > unquote(digit) do + count_digits_nosign(rest, base, count + 1) + end end + defp count_digits_nosign(<<_::binary>>, _, count), do: count + @doc """ Returns a binary which corresponds to the text representation - of `some_integer`. + of `integer`. Inlined by the compiler. 
@@ -81,15 +284,26 @@ defmodule Integer do iex> Integer.to_string(123) "123" + iex> Integer.to_string(+456) + "456" + + iex> Integer.to_string(-789) + "-789" + + iex> Integer.to_string(0123) + "123" + """ @spec to_string(integer) :: String.t - def to_string(some_integer) do - :erlang.integer_to_binary(some_integer) + def to_string(integer) do + :erlang.integer_to_binary(integer) end @doc """ Returns a binary which corresponds to the text representation - of `some_integer` in base `base`. + of `integer` in the given `base`. + + `base` can be an integer between 2 and 36. Inlined by the compiler. @@ -98,42 +312,115 @@ defmodule Integer do iex> Integer.to_string(100, 16) "64" + iex> Integer.to_string(-100, 16) + "-64" + + iex> Integer.to_string(882681651, 36) + "ELIXIR" + """ - @spec to_string(integer, pos_integer) :: String.t - def to_string(some_integer, base) do - :erlang.integer_to_binary(some_integer, base) + @spec to_string(integer, 2..36) :: String.t + def to_string(integer, base) do + :erlang.integer_to_binary(integer, base) end @doc """ - Returns a char list which corresponds to the text representation of the given integer. + Returns a charlist which corresponds to the text representation of the given `integer`. Inlined by the compiler. ## Examples - iex> Integer.to_char_list(7) - '7' + iex> Integer.to_charlist(123) + '123' + + iex> Integer.to_charlist(+456) + '456' + + iex> Integer.to_charlist(-789) + '-789' + + iex> Integer.to_charlist(0123) + '123' """ - @spec to_char_list(integer) :: list - def to_char_list(number) do - :erlang.integer_to_list(number) + @spec to_charlist(integer) :: charlist + def to_charlist(integer) do + :erlang.integer_to_list(integer) end @doc """ - Returns a char list which corresponds to the text representation of the - given integer in the given case. + Returns a charlist which corresponds to the text representation of `integer` in the given `base`. + + `base` can be an integer between 2 and 36. Inlined by the compiler. ## Examples - iex> Integer.to_char_list(1023, 16) - '3FF' + iex> Integer.to_charlist(100, 16) + '64' + + iex> Integer.to_charlist(-100, 16) + '-64' + + iex> Integer.to_charlist(882681651, 36) + 'ELIXIR' """ - @spec to_char_list(integer, pos_integer) :: list - def to_char_list(number, base) do - :erlang.integer_to_list(number, base) + @spec to_charlist(integer, 2..36) :: charlist + def to_charlist(integer, base) do + :erlang.integer_to_list(integer, base) end + + @doc """ + Returns the greatest common divisor of the two given numbers. + + The greatest common divisor (GCD) of `int1` and `int2` is the largest positive + integer that divides both `int1` and `int2` without leaving a remainder. + + By convention, `gcd(0, 0)` returns `0`. 
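In addition to the doctests below, a typical use of `gcd/2` is reducing a ratio to lowest terms; a minimal sketch (the `Fraction` module is hypothetical, not part of the patch):

    defmodule Fraction do
      # Divide numerator and denominator by their greatest common divisor.
      def reduce(num, den) when is_integer(num) and is_integer(den) and den != 0 do
        gcd = Integer.gcd(num, den)
        {div(num, gcd), div(den, gcd)}
      end
    end

    Fraction.reduce(8, 12) #=> {2, 3}
    Fraction.reduce(-9, 6) #=> {-3, 2}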
+ + ## Examples + + iex> Integer.gcd(2, 3) + 1 + + iex> Integer.gcd(8, 12) + 4 + + iex> Integer.gcd(8, -12) + 4 + + iex> Integer.gcd(10, 0) + 10 + + iex> Integer.gcd(7, 7) + 7 + + iex> Integer.gcd(0, 0) + 0 + + """ + @spec gcd(0, 0) :: 0 + @spec gcd(integer, integer) :: pos_integer + def gcd(int1, int2) when is_integer(int1) and is_integer(int2) do + gcd_positive(abs(int1), abs(int2)) + end + + defp gcd_positive(0, int2), do: int2 + defp gcd_positive(int1, 0), do: int1 + defp gcd_positive(int1, int2), do: gcd_positive(int2, rem(int1, int2)) + + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + @doc false + @spec to_char_list(integer) :: charlist + def to_char_list(integer), do: Integer.to_charlist(integer) + + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + @doc false + @spec to_char_list(integer, 2..36) :: charlist + def to_char_list(integer, base), do: Integer.to_charlist(integer, base) end diff --git a/lib/elixir/lib/io.ex b/lib/elixir/lib/io.ex index 947b6ffa61f..ffdce312f7f 100644 --- a/lib/elixir/lib/io.ex +++ b/lib/elixir/lib/io.ex @@ -1,32 +1,36 @@ defmodule IO do @moduledoc """ - Functions handling IO. + Functions handling input/output (IO). - Many functions in this module expects an IO device as argument. - An IO device must be a pid or an atom representing a process. + Many functions in this module expect an IO device as an argument. + An IO device must be a PID or an atom representing a process. For convenience, Elixir provides `:stdio` and `:stderr` as shortcuts to Erlang's `:standard_io` and `:standard_error`. - The majority of the functions expect char data, i.e. strings or + The majority of the functions expect chardata, i.e. strings or lists of characters and strings. In case another type is given, - it will do a conversion to string via the `String.Chars` protocol + functions will convert to string via the `String.Chars` protocol (as shown in typespecs). - The functions starting with `bin*` expects iodata as argument, + The functions starting with `bin` expect iodata as an argument, i.e. binaries or lists of bytes and binaries. ## IO devices - An IO device may be an atom or a pid. In case it is an atom, - the atom must be the name of a registered process. However, - there are three exceptions for this rule: + An IO device may be an atom or a PID. In case it is an atom, + the atom must be the name of a registered process. In addition, + Elixir provides two shortcuts: - * `:standard_io` - when the `:standard_io` atom is given, - it is treated as a shortcut for `Process.group_leader` + * `:stdio` - a shortcut for `:standard_io`, which maps to + the current `Process.group_leader/0` in Erlang - * `:stdio` - is a shortcut for `:standard_io` + * `:stderr` - a shortcut for the named process `:standard_error` + provided in Erlang - * `:stderr` - is a shortcut for `:standard_error` + IO devices maintain their position, that means subsequent calls to any + reading or writing functions will start from the place when the device + was last accessed. Position of files can be changed using the + `:file.position/2` function. """ @@ -34,8 +38,6 @@ defmodule IO do @type nodata :: {:error, term} | :eof @type chardata() :: :unicode.chardata() - import :erlang, only: [group_leader: 0] - defmacrop is_iodata(data) do quote do is_list(unquote(data)) or is_binary(unquote(data)) @@ -43,42 +45,77 @@ defmodule IO do end @doc """ - Reads `count` characters from the IO device or until - the end of the line if `:line` is given. It returns: + Reads from the IO `device`. 
- * `data` - the input characters + The `device` is iterated by the given number of characters or line by line if + `:line` is given. + Alternatively, if `:all` is given, then whole `device` is returned. + + It returns: + + * `data` - the output characters * `:eof` - end of file was encountered * `{:error, reason}` - other (rare) error condition; for instance, `{:error, :estale}` if reading from an NFS volume + + If `:all` is given, `:eof` is never returned, but an + empty string in case the device has reached EOF. """ - @spec read(device, :line | non_neg_integer) :: chardata | nodata - def read(device \\ group_leader, chars_or_line) + @spec read(device, :all | :line | non_neg_integer) :: chardata | nodata + def read(device \\ :stdio, line_or_chars) + + def read(device, :all) do + do_read_all(map_dev(device), "") + end def read(device, :line) do :io.get_line(map_dev(device), '') end - def read(device, count) when count >= 0 do + def read(device, count) when is_integer(count) and count >= 0 do :io.get_chars(map_dev(device), '', count) end + defp do_read_all(mapped_dev, acc) do + case :io.get_line(mapped_dev, "") do + line when is_binary(line) -> do_read_all(mapped_dev, acc <> line) + :eof -> acc + other -> other + end + end + @doc """ - Reads `count` bytes from the IO device or until - the end of the line if `:line` is given. It returns: + Reads from the IO `device`. The operation is Unicode unsafe. - * `data` - the input characters + The `device` is iterated by the given number of bytes or line by line if + `:line` is given. + Alternatively, if `:all` is given, then whole `device` is returned. + + It returns: + + * `data` - the output bytes * `:eof` - end of file was encountered * `{:error, reason}` - other (rare) error condition; for instance, `{:error, :estale}` if reading from an NFS volume + + If `:all` is given, `:eof` is never returned, but an + empty string in case the device has reached EOF. + + Note: do not use this function on IO devices in Unicode mode + as it will return the wrong result. """ - @spec binread(device, :line | non_neg_integer) :: iodata | nodata - def binread(device \\ group_leader, chars_or_line) + @spec binread(device, :all | :line | non_neg_integer) :: iodata | nodata + def binread(device \\ :stdio, line_or_chars) + + def binread(device, :all) do + do_binread_all(map_dev(device), "") + end def binread(device, :line) do case :file.read_line(map_dev(device)) do @@ -87,152 +124,259 @@ defmodule IO do end end - def binread(device, count) when count >= 0 do + def binread(device, count) when is_integer(count) and count >= 0 do case :file.read(map_dev(device), count) do {:ok, data} -> data other -> other end end + @read_all_size 4096 + defp do_binread_all(mapped_dev, acc) do + case :file.read(mapped_dev, @read_all_size) do + {:ok, data} -> do_binread_all(mapped_dev, acc <> data) + :eof -> acc + other -> other + end + end + @doc """ - Writes the given argument to the given device. + Writes `item` to the given `device`. - By default the device is the standard output. + By default the `device` is the standard output. It returns `:ok` if it succeeds. ## Examples IO.write "sample" - #=> "sample" + #=> sample IO.write :stderr, "error" - #=> "error" + #=> error """ @spec write(device, chardata | String.Chars.t) :: :ok - def write(device \\ group_leader(), item) do + def write(device \\ :stdio, item) do :io.put_chars map_dev(device), to_chardata(item) end @doc """ - Writes the given argument to the given device - as a binary, no unicode conversion happens. 
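The `:all` mode added to `IO.read/2` above can be exercised against any IO device; a quick illustrative run (not part of the patch) using a `StringIO` device:

    {:ok, device} = StringIO.open("hello\nworld\n")
    IO.read(device, :line) #=> "hello\n"
    IO.read(device, :all)  #=> "world\n"
    IO.read(device, :all)  #=> "" (at EOF, :all returns an empty string rather than :eof)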
+ Writes `item` as a binary to the given `device`. + No Unicode conversion happens. + The operation is Unicode unsafe. Check `write/2` for more information. + + Note: do not use this function on IO devices in Unicode mode + as it will return the wrong result. """ @spec binwrite(device, iodata) :: :ok | {:error, term} - def binwrite(device \\ group_leader(), item) when is_iodata(item) do + def binwrite(device \\ :stdio, item) when is_iodata(item) do :file.write map_dev(device), item end @doc """ - Writes the argument to the device, similar to `write/2`, - but adds a newline at the end. The argument is expected - to be a chardata. + Writes `item` to the given `device`, similar to `write/2`, + but adds a newline at the end. """ @spec puts(device, chardata | String.Chars.t) :: :ok - def puts(device \\ group_leader(), item) do - erl_dev = map_dev(device) - :io.put_chars erl_dev, [to_chardata(item), ?\n] + def puts(device \\ :stdio, item) do + :io.put_chars map_dev(device), [to_chardata(item), ?\n] end @doc """ - Inspects and writes the given argument to the device. + Writes a `message` to stderr, along with the given `stacktrace`. + + This function also notifies the compiler a warning was printed + (in case --warnings-as-errors was enabled). It returns `:ok` + if it succeeds. + + An empty list can be passed to avoid stacktrace printing. + + ## Examples + + stacktrace = [{MyApp, :main, 1, [file: 'my_app.ex', line: 4]}] + IO.warn "variable bar is unused", stacktrace + #=> warning: variable bar is unused + #=> my_app.ex:4: MyApp.main/1 - It sets by default pretty printing to true and returns - the item itself. + """ + @spec warn(chardata | String.Chars.t, Exception.stacktrace) :: :ok + def warn(message, []) do + :elixir_errors.warn([to_chardata(message), ?\n]) + end + def warn(message, stacktrace) when is_list(stacktrace) do + formatted = Enum.map_join(stacktrace, "\n ", &Exception.format_stacktrace_entry(&1)) + :elixir_errors.warn([to_chardata(message), ?\n, " ", formatted, ?\n]) + end - Note this function does not use the IO device width - because some IO devices does not implement the - appropriate functions. Setting the width must be done - explicitly by passing the `:width` option. + @doc """ + Writes a `message` to stderr, along with the current stacktrace. + + It returns `:ok` if it succeeds. ## Examples - IO.inspect Process.list + IO.warn "variable bar is unused" + #=> warning: variable bar is unused + #=> (iex) evaluator.ex:108: IEx.Evaluator.eval/4 """ - @spec inspect(term, Keyword.t) :: term + @spec warn(chardata | String.Chars.t) :: :ok + def warn(message) do + {:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace) + warn(message, Enum.drop(stacktrace, 2)) + end + + @doc """ + Inspects and writes the given `item` to the device. + + It's important to note that it returns the given `item` unchanged. + This makes it possible to "spy" on values by inserting an + `IO.inspect/2` call almost anywhere in your code, for example, + in the middle of a pipeline. + + It enables pretty printing by default with width of + 80 characters. The width can be changed by explicitly + passing the `:width` option. + + The output can be decorated with a label, by providing the `:label` + option to easily distinguish it from other `IO.inspect/2` calls. + The label will be printed before the inspected `item`. + + See `Inspect.Opts` for a full list of remaining formatting options. 
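Besides `:width` and `:label` shown in the examples below, any `Inspect.Opts` key can be passed through, including the new `:syntax_colors` option; an illustrative call (not part of the patch, output colors depend on the terminal):

    IO.inspect %{answer: 42},
      label: "state",
      syntax_colors: [number: :red, atom: :cyan]
    #=> state: %{answer: 42} (with the atom and the number colorized on ANSI terminals)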
+ + ## Examples + + IO.inspect <<0, 1, 2>>, width: 40 + + Prints: + + <<0, 1, 2>> + + We can use the `:label` option to decorate the output: + + IO.inspect 1..100, label: "a wonderful range" + + Prints: + + a wonderful range: 1..100 + + The `:label` option is especially useful with pipelines: + + [1, 2, 3] + |> IO.inspect(label: "before") + |> Enum.map(&(&1 * 2)) + |> IO.inspect(label: "after") + |> Enum.sum + + Prints: + + before: [1, 2, 3] + after: [2, 4, 6] + + """ + @spec inspect(item, Keyword.t) :: item when item: var def inspect(item, opts \\ []) do - inspect group_leader(), item, opts + inspect :stdio, item, opts end @doc """ - Inspects the item with options using the given device. + Inspects `item` according to the given options using the IO `device`. + + See `inspect/2` for a full list of options. """ - @spec inspect(device, term, Keyword.t) :: term + @spec inspect(device, item, Keyword.t) :: item when item: var def inspect(device, item, opts) when is_list(opts) do - opts = Keyword.put_new(opts, :pretty, true) - puts device, Kernel.inspect(item, opts) + label = if (label = opts[:label]), do: [to_chardata(label), ": "], else: [] + opts = struct(Inspect.Opts, opts) + chardata = Inspect.Algebra.format(Inspect.Algebra.to_doc(item, opts), opts.width) + puts device, [label, chardata] item end @doc """ - Gets a number of bytes from the io device. If the - io device is a unicode device, `count` implies - the number of unicode codepoints to be retrieved. - Otherwise, `count` is the number of raw bytes to be retrieved. - It returns: + Gets a number of bytes from IO device `:stdio`. - * `data` - the input characters + If `:stdio` is a Unicode device, `count` implies + the number of Unicode codepoints to be retrieved. + Otherwise, `count` is the number of raw bytes to be retrieved. - * `:eof` - end of file was encountered + See `IO.getn/3` for a description of return values. - * `{:error, reason}` - other (rare) error condition; - for instance, `{:error, :estale}` if reading from an - NFS volume """ @spec getn(chardata | String.Chars.t, pos_integer) :: chardata | nodata @spec getn(device, chardata | String.Chars.t) :: chardata | nodata def getn(prompt, count \\ 1) - def getn(prompt, count) when is_integer(count) do - getn(group_leader, prompt, count) + def getn(prompt, count) when is_integer(count) and count > 0 do + getn(:stdio, prompt, count) end - def getn(device, prompt) do + def getn(device, prompt) when not is_integer(prompt) do getn(device, prompt, 1) end @doc """ - Gets a number of bytes from the io device. If the - io device is a unicode device, `count` implies - the number of unicode codepoints to be retrieved. + Gets a number of bytes from the IO `device`. + + If the IO `device` is a Unicode device, `count` implies + the number of Unicode codepoints to be retrieved. Otherwise, `count` is the number of raw bytes to be retrieved. + + It returns: + + * `data` - the input characters + + * `:eof` - end of file was encountered + + * `{:error, reason}` - other (rare) error condition; + for instance, `{:error, :estale}` if reading from an + NFS volume + """ @spec getn(device, chardata | String.Chars.t, pos_integer) :: chardata | nodata - def getn(device, prompt, count) do + def getn(device, prompt, count) when is_integer(count) and count > 0 do :io.get_chars(map_dev(device), to_chardata(prompt), count) end - @doc """ - Reads a line from the IO device. It returns: + @doc ~S""" + Reads a line from the IO `device`. 
+ + It returns: * `data` - the characters in the line terminated - by a LF (or end of file) + by a line-feed (LF) or end of file (EOF) * `:eof` - end of file was encountered * `{:error, reason}` - other (rare) error condition; for instance, `{:error, :estale}` if reading from an NFS volume + + ## Examples + + To display "What is your name?" as a prompt and await user input: + + IO.gets "What is your name?\n" + """ @spec gets(device, chardata | String.Chars.t) :: chardata | nodata - def gets(device \\ group_leader(), prompt) do + def gets(device \\ :stdio, prompt) do :io.get_line(map_dev(device), to_chardata(prompt)) end @doc """ - Converts the io device into a `IO.Stream`. + Converts the IO `device` into an `IO.Stream`. An `IO.Stream` implements both `Enumerable` and `Collectable`, allowing it to be used for both read and write. - The device is iterated line by line if `:line` is given or - by a given number of codepoints. + The `device` is iterated by the given number of characters or line by line if + `:line` is given. - This reads the IO as utf-8. Check out + This reads from the IO as UTF-8. Check out `IO.binstream/2` to handle the IO as a raw binary. Note that an IO stream has side effects and every time @@ -247,25 +391,34 @@ defmodule IO do """ @spec stream(device, :line | pos_integer) :: Enumerable.t - def stream(device, line_or_codepoints) do + def stream(device, line_or_codepoints) + when line_or_codepoints == :line + when is_integer(line_or_codepoints) and line_or_codepoints > 0 do IO.Stream.__build__(map_dev(device), false, line_or_codepoints) end @doc """ - Converts the IO device into a `IO.Stream`. + Converts the IO `device` into an `IO.Stream`. The operation is Unicode unsafe. An `IO.Stream` implements both `Enumerable` and `Collectable`, allowing it to be used for both read and write. - The device is iterated line by line or by a number of bytes. - This reads the IO device as a raw binary. + The `device` is iterated by the given number of bytes or line by line if + `:line` is given. + This reads from the IO device as a raw binary. Note that an IO stream has side effects and every time you go over the stream you may get different results. + + Finally, do not use this function on IO devices in Unicode + mode as it will return the wrong result. + """ @spec binstream(device, :line | pos_integer) :: Enumerable.t - def binstream(device, line_or_bytes) do + def binstream(device, line_or_bytes) + when line_or_bytes == :line + when is_integer(line_or_bytes) and line_or_bytes > 0 do IO.Stream.__build__(map_dev(device), true, line_or_bytes) end @@ -273,8 +426,8 @@ defmodule IO do Converts chardata (a list of integers representing codepoints, lists and strings) into a string. - In case the conversion fails, it raises a `UnicodeConversionError`. - If a string is given, returns the string itself. + In case the conversion fails, it raises an `UnicodeConversionError`. + If a string is given, it returns the string itself. 
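The contrast with `IO.iodata_to_binary/1` matters for integers above 127: chardata treats them as Unicode code points and encodes them as UTF-8, while iodata treats them as raw bytes (and rejects anything above 255). A small illustration (not part of the patch), complementing the doctests below:

    iex> IO.chardata_to_string([0xE9])
    "é"
    iex> IO.iodata_to_binary([0xE9])
    <<233>>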
## Examples @@ -284,6 +437,9 @@ defmodule IO do iex> IO.chardata_to_string([0x0061, "bc"]) "abc" + iex> IO.chardata_to_string("string") + "string" + """ @spec chardata_to_string(chardata) :: String.t | no_return def chardata_to_string(string) when is_binary(string) do @@ -291,25 +447,17 @@ defmodule IO do end def chardata_to_string(list) when is_list(list) do - case :unicode.characters_to_binary(list) do - result when is_binary(result) -> - result - - {:error, encoded, rest} -> - raise UnicodeConversionError, encoded: encoded, rest: rest, kind: :invalid - - {:incomplete, encoded, rest} -> - raise UnicodeConversionError, encoded: encoded, rest: rest, kind: :incomplete - end + List.to_string(list) end @doc """ Converts iodata (a list of integers representing bytes, lists and binaries) into a binary. + The operation is Unicode unsafe. Notice that this function treats lists of integers as raw bytes and does not perform any kind of encoding conversion. If you want - to convert from a char list to a string (UTF-8 encoded), please + to convert from a charlist to a string (UTF-8 encoded), please use `chardata_to_string/1` instead. If this function receives a binary, the same binary is returned. @@ -321,12 +469,12 @@ defmodule IO do iex> bin1 = <<1, 2, 3>> iex> bin2 = <<4, 5>> iex> bin3 = <<6>> - iex> IO.iodata_to_binary([bin1, 1, [2, 3, bin2], 4|bin3]) - <<1,2,3,1,2,3,4,5,4,6>> + iex> IO.iodata_to_binary([bin1, 1, [2, 3, bin2], 4 | bin3]) + <<1, 2, 3, 1, 2, 3, 4, 5, 4, 6>> iex> bin = <<1, 2, 3>> iex> IO.iodata_to_binary(bin) - <<1,2,3>> + <<1, 2, 3>> """ @spec iodata_to_binary(iodata) :: binary @@ -341,7 +489,7 @@ defmodule IO do ## Examples - iex> IO.iodata_length([1, 2|<<3, 4>>]) + iex> IO.iodata_length([1, 2 | <<3, 4>>]) 4 """ @@ -351,32 +499,32 @@ defmodule IO do end @doc false - def each_stream(device, what) do - case read(device, what) do + def each_stream(device, line_or_codepoints) do + case read(device, line_or_codepoints) do :eof -> - nil + {:halt, device} {:error, reason} -> raise IO.StreamError, reason: reason data -> - {data, device} + {[data], device} end end @doc false - def each_binstream(device, what) do - case binread(device, what) do + def each_binstream(device, line_or_chars) do + case binread(device, line_or_chars) do :eof -> - nil + {:halt, device} {:error, reason} -> raise IO.StreamError, reason: reason data -> - {data, device} + {[data], device} end end @compile {:inline, map_dev: 1, to_chardata: 1} - # Map the Elixir names for standard io and error to Erlang names + # Map the Elixir names for standard IO and error to Erlang names defp map_dev(:stdio), do: :standard_io defp map_dev(:stderr), do: :standard_error defp map_dev(other) when is_atom(other) or is_pid(other) or is_tuple(other), do: other diff --git a/lib/elixir/lib/io/ansi.ex b/lib/elixir/lib/io/ansi.ex index bb52036357d..3abc09d655c 100644 --- a/lib/elixir/lib/io/ansi.ex +++ b/lib/elixir/lib/io/ansi.ex @@ -1,13 +1,13 @@ defmodule IO.ANSI.Sequence do @moduledoc false - defmacro defsequence(name, code \\ "", terminator \\ "m") do + defmacro defsequence(name, code, terminator \\ "m") do quote bind_quoted: [name: name, code: code, terminator: terminator] do def unquote(name)() do "\e[#{unquote(code)}#{unquote(terminator)}" end - defp escape_sequence(unquote(Atom.to_char_list(name))) do + defp format_sequence(unquote(name)) do unquote(name)() end end @@ -16,213 +16,237 @@ end defmodule IO.ANSI do @moduledoc """ - Functionality to render ANSI escape sequences - (http://en.wikipedia.org/wiki/ANSI_escape_code) — 
characters embedded - in text used to control formatting, color, and other output options - on video text terminals. + Functionality to render ANSI escape sequences. + + [ANSI escape sequences](https://en.wikipedia.org/wiki/ANSI_escape_code) + are characters embedded in text used to control formatting, color, and + other output options on video text terminals. """ import IO.ANSI.Sequence + @type ansicode :: atom + @type ansilist :: maybe_improper_list(char | ansicode | binary | ansilist, binary | ansicode | []) + @type ansidata :: ansilist | ansicode | binary + @doc """ - Checks whether the default I/O device is a terminal or a file. + Checks if ANSI coloring is supported and enabled on this machine. + + This function simply reads the configuration value for + `:ansi_enabled` in the `:elixir` application. The value is by + default `false` unless Elixir can detect during startup that + both `stdout` and `stderr` are terminals. + """ + @spec enabled? :: boolean + def enabled? do + Application.get_env(:elixir, :ansi_enabled, false) + end + + @doc "Sets foreground color." + @spec color(0..255) :: String.t + def color(code) when code in 0..255, do: "\e[38;5;#{code}m" + + @doc ~S""" + Sets the foreground color from individual RGB values. + + Valid values for each color are in the range 0 to 5. + """ + @spec color(0..5, 0..5, 0..5) :: String.t + def color(r, g, b) when r in 0..5 and g in 0..5 and b in 0..5 do + color(16 + (36 * r) + (6 * g) + b) + end + + @doc "Sets background color." + @spec color_background(0..255) :: String.t + def color_background(code) when code in 0..255, do: "\e[48;5;#{code}m" + + @doc ~S""" + Sets the background color from individual RGB values. - Used to identify whether printing ANSI escape sequences will likely - be displayed as intended. This is checked by sending a message to - the group leader. In case the group leader does not support the message, - it will likely lead to a timeout (and a slow down on execution time). + Valid values for each color are in the range 0 to 5. """ - @spec terminal? :: boolean - @spec terminal?(:io.device) :: boolean - def terminal?(device \\ :erlang.group_leader) do - !match?({:win32, _}, :os.type()) and - match?({:ok, _}, :io.columns(device)) + @spec color_background(0..5, 0..5, 0..5) :: String.t + def color_background(r, g, b) when r in 0..5 and g in 0..5 and b in 0..5 do + color_background(16 + (36 * r) + (6 * g) + b) end - @doc "Resets all attributes" + @doc "Resets all attributes." defsequence :reset, 0 - @doc "Bright (increased intensity) or Bold" + @doc "Bright (increased intensity) or bold." defsequence :bright, 1 - @doc "Faint (decreased intensity), not widely supported" + @doc "Faint (decreased intensity). Not widely supported." defsequence :faint, 2 @doc "Italic: on. Not widely supported. Sometimes treated as inverse." defsequence :italic, 3 - @doc "Underline: Single" + @doc "Underline: single." defsequence :underline, 4 - @doc "Blink: Slow. Less than 150 per minute" + @doc "Blink: slow. Less than 150 per minute." defsequence :blink_slow, 5 - @doc "Blink: Rapid. MS-DOS ANSI.SYS; 150 per minute or more; not widely supported" + @doc "Blink: rapid. MS-DOS ANSI.SYS; 150 per minute or more; not widely supported." defsequence :blink_rapid, 6 - @doc "Image: Negative. Swap foreground and background" + @doc "Image: negative. Swap foreground and background." defsequence :inverse, 7 - @doc "Image: Negative. Swap foreground and background" + @doc "Image: negative. Swap foreground and background." defsequence :reverse, 7 - @doc "Conceal. 
Not widely supported" + @doc "Conceal. Not widely supported." defsequence :conceal, 8 @doc "Crossed-out. Characters legible, but marked for deletion. Not widely supported." defsequence :crossed_out, 9 - @doc "Sets primary (default) font" + @doc "Sets primary (default) font." defsequence :primary_font, 10 for font_n <- [1, 2, 3, 4, 5, 6, 7, 8, 9] do - @doc "Sets alternative font #{font_n}" + @doc "Sets alternative font #{font_n}." defsequence :"font_#{font_n}", font_n + 10 end - @doc "Normal color or intensity" + @doc "Normal color or intensity." defsequence :normal, 22 - @doc "Not italic" + @doc "Not italic." defsequence :not_italic, 23 - @doc "Underline: None" + @doc "Underline: none." defsequence :no_underline, 24 - @doc "Blink: off" + @doc "Blink: off." defsequence :blink_off, 25 + @doc "Image: positive. Normal foreground and background." + defsequence :inverse_off, 27 + + @doc "Image: positive. Normal foreground and background." + defsequence :reverse_off, 27 + colors = [:black, :red, :green, :yellow, :blue, :magenta, :cyan, :white] - colors = Enum.zip(0..(length(colors)-1), colors) - for {code, color} <- colors do - @doc "Sets foreground color to #{color}" + for {color, code} <- Enum.with_index(colors) do + @doc "Sets foreground color to #{color}." defsequence color, code + 30 - @doc "Sets background color to #{color}" + @doc "Sets foreground color to light #{color}." + defsequence :"light_#{color}", code + 90 + + @doc "Sets background color to #{color}." defsequence :"#{color}_background", code + 40 + + @doc "Sets background color to light #{color}." + defsequence :"light_#{color}_background", code + 100 end - @doc "Default text color" + @doc "Default text color." defsequence :default_color, 39 - @doc "Default background color" + @doc "Default background color." defsequence :default_background, 49 - @doc "Framed" + @doc "Framed." defsequence :framed, 51 - @doc "Encircled" + @doc "Encircled." defsequence :encircled, 52 - @doc "Overlined" + @doc "Overlined." defsequence :overlined, 53 - @doc "Not framed or encircled" + @doc "Not framed or encircled." defsequence :not_framed_encircled, 54 - @doc "Not overlined" + @doc "Not overlined." defsequence :not_overlined, 55 - @doc "Send cursor home" + @doc "Sends cursor home." defsequence :home, "", "H" - @doc "Clear screen" + @doc "Clears screen." defsequence :clear, "2", "J" - defp escape_sequence(other) do - raise ArgumentError, "invalid ANSI sequence specification: #{other}" + @doc "Clears line." + defsequence :clear_line, "2", "K" + + defp format_sequence(other) do + raise ArgumentError, "invalid ANSI sequence specification: #{inspect other}" end @doc ~S""" - Escapes a string by converting named ANSI sequences into actual ANSI codes. + Formats a chardata-like argument by converting named ANSI sequences into actual + ANSI codes. - The format for referring to sequences is `%{red}` and `%{red,bright}` (for - multiple sequences). + The named sequences are represented by atoms. - It will also append a `%{reset}` to the string. If you don't want this - behaviour, use `escape_fragment/2`. + It will also append an `IO.ANSI.reset/0` to the chardata when a conversion is + performed. If you don't want this behaviour, use `format_fragment/2`. An optional boolean parameter can be passed to enable or disable emitting actual ANSI codes. When `false`, no ANSI codes will emitted. 
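  As a quick illustration of the color-cube arithmetic used by `color/3` above
  (the `16 + (36 * r) + (6 * g) + b` mapping), both calls below are equivalent;
  the escape code itself is deterministic, though how it renders depends on the
  terminal:

      iex> IO.ANSI.color(5, 0, 0)
      "\e[38;5;196m"
      iex> IO.ANSI.color(16 + 36 * 5 + 6 * 0 + 0)
      "\e[38;5;196m"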
- By default, standard output will be checked if it is a terminal capable - of handling these sequences (using `terminal?/1` function) + By default checks if ANSI is enabled using the `enabled?/0` function. ## Examples - iex> IO.ANSI.escape("Hello %{red,bright,green}yes", true) - "Hello \e[31m\e[1m\e[32myes\e[0m" + iex> IO.ANSI.format(["Hello, ", :red, :bright, "world!"], true) + [[[[[[], "Hello, "] | "\e[31m"] | "\e[1m"], "world!"] | "\e[0m"] """ - @spec escape(String.t, emit :: boolean) :: String.t - def escape(string, emit \\ terminal?) when is_binary(string) and is_boolean(emit) do - {rendered, emitted} = do_escape(string, emit, false, nil, []) - if emitted do - rendered <> reset - else - rendered - end + def format(chardata, emit? \\ enabled?()) when is_boolean(emit?) do + do_format(chardata, [], [], emit?, :maybe) end @doc ~S""" - Escapes a string by converting named ANSI sequences into actual ANSI codes. + Formats a chardata-like argument by converting named ANSI sequences into actual + ANSI codes. - The format for referring to sequences is `%{red}` and `%{red,bright}` (for - multiple sequences). + The named sequences are represented by atoms. An optional boolean parameter can be passed to enable or disable emitting actual ANSI codes. When `false`, no ANSI codes will emitted. - By default, standard output will be checked if it is a terminal capable - of handling these sequences (using `terminal?/1` function) + By default checks if ANSI is enabled using the `enabled?/0` function. ## Examples - iex> IO.ANSI.escape_fragment("Hello %{red,bright,green}yes", true) - "Hello \e[31m\e[1m\e[32myes" - - iex> IO.ANSI.escape_fragment("%{reset}bye", true) - "\e[0mbye" + iex> IO.ANSI.format_fragment([:bright, 'Word'], true) + [[[[[[] | "\e[1m"], 87], 111], 114], 100] """ - @spec escape_fragment(String.t, emit :: boolean) :: String.t - def escape_fragment(string, emit \\ terminal?) when is_binary(string) and is_boolean(emit) do - {escaped, _emitted} = do_escape(string, emit, false, nil, []) - escaped - end - - defp do_escape(<>, emit, emitted, buffer, acc) when is_list(buffer) do - sequences = - buffer - |> Enum.reverse() - |> :string.tokens(',') - |> Enum.map(&(&1 |> :string.strip |> escape_sequence)) - |> Enum.reverse() - - if emit and sequences != [] do - do_escape(t, emit, true, nil, sequences ++ acc) - else - do_escape(t, emit, emitted, nil, acc) - end + def format_fragment(chardata, emit? \\ enabled?()) when is_boolean(emit?) 
do + do_format(chardata, [], [], emit?, false) + end + + defp do_format([term | rest], rem, acc, emit?, append_reset) do + do_format(term, [rest | rem], acc, emit?, append_reset) + end + + defp do_format(term, rem, acc, true, append_reset) when is_atom(term) do + do_format([], rem, [acc | format_sequence(term)], true, !!append_reset) end - defp do_escape(<>, emit, emitted, buffer, acc) when is_list(buffer) do - do_escape(t, emit, emitted, [h|buffer], acc) + defp do_format(term, rem, acc, false, append_reset) when is_atom(term) do + do_format([], rem, acc, false, append_reset) end - defp do_escape(<<>>, _emit, _emitted, buffer, _acc) when is_list(buffer) do - buffer = IO.iodata_to_binary Enum.reverse(buffer) - raise ArgumentError, "missing } for escape fragment #{buffer}" + defp do_format(term, rem, acc, emit?, append_reset) when not is_list(term) do + do_format([], rem, [acc, term], emit?, append_reset) end - defp do_escape(<>, emit, emitted, nil, acc) do - do_escape(t, emit, emitted, [], acc) + defp do_format([], [next | rest], acc, emit?, append_reset) do + do_format(next, rest, acc, emit?, append_reset) end - defp do_escape(<>, emit, emitted, nil, acc) do - do_escape(t, emit, emitted, nil, [h|acc]) + defp do_format([], [], acc, true, true) do + [acc | IO.ANSI.reset] end - defp do_escape(<<>>, _emit, emitted, nil, acc) do - {IO.iodata_to_binary(Enum.reverse(acc)), emitted} + defp do_format([], [], acc, _emit?, _append_reset) do + acc end end diff --git a/lib/elixir/lib/io/ansi/docs.ex b/lib/elixir/lib/io/ansi/docs.ex index 4dfcfb21ac0..4f2801de371 100644 --- a/lib/elixir/lib/io/ansi/docs.ex +++ b/lib/elixir/lib/io/ansi/docs.ex @@ -2,33 +2,36 @@ defmodule IO.ANSI.Docs do @moduledoc false @bullets [?*, ?-, ?+] + @spaces [" ", "\n", "\t"] @doc """ The default options used by this module. The supported values are: - * `:enabled` - toggles coloring on and off (true) - * `:doc_code` - code blocks (cyan, bright) - * `:doc_inline_code` - inline code (cyan) - * `:doc_headings` - h1 and h2 headings (yellow, bright) - * `:doc_title` - top level heading (reverse, yellow, bright) - * `:doc_bold` - bold text (bright) - * `:doc_underline` - underlined text (underline) - * `:width` - the width to format the text (80) + * `:enabled` - toggles coloring on and off (true) + * `:doc_bold` - bold text (bright) + * `:doc_code` - code blocks (cyan) + * `:doc_headings` - h1, h2, h3, h4, h5, h6 headings (yellow) + * `:doc_inline_code` - inline code (cyan) + * `:doc_table_heading` - style for table headings + * `:doc_title` - top level heading (reverse, yellow) + * `:doc_underline` - underlined text (underline) + * `:width` - the width to format the text (80) Values for the color settings are strings with comma-separated ANSI values. 
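  As a rough sketch of the difference between `IO.ANSI.format/2` and
  `IO.ANSI.format_fragment/2` from the hunk above (results piped through
  `IO.iodata_to_binary/1` only for readability; the `true` argument forces
  emission regardless of `enabled?/0`):

      iex> IO.ANSI.format([:red, "hi"], true) |> IO.iodata_to_binary()
      "\e[31mhi\e[0m"

      iex> IO.ANSI.format_fragment([:red, "hi"], true) |> IO.iodata_to_binary()
      "\e[31mhi"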
""" def default_options do - [enabled: true, - doc_code: "cyan,bright", - doc_inline_code: "cyan", - doc_headings: "yellow,bright", - doc_title: "reverse,yellow,bright", - doc_bold: "bright", - doc_underline: "underline", - width: 80] + [enabled: true, + doc_bold: [:bright], + doc_code: [:cyan], + doc_headings: [:yellow], + doc_inline_code: [:cyan], + doc_table_heading: [:reverse], + doc_title: [:reverse, :yellow], + doc_underline: [:underline], + width: 80] end @doc """ @@ -38,163 +41,174 @@ defmodule IO.ANSI.Docs do """ def print_heading(heading, options \\ []) do IO.puts IO.ANSI.reset - options = Keyword.merge(default_options, options) + options = Keyword.merge(default_options(), options) width = options[:width] padding = div(width + String.length(heading), 2) - heading = heading |> String.rjust(padding) |> String.ljust(width) + heading = heading |> String.pad_leading(padding) |> String.pad_trailing(width) write(:doc_title, heading, options) + newline_after_block() end @doc """ Prints the documentation body. - In addition to the priting string, takes a set of options + In addition to the printing string, takes a set of options defined in `default_options/1`. """ def print(doc, options \\ []) do - options = Keyword.merge(default_options, options) + options = Keyword.merge(default_options(), options) doc - |> String.split(["\r\n","\n"], trim: false) - |> Enum.map(&String.rstrip/1) - |> process("", options) + |> String.split(["\r\n", "\n"], trim: false) + |> Enum.map(&String.trim_trailing/1) + |> process([], "", options) end - defp process([], _indent, _options), do: nil - - defp process(["# " <> heading | rest], _indent, options) do - write_h1(String.strip(heading), options) - process(rest, "", options) + defp process([], text, indent, options) do + write_text(text, indent, options) end - defp process(["## " <> heading | rest], _indent, options) do - write_h2(String.strip(heading), options) - process(rest, "", options) + defp process(["# " <> _ = heading | rest], text, indent, options) do + write_heading(heading, rest, text, indent, options) end - - defp process(["### " <> heading | rest], indent, options) do - write_h3(String.strip(heading), indent, options) - process(rest, indent, options) + defp process(["## " <> _ = heading | rest], text, indent, options) do + write_heading(heading, rest, text, indent, options) + end + defp process(["### " <> _ = heading | rest], text, indent, options) do + write_heading(heading, rest, text, indent, options) + end + defp process(["#### " <> _ = heading | rest], text, indent, options) do + write_heading(heading, rest, text, indent, options) + end + defp process(["##### " <> _ = heading | rest], text, indent, options) do + write_heading(heading, rest, text, indent, options) + end + defp process(["###### " <> _ = heading | rest], text, indent, options) do + write_heading(heading, rest, text, indent, options) end - defp process(["" | rest], indent, options) do - process(rest, indent, options) + defp process(["" | rest], text, indent, options) do + write_text(text, indent, options) + process(rest, [], indent, options) end - defp process([" " <> line | rest], indent, options) do + defp process([" " <> line | rest], text, indent, options) do + write_text(text, indent, options) process_code(rest, [line], indent, options) end - defp process([line | rest], indent, options) do - {stripped, count} = strip_spaces(line, 0) - case stripped do - <> when bullet in @bullets -> - process_list(item, rest, count, indent, options) - _ -> - process_text(rest, [line], 
indent, false, options) - end + defp process(["```" <> _line | rest], text, indent, options) do + process_fenced_code_block(rest, text, indent, options, _delimiter = "```") end - defp strip_spaces(" " <> line, acc) do - strip_spaces(line, acc + 1) + defp process(["~~~" <> _line | rest], text, indent, options) do + process_fenced_code_block(rest, text, indent, options, _delimiter = "~~~") end - defp strip_spaces(rest, acc) do - {rest, acc} + defp process(all = [line | rest], text, indent, options) do + {stripped, count} = strip_spaces(line, 0, :infinity) + cond do + link_label?(stripped, count) -> + write_text([line], indent, options, true) + process(rest, text, indent, options) + table_line?(stripped) and rest != [] and table_line?(hd(rest)) -> + write_text(text, indent, options) + process_table(all, indent, options) + true -> + process_rest(stripped, rest, count, text, indent, options) + end end ## Headings - defp write_h1(heading, options) do - write_h2(String.upcase(heading), options) - end - - defp write_h2(heading, options) do - write(:doc_headings, heading, options) - end - - defp write_h3(heading, indent, options) do - IO.write(indent) + defp write_heading(heading, rest, text, indent, options) do + write_text(text, indent, options) write(:doc_headings, heading, options) + newline_after_block() + process(rest, [], "", options) end ## Lists - defp process_list(line, rest, count, indent, options) do - IO.write indent <> "• " - {contents, rest, done} = process_list_next(rest, count, false, []) - process_text(contents, [line], indent <> " ", true, options) - if done, do: IO.puts(IO.ANSI.reset) - process(rest, indent, options) - end - - # Process the thing after a list item entry. It can be either: - # - # * Continuation of the list - # * A nested list - # * The end of the list - # - defp process_list_next([" " <> _ = line | rest], count, _done, acc) do - case list_next(line, count) do - :done -> {Enum.reverse(acc), [line|rest], false} - chopped -> process_list_next(rest, count, false, [chopped|acc]) + defp process_rest(stripped, rest, count, text, indent, options) do + case stripped do + <> when bullet in @bullets -> + write_text(text, indent, options) + process_list("• ", item, rest, count, indent, options) + <> when d1 in ?0..?9 -> + write_text(text, indent, options) + process_list(<>, item, rest, count, indent, options) + <> when d1 in ?0..?9 and d2 in ?0..?9 -> + write_text(text, indent, options) + process_list(<>, item, rest, count, indent, options) + _ -> + process(rest, [stripped | text], indent, options) end end - defp process_list_next([<> | _] = rest, _count, _done, acc) when bullet in @bullets do - {Enum.reverse(acc), rest, false} - end + defp process_list(entry, line, rest, count, indent, options) do + # The first list always win some extra padding + entry = if indent == "", do: " " <> entry, else: entry + new_indent = indent <> String.duplicate(" ", String.length(entry)) - defp process_list_next(["" | rest], count, _done, acc) do - process_list_next(rest, count, true, [""|acc]) - end + {contents, rest, done} = process_list_next(rest, count, byte_size(new_indent), []) + process(contents, [indent <> entry <> line, :no_wrap], new_indent, options) - defp process_list_next(rest, _count, done, acc) do - {Enum.reverse(acc), rest, done} + if done, do: newline_after_block() + process(rest, [], indent, options) end - defp list_next(<>, 0) when bullet in @bullets, do: :done - defp list_next(line, 0), do: chop(line, 2) - defp list_next(" " <> line, acc), do: list_next(line, acc - 1) - 
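  # A minimal usage sketch for the module being refactored here. `IO.ANSI.Docs` is
  # `@moduledoc false` (private to Elixir, normally driven by IEx's `h/1` helper),
  # so this is for illustration only; the option names come from `default_options/0`
  # above.
  #
  #     options = IO.ANSI.Docs.default_options()
  #     IO.ANSI.Docs.print_heading("String.trim/1", options)
  #     IO.ANSI.Docs.print("Returns a string where *leading and trailing* whitespace is removed.", options)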
defp list_next(line, _acc), do: line - - defp chop(" " <> line, acc) when acc > 0, do: chop(line, acc - 1) - defp chop(line, _acc), do: line - - ## Text (paragraphs / lists) - - defp process_text(doc=["" | _], para, indent, from_list, options) do - write_text(Enum.reverse(para), indent, from_list, options) - process(doc, indent, options) + defp process_list_next([line | rest], count, max, acc) do + {stripped, next_count} = strip_spaces(line, 0, max) + case process_list_next_kind(stripped, rest, count, next_count) do + :next -> process_list_next(rest, count, max, [stripped | acc]) + :done -> {Enum.reverse(acc), [line | rest], true} + :list -> {Enum.reverse(acc), [line | rest], false} + end end - defp process_text([], para, indent, from_list, options) do - write_text(Enum.reverse(para), indent, from_list, options) + defp process_list_next([], _count, _max, acc) do + {Enum.reverse(acc), [], true} end - defp process_text([line | rest], para, indent, true, options) do - {stripped, count} = strip_spaces(line, 0) - case stripped do - <> when bullet in @bullets -> - write_text(Enum.reverse(para), indent, true, options) - process_list(item, rest, count, indent, options) + defp process_list_next_kind(stripped, rest, count, next_count) do + case {stripped, rest} do + {<>, _} when bullet in @bullets and next_count <= count -> + :list + {<>, _} when d1 in ?0..?9 and next_count <= count -> + :list + {<>, _} when d1 in ?0..?9 and d2 in ?0..?9 and next_count <= count -> + :list + {"", [" " <> _ | _]} -> + :next + {"", _} -> + :done _ -> - process_text(rest, [line | para], indent, true, options) + :next end end - defp process_text([line | rest], para, indent, from_list, options) do - process_text(rest, [line | para], indent, from_list, options) + ## Text + + defp write_text(text, indent, options) do + case Enum.reverse(text) do + [:no_wrap | rest] -> write_text(rest, indent, options, true) + rest -> write_text(rest, indent, options, false) + end end - defp write_text(lines, indent, from_list, options) do + defp write_text([], _indent, _options, _no_wrap) do + :ok + end + + defp write_text(lines, indent, options, no_wrap) do lines |> Enum.join(" ") |> handle_links - |> handle_inline(nil, [], [], options) - |> String.split(~r{\s}) - |> write_with_wrap(options[:width] - byte_size(indent), indent, from_list) + |> handle_inline(options) + |> String.split(@spaces) + |> write_with_wrap(options[:width] - byte_size(indent), indent, no_wrap) - unless from_list, do: IO.puts(IO.ANSI.reset) + unless no_wrap, do: newline_after_block() end ## Code blocks @@ -204,28 +218,157 @@ defmodule IO.ANSI.Docs do end # Blank line between code blocks - defp process_code([ "", " " <> line | rest ], code, indent, options) do + defp process_code(["", " " <> line | rest], code, indent, options) do process_code(rest, [line, "" | code], indent, options) end - defp process_code([ " " <> line | rest ], code, indent, options) do - process_code(rest, [line|code], indent, options) + defp process_code([" " <> line | rest], code, indent, options) do + process_code(rest, [line | code], indent, options) end defp process_code(rest, code, indent, options) do write_code(code, indent, options) - process(rest, indent, options) + process(rest, [], indent, options) + end + + defp process_fenced_code_block(rest, text, indent, options, delimiter) do + write_text(text, indent, options) + process_fenced_code(rest, [], indent, options, delimiter) + end + + defp process_fenced_code([], code, indent, options, _delimiter) do + write_code(code, indent, options) + 
end + + defp process_fenced_code([line | rest], code, indent, options, delimiter) do + if line === delimiter do + process_code(rest, code, indent, options) + else + process_fenced_code(rest, [line | code], indent, options, delimiter) + end end defp write_code(code, indent, options) do - write(:doc_code, "#{indent}┃ #{Enum.join(Enum.reverse(code), "\n#{indent}┃ ")}", options) + write(:doc_code, "#{indent} #{Enum.join(Enum.reverse(code), "\n#{indent} ")}", options) + newline_after_block() + end + + ## Tables + + defp process_table(lines, indent, options) do + {table, rest} = Enum.split_while(lines, &table_line?/1) + table_lines(table, options) + newline_after_block() + process(rest, [], indent, options) + end + + defp table_lines(lines, options) do + lines = Enum.map(lines, &split_into_columns(&1, options)) + count = Enum.map(lines, &length/1) |> Enum.max + lines = Enum.map(lines, &pad_to_number_of_columns(&1, count)) + + widths = + for line <- lines do + for {_col, length} <- line, do: length + end + + col_widths = Enum.reduce(widths, + List.duplicate(0, count), + &max_column_widths/2) + + render_table(lines, col_widths, options) + end + + defp split_into_columns(line, options) do + line + |> String.trim("|") + |> String.trim() + |> String.split(" | ") + |> Enum.map(&render_column(&1, options)) + end + + defp render_column(col, options) do + col = + col + |> String.replace("\\\|", "|") + |> handle_links + |> handle_inline(options) + {col, length_without_escape(col, 0)} + end + + defp pad_to_number_of_columns(cols, col_count), + do: cols ++ List.duplicate({"", 0}, col_count - length(cols)) + + defp max_column_widths(cols, widths), + do: Enum.zip(cols, widths) |> Enum.map(fn {a, b} -> max(a, b) end) + + # If second line is heading separator, use the heading style on the first + defp render_table([first, second | rest], widths, options) do + combined = Enum.zip(first, widths) + if table_header?(second) do + draw_table_row(combined, options, :heading) + render_table(rest, widths, options) + else + draw_table_row(combined, options) + render_table([second | rest], widths, options) + end + end + + defp render_table([first | rest], widths, options) do + combined = Enum.zip(first, widths) + draw_table_row(combined, options) + render_table(rest, widths, options) + end + + defp render_table([], _, _), + do: nil + + defp table_header?(row) do + Enum.all?(row, fn {col, _} -> table_header_column?(col) end) + end + + defp table_header_column?(":" <> row), do: table_header_contents?(row) + defp table_header_column?(row), do: table_header_contents?(row) + + defp table_header_contents?("-" <> row), do: table_header_contents?(row) + defp table_header_contents?(":"), do: true + defp table_header_contents?(""), do: true + defp table_header_contents?(_), do: false + + defp draw_table_row(cols_and_widths, options, heading \\ false) do + columns = + Enum.map_join(cols_and_widths, " | ", fn {{col, length}, width} -> + col <> String.duplicate(" ", width - length) + end) + + if heading do + write(:doc_table_heading, columns, options) + else + IO.puts columns + end + end + + defp table_line?(line) do + line =~ " | " end ## Helpers + defp link_label?("[" <> rest, count) when count <= 3, do: link_label?(rest) + defp link_label?(_, _), do: false + + defp link_label?("]: " <> _), do: true + defp link_label?("]" <> _), do: false + defp link_label?(""), do: false + defp link_label?(<<_>> <> rest), do: link_label?(rest) + + defp strip_spaces(" " <> line, acc, max) when acc < max, + do: strip_spaces(line, acc + 1, max) + defp 
strip_spaces(rest, acc, _max), + do: {rest, acc} + defp write(style, string, options) do - IO.puts color(style, options) <> string <> IO.ANSI.reset - IO.puts IO.ANSI.reset + IO.puts [color(style, options), string, IO.ANSI.reset] end defp write_with_wrap([], _available, _indent, _first) do @@ -238,13 +381,13 @@ defmodule IO.ANSI.Docs do write_with_wrap(rest, available, indent, false) end - defp take_words([word|words], available, acc) do + defp take_words([word | words], available, acc) do available = available - length_without_escape(word, 0) cond do # It fits, take one for space and continue decreasing available > 0 -> - take_words(words, available - 1, [word|acc]) + take_words(words, available - 1, [word | acc]) # No space but we got no words acc == [] -> @@ -252,7 +395,7 @@ defmodule IO.ANSI.Docs do # Otherwise true -> - {Enum.reverse(acc), [word|words]} + {Enum.reverse(acc), [word | words]} end end @@ -260,17 +403,17 @@ defmodule IO.ANSI.Docs do {Enum.reverse(acc), []} end - defp length_without_escape(<< ?\e, ?[, _, _, ?m, rest :: binary >>, count) do + defp length_without_escape(<> <> rest, count) do length_without_escape(rest, count) end - defp length_without_escape(<< ?\e, ?[, _, ?m, rest :: binary >>, count) do + defp length_without_escape(<> <> rest, count) do length_without_escape(rest, count) end defp length_without_escape(rest, count) do case String.next_grapheme(rest) do - {_, rest} -> length_without_escape(rest, count + 1) + {_, rest} -> length_without_escape(rest, count + 1) nil -> count end end @@ -282,95 +425,141 @@ defmodule IO.ANSI.Docs do end defp escape_underlines_in_link(text) do - case Regex.match?(~r{.*(https?\S*)}, text) do - true -> - Regex.replace(~r{_}, text, "\\\\_") - _ -> - text - end + ~r{https?\S*} + |> Regex.recompile! + |> Regex.replace(text, &String.replace(&1, "_", "\\_")) end defp remove_square_brackets_in_link(text) do - Regex.replace(~r{\[(.*?)\]\((.*?)\)}, text, "\\1 (\\2)") + ~r{\[(.*?)\]\((.*?)\)} + |> Regex.recompile! + |> Regex.replace(text, "\\1 (\\2)") end - # Single inline quotes. - @single [?`, ?_, ?*] + # We have four entries: **, *, _ and `. + # + # The first three behave the same while the last one is simpler + # when it comes to delimiters. But, since the first has two + # characters, we need to handle 3 cases: + # + # 1. ** + # 2. _ and * + # 3. ` + # + # Where the first two should have the same code but match differently. + @single [?_, ?*] + + # Characters that can mark the beginning or the end of a word. + # Only support the most common ones at this moment. + @delimiters [?\s, ?', ?", ?!, ?@, ?#, ?$, ?%, ?^, ?&, ?-, ?+, ?(, ?), ?[, ?], ?{, ?}, ?<, ?>, ?.] 
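  # `length_without_escape/2` above computes the printable width of a chunk by
  # skipping the escape sequences this module emits. A standalone approximation of
  # the same idea (a sketch, not this module's code; `display_length` is an
  # invented name):
  #
  #     display_length = fn string ->
  #       string
  #       |> String.replace(~r/\e\[\d+(;\d+)*m/, "")
  #       |> String.length()
  #     end
  #
  #     display_length.(IO.iodata_to_binary(IO.ANSI.format([:bright, "bold"], true)))
  #     #=> 4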
- # ` does not require space in between - @spaced [?_, ?*] + # Inline start - # Clauses for handling spaces - defp handle_inline(<>, nil, buffer, acc, options) do - handle_inline(rest, nil, [?\s, ?*, ?*|buffer], acc, options) + defp handle_inline(<>, options) do + handle_inline(rest, ?d, ["**"], [], options) end - defp handle_inline(<>, nil, buffer, acc, options) when mark in @spaced do - handle_inline(rest, nil, [?\s, mark|buffer], acc, options) + defp handle_inline(<>, options) when mark in @single do + handle_inline(rest, mark, [<>], [], options) end - defp handle_inline(<>, limit, buffer, acc, options) do - handle_inline(rest, limit, [?*, ?*, ?\s|buffer], acc, options) + defp handle_inline(rest, options) do + handle_inline(rest, nil, [], [], options) end - defp handle_inline(<>, limit, buffer, acc, options) when mark in @spaced do - handle_inline(rest, limit, [mark, ?\s|buffer], acc, options) + # Inline delimiters + + defp handle_inline(<>, nil, buffer, acc, options) + when rest != "" and delimiter in @delimiters do + handle_inline(rest, ?d, ["**"], [delimiter, Enum.reverse(buffer) | acc], options) end - # Clauses for handling escape - defp handle_inline(<>, limit, buffer, acc, options) do - handle_inline(rest, limit, [?\\|buffer], acc, options) + defp handle_inline(<>, nil, buffer, acc, options) + when rest != "" and delimiter in @delimiters and mark in @single do + handle_inline(rest, mark, [<>], [delimiter, Enum.reverse(buffer) | acc], options) end - defp handle_inline(<>, limit, buffer, acc, options) do - handle_inline(rest, limit, [?*, ?*|buffer], acc, options) + defp handle_inline(<>, nil, buffer, acc, options) + when rest != "" do + handle_inline(rest, ?`, ["`"], [Enum.reverse(buffer) | acc], options) end - # A escape is not valid inside ` - defp handle_inline(<>, limit, buffer, acc, options) - when mark in [?_, ?*, ?`] and not(mark == limit and mark == ?`) do - handle_inline(rest, limit, [mark|buffer], acc, options) + # Clauses for handling escape + + defp handle_inline(<>, nil, buffer, acc, options) + when rest != "" do + handle_inline(rest, ?d, ["**"], [?\\, Enum.reverse(buffer) | acc], options) end - # Inline start - defp handle_inline(<>, nil, buffer, acc, options) when rest != "" do - handle_inline(rest, ?d, ["**"], [Enum.reverse(buffer)|acc], options) + defp handle_inline(<>, nil, buffer, acc, options) + when rest != "" and mark in @single do + handle_inline(rest, mark, [<>], [?\\, Enum.reverse(buffer) | acc], options) end - defp handle_inline(<>, nil, buffer, acc, options) when rest != "" and mark in @single do - handle_inline(rest, mark, [<>], [Enum.reverse(buffer)|acc], options) + defp handle_inline(<>, limit, buffer, acc, options) do + handle_inline(rest, limit, [?\\ | buffer], acc, options) + end + + # An escape is not valid inside ` + defp handle_inline(<>, limit, buffer, acc, options) + when not(mark == limit and mark == ?`) do + handle_inline(rest, limit, [mark | buffer], acc, options) end # Inline end - defp handle_inline(<>, ?d, buffer, acc, options) do - handle_inline(rest, nil, [], [inline_buffer(buffer, options)|acc], options) + + defp handle_inline(<>, ?d, buffer, acc, options) + when delimiter in @delimiters do + handle_inline(<>, nil, [], [inline_buffer(buffer, options) | acc], options) + end + + defp handle_inline(<>, mark, buffer, acc, options) + when delimiter in @delimiters and mark in @single do + handle_inline(<>, nil, [], [inline_buffer(buffer, options) | acc], options) + end + + defp handle_inline(<>, ?d, buffer, acc, options) + when rest == "" do 
+ handle_inline(<<>>, nil, [], [inline_buffer(buffer, options) | acc], options) end - defp handle_inline(<>, mark, buffer, acc, options) when mark in @single do - handle_inline(rest, nil, [], [inline_buffer(buffer, options)|acc], options) + defp handle_inline(<>, mark, buffer, acc, options) + when rest == "" and mark in @single do + handle_inline(<<>>, nil, [], [inline_buffer(buffer, options) | acc], options) end - defp handle_inline(<>, mark, buffer, acc, options) do - handle_inline(rest, mark, [char|buffer], acc, options) + defp handle_inline(<>, ?`, buffer, acc, options) do + handle_inline(rest, nil, [], [inline_buffer(buffer, options) | acc], options) + end + + # Catch all + + defp handle_inline(<>, mark, buffer, acc, options) do + handle_inline(rest, mark, [char | buffer], acc, options) end defp handle_inline(<<>>, _mark, buffer, acc, _options) do - IO.iodata_to_binary Enum.reverse([Enum.reverse(buffer)|acc]) + IO.iodata_to_binary Enum.reverse([Enum.reverse(buffer) | acc]) end defp inline_buffer(buffer, options) do - [h|t] = Enum.reverse([IO.ANSI.reset|buffer]) - [color_for(h, options)|t] + [h | t] = Enum.reverse([IO.ANSI.reset | buffer]) + [color_for(h, options) | t] end - defp color_for("`", colors), do: color(:doc_inline_code, colors) - defp color_for("_", colors), do: color(:doc_underline, colors) - defp color_for("*", colors), do: color(:doc_bold, colors) - defp color_for("**", colors), do: color(:doc_bold, colors) + defp color_for(mark, colors) do + case mark do + "`" -> color(:doc_inline_code, colors) + "_" -> color(:doc_underline, colors) + "*" -> color(:doc_bold, colors) + "**" -> color(:doc_bold, colors) + end + end defp color(style, colors) do color = colors[style] - enabled = colors[:enabled] - IO.ANSI.escape_fragment("%{#{color}}", enabled) + IO.ANSI.format_fragment(color, colors[:enabled]) end + + defp newline_after_block, do: IO.puts(IO.ANSI.reset) end diff --git a/lib/elixir/lib/io/stream.ex b/lib/elixir/lib/io/stream.ex index d275626226e..ee3c9612853 100644 --- a/lib/elixir/lib/io/stream.ex +++ b/lib/elixir/lib/io/stream.ex @@ -10,7 +10,7 @@ end defmodule IO.Stream do @moduledoc """ - Defines a `IO.Stream` struct returned by `IO.stream/2` and `IO.binstream/2`. + Defines an `IO.Stream` struct returned by `IO.stream/2` and `IO.binstream/2`. The following fields are public: @@ -18,20 +18,21 @@ defmodule IO.Stream do * `raw` - a boolean indicating if bin functions should be used * `line_or_bytes` - if reading should read lines or a given amount of bytes + It is worth noting that an IO stream has side effects and every time you go + over the stream you may get different results. 
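  For example (a sketch: this blocks until a line is available on standard input,
  and enumerating again would consume a different line, which is the side effect
  being described):

      IO.stream(:stdio, :line) |> Enum.take(1)
      #=> ["whatever line was typed\n"]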
+ """ defstruct device: nil, raw: true, line_or_bytes: :line + @type t :: %__MODULE__{} + @doc false def __build__(device, raw, line_or_bytes) do %IO.Stream{device: device, raw: raw, line_or_bytes: line_or_bytes} end defimpl Collectable do - def empty(stream) do - stream - end - def into(%{device: device, raw: raw} = stream) do {:ok, into(stream, device, raw)} end @@ -55,7 +56,7 @@ defmodule IO.Stream do true -> &IO.each_binstream(&1, line_or_bytes) false -> &IO.each_stream(&1, line_or_bytes) end - Stream.unfold(device, next_fun).(acc, fun) + Stream.resource(fn -> device end, next_fun, &(&1)).(acc, fun) end def count(_stream) do diff --git a/lib/elixir/lib/kernel.ex b/lib/elixir/lib/kernel.ex index a0778bbd4d1..6c5d9d9f349 100644 --- a/lib/elixir/lib/kernel.ex +++ b/lib/elixir/lib/kernel.ex @@ -7,11 +7,12 @@ import :elixir_bootstrap defmodule Kernel do @moduledoc """ - `Kernel` provides the default macros and functions - Elixir imports into your environment. These macros and functions - can be skipped or cherry-picked via the `import` macro. For - instance, if you want to tell Elixir not to import the `if` - macro, you can do: + Provides the default macros and functions Elixir imports into your + environment. + + These macros and functions can be skipped or cherry-picked via the + `import/2` macro. For instance, if you want to tell Elixir not to + import the `if/2` macro, you can do: import Kernel, except: [if: 2] @@ -19,8 +20,9 @@ defmodule Kernel do cannot be skipped. These are described in `Kernel.SpecialForms`. Some of the functions described in this module are inlined by - the Elixir compiler into their Erlang counterparts in the `:erlang` - module. Those functions are called BIFs (builtin internal functions) + the Elixir compiler into their Erlang counterparts in the + [`:erlang` module](http://www.erlang.org/doc/man/erlang.html). + Those functions are called BIFs (built-in internal functions) in Erlang-land and they exhibit interesting properties, as some of them are allowed in guards and others are used for compiler optimizations. @@ -57,7 +59,7 @@ defmodule Kernel do end @doc """ - Invokes the given `fun` with the array of arguments `args`. + Invokes the given `fun` with the list of arguments `args`. Inlined by the compiler. @@ -73,14 +75,14 @@ defmodule Kernel do end @doc """ - Invokes the given `fun` from `module` with the array of arguments `args`. + Invokes the given `fun` from `module` with the list of arguments `args`. Inlined by the compiler. ## Examples iex> apply(Enum, :reverse, [[1, 2, 3]]) - [3,2,1] + [3, 2, 1] """ @spec apply(module, atom, [any]) :: any @@ -92,7 +94,7 @@ defmodule Kernel do Extracts the part of the binary starting at `start` with length `length`. Binaries are zero-indexed. - If start or length references in any way outside the binary, an + If `start` or `length` reference in any way outside the binary, an `ArgumentError` exception is raised. Allowed in guard tests. Inlined by the compiler. 
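# The Collectable implementation touched in the IO.Stream hunk above also makes an
# IO stream usable as a sink, e.g. (a sketch that writes two lines to standard output):
#
#     Enum.into(["hello\n", "world\n"], IO.stream(:stdio, :line))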
@@ -102,13 +104,14 @@ defmodule Kernel do iex> binary_part("foo", 1, 2) "oo" - A negative length can be used to extract bytes at the end of a binary: + A negative `length` can be used to extract bytes that come *before* the byte + at `start`: - iex> binary_part("foo", 3, -1) - "o" + iex> binary_part("Hello", 5, -3) + "llo" """ - @spec binary_part(binary, pos_integer, integer) :: binary + @spec binary_part(binary, non_neg_integer, integer) :: binary def binary_part(binary, start, length) do :erlang.binary_part(binary, start, length) end @@ -135,8 +138,8 @@ defmodule Kernel do @doc """ Returns the number of bytes needed to contain `bitstring`. - That is, if the number of bits in `bitstring` is not divisible by 8, - the resulting number of bytes will be rounded up. This operation + That is, if the number of bits in `bitstring` is not divisible by 8, the + resulting number of bytes will be rounded up (by excess). This operation happens in constant time. Allowed in guard tests. Inlined by the compiler. @@ -158,19 +161,35 @@ defmodule Kernel do @doc """ Performs an integer division. - Raises an error if one of the arguments is not an integer. + Raises an `ArithmeticError` exception if one of the arguments is not an + integer, or when the `divisor` is `0`. Allowed in guard tests. Inlined by the compiler. + `div/2` performs *truncated* integer division. This means that + the result is always rounded towards zero. + + If you want to perform floored integer division (rounding towards negative infinity), + use `Integer.floor_div/2` instead. + ## Examples - iex> div(5, 2) - 2 + div(5, 2) + #=> 2 + + div(6, -4) + #=> -1 + + div(-99, 2) + #=> -49 + + div(100, 0) + #=> ** (ArithmeticError) bad argument in arithmetic expression """ - @spec div(integer, integer) :: integer - def div(left, right) do - :erlang.div(left, right) + @spec div(integer, neg_integer | pos_integer) :: integer + def div(dividend, divisor) do + :erlang.div(dividend, divisor) end @doc """ @@ -183,9 +202,56 @@ defmodule Kernel do ## Examples + When a process reaches its end, by default it exits with + reason `:normal`. You can also call `exit/1` explicitly if you + want to terminate a process but not signal any failure: + exit(:normal) + + In case something goes wrong, you can also use `exit/1` with + a different reason: + exit(:seems_bad) + If the exit reason is not `:normal`, all the processes linked to the process + that exited will crash (unless they are trapping exits). + + ## OTP exits + + Exits are used by the OTP to determine if a process exited abnormally + or not. The following exits are considered "normal": + + * `exit(:normal)` + * `exit(:shutdown)` + * `exit({:shutdown, term})` + + Exiting with any other reason is considered abnormal and treated + as a crash. This means the default supervisor behaviour kicks in, + error reports are emitted, etc. + + This behaviour is relied on in many different places. For example, + `ExUnit` uses `exit(:shutdown)` when exiting the test process to + signal linked processes, supervision trees and so on to politely + shutdown too. + + ## CLI exits + + Building on top of the exit signals mentioned above, if the + process started by the command line exits with any of the three + reasons above, its exit is considered normal and the Operating + System process will exit with status 0. 
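  As a concrete sketch of the linking behaviour described above: with exits
  trapped, the caller receives the exit signal as a message instead of crashing
  (without trapping, any non-`:normal` reason such as this one would bring the
  caller down as well):

      Process.flag(:trap_exit, true)
      pid = spawn_link(fn -> exit({:shutdown, :cleanup_done}) end)
      receive do
        {:EXIT, ^pid, reason} -> reason
      end
      #=> {:shutdown, :cleanup_done}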
+ + It is, however, possible to customize the Operating System exit + signal by invoking: + + exit({:shutdown, integer}) + + This will cause the OS process to exit with the status given by + `integer` while signaling all linked OTP processes to politely + shutdown. + + Any other exit reason will cause the OS process to exit with + status `1` and linked OTP processes to crash. """ @spec exit(term) :: no_return def exit(reason) do @@ -193,11 +259,25 @@ defmodule Kernel do end @doc """ - Returns the head of a list, raises `badarg` if the list is empty. + Returns the head of a list. Raises `ArgumentError` if the list is empty. + + It works with improper lists. + + Allowed in guard tests. Inlined by the compiler. + + ## Examples + + hd([1, 2, 3, 4]) + #=> 1 + + hd([]) + #=> ** (ArgumentError) argument error + + hd([1 | 2]) + #=> 1 - Inlined by the compiler. """ - @spec hd(list) :: term + @spec hd(nonempty_maybe_improper_list(elem, any)) :: elem when elem: term def hd(list) do :erlang.hd(list) end @@ -218,6 +298,14 @@ defmodule Kernel do A binary always contains a complete number of bytes. Allowed in guard tests. Inlined by the compiler. + + ## Examples + + iex> is_binary "foo" + true + iex> is_binary <<1::3>> + false + """ @spec is_binary(term) :: boolean def is_binary(term) do @@ -228,6 +316,14 @@ defmodule Kernel do Returns `true` if `term` is a bitstring (including a binary); otherwise returns `false`. Allowed in guard tests. Inlined by the compiler. + + ## Examples + + iex> is_bitstring "foo" + true + iex> is_bitstring <<1::3>> + true + """ @spec is_bitstring(term) :: boolean def is_bitstring(term) do @@ -235,8 +331,8 @@ defmodule Kernel do end @doc """ - Returns `true` if `term` is either the atom `true` or the atom `false` (i.e. a boolean); - otherwise returns false. + Returns `true` if `term` is either the atom `true` or the atom `false` (i.e., + a boolean); otherwise returns `false`. Allowed in guard tests. Inlined by the compiler. """ @@ -246,7 +342,7 @@ defmodule Kernel do end @doc """ - Returns `true` if `term` is a floating point number; otherwise returns `false`. + Returns `true` if `term` is a floating-point number; otherwise returns `false`. Allowed in guard tests. Inlined by the compiler. """ @@ -270,6 +366,14 @@ defmodule Kernel do otherwise returns `false`. Allowed in guard tests. Inlined by the compiler. + + ## Examples + + iex> is_function(fn(x) -> x * 2 end, 1) + true + iex> is_function(fn(x) -> x * 2 end, 2) + false + """ @spec is_function(term, non_neg_integer) :: boolean def is_function(term, arity) do @@ -297,7 +401,7 @@ defmodule Kernel do end @doc """ - Returns `true` if `term` is either an integer or a floating point number; + Returns `true` if `term` is either an integer or a floating-point number; otherwise returns `false`. Allowed in guard tests. Inlined by the compiler. @@ -308,7 +412,7 @@ defmodule Kernel do end @doc """ - Returns `true` if `term` is a pid (process identifier); otherwise returns `false`. + Returns `true` if `term` is a PID (process identifier); otherwise returns `false`. Allowed in guard tests. Inlined by the compiler. """ @@ -394,9 +498,17 @@ defmodule Kernel do @doc """ Returns the size of a map. + The size of a map is the number of key-value pairs that the map contains. + This operation happens in constant time. Allowed in guard tests. Inlined by the compiler. 
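  The type checks above are most often used as guards in multi-clause functions,
  for example (a sketch; `Describe` and `describe/1` are invented names):

      defmodule Describe do
        def describe(term) when is_binary(term), do: "binary"
        def describe(term) when is_bitstring(term), do: "bitstring"
        def describe(term) when is_number(term), do: "number"
        def describe(_term), do: "something else"
      end

      Describe.describe("foo")    #=> "binary"
      Describe.describe(<<1::3>>) #=> "bitstring"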
+ + ## Examples + + iex> map_size(%{a: "foo", b: "bar"}) + 2 + """ @spec map_size(map) :: non_neg_integer def map_size(map) do @@ -404,9 +516,10 @@ defmodule Kernel do end @doc """ - Return the biggest of the two given terms according to - Erlang's term ordering. If the terms compare equal, the - first one is returned. + Returns the biggest of the two given terms according to + Erlang's term ordering. + + If the terms compare equal, the first one is returned. Inlined by the compiler. @@ -414,17 +527,20 @@ defmodule Kernel do iex> max(1, 2) 2 + iex> max(:a, :b) + :b """ - @spec max(term, term) :: term + @spec max(first, second) :: (first | second) when first: term, second: term def max(first, second) do :erlang.max(first, second) end @doc """ - Return the smallest of the two given terms according to - Erlang's term ordering. If the terms compare equal, the - first one is returned. + Returns the smallest of the two given terms according to + Erlang's term ordering. + + If the terms compare equal, the first one is returned. Inlined by the compiler. @@ -432,9 +548,11 @@ defmodule Kernel do iex> min(1, 2) 1 + iex> min("foo", "bar") + "bar" """ - @spec min(term, term) :: term + @spec min(first, second) :: (first | second) when first: term, second: term def min(first, second) do :erlang.min(first, second) end @@ -452,20 +570,24 @@ defmodule Kernel do @doc """ Returns the node where the given argument is located. - The argument can be a pid, a reference, or a port. - If the local node is not alive, `nonode@nohost` is returned. + The argument can be a PID, a reference, or a port. + If the local node is not alive, `:nonode@nohost` is returned. Allowed in guard tests. Inlined by the compiler. """ - @spec node(pid|reference|port) :: node + @spec node(pid | reference | port) :: node def node(arg) do :erlang.node(arg) end @doc """ - Calculates the remainder of an integer division. + Computes the remainder of an integer division. + + `rem/2` uses truncated division, which means that + the result will always have the sign of the `dividend`. - Raises an error if one of the arguments is not an integer. + Raises an `ArithmeticError` exception if one of the arguments is not an + integer, or when the `divisor` is `0`. Allowed in guard tests. Inlined by the compiler. @@ -473,25 +595,37 @@ defmodule Kernel do iex> rem(5, 2) 1 + iex> rem(6, -4) + 2 """ - @spec rem(integer, integer) :: integer - def rem(left, right) do - :erlang.rem(left, right) + @spec rem(integer, neg_integer | pos_integer) :: integer + def rem(dividend, divisor) do + :erlang.rem(dividend, divisor) end @doc """ - Returns an integer by rounding the given number. + Rounds a number to the nearest integer. Allowed in guard tests. Inlined by the compiler. ## Examples - iex> round(5.5) + iex> round(5.6) 6 + iex> round(5.2) + 5 + + iex> round(-9.9) + -10 + + iex> round(-9) + -9 + """ - @spec round(number) :: integer + @spec round(float) :: integer + @spec round(value) :: value when value: integer def round(number) do :erlang.round(number) end @@ -499,7 +633,7 @@ defmodule Kernel do @doc """ Sends a message to the given `dest` and returns the message. - `dest` may be a remote or local pid, a (local) port, a locally + `dest` may be a remote or local PID, a (local) port, a locally registered name, or a tuple `{registered_name, node}` for a registered name at another node. 
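# The `div/2` and `rem/2` docs above both stress truncation towards zero; the
# floored counterparts live in `Integer`. A quick contrast (a sketch, values
# worked out by hand):
#
#     div(-5, 2)                #=> -2 (truncated towards zero)
#     rem(-5, 2)                #=> -1 (sign follows the dividend)
#     Integer.floor_div(-5, 2)  #=> -3 (rounded towards negative infinity)
#     Integer.mod(-5, 2)        #=> 1  (sign follows the divisor)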
@@ -511,13 +645,13 @@ defmodule Kernel do :hello """ - @spec send(dest :: pid | port | atom | {atom, node}, msg) :: msg when msg: any - def send(dest, msg) do - :erlang.send(dest, msg) + @spec send(dest :: pid | port | atom | {atom, node}, message) :: message when message: any + def send(dest, message) do + :erlang.send(dest, message) end @doc """ - Returns the pid (process identifier) of the calling process. + Returns the PID (process identifier) of the calling process. Allowed in guard clauses. Inlined by the compiler. """ @@ -527,17 +661,23 @@ defmodule Kernel do end @doc """ - Spawns the given function and returns its pid. + Spawns the given function and returns its PID. + + Typically developers do not use the `spawn` functions, instead they use + abstractions such as `Task`, `GenServer` and `Agent`, built on top of + `spawn`, that spawns processes with more conveniences in terms of + introspection and debugging. - Check the modules `Process` and `Node` for other functions - to handle processes, including spawning functions in nodes. + Check the `Process` module for more process-related functions. + + The anonymous function receives 0 arguments, and may return any value. Inlined by the compiler. ## Examples - current = Kernel.self - child = spawn(fn -> send current, {Kernel.self, 1 + 2} end) + current = self() + child = spawn(fn -> send current, {self(), 1 + 2} end) receive do {^child, 3} -> IO.puts "Received 3 back" @@ -550,11 +690,15 @@ defmodule Kernel do end @doc """ - Spawns the given module and function passing the given args - and returns its pid. + Spawns the given function `fun` from the given `module` passing it the given + `args` and returns its PID. + + Typically developers do not use the `spawn` functions, instead they use + abstractions such as `Task`, `GenServer` and `Agent`, built on top of + `spawn`, that spawns processes with more conveniences in terms of + introspection and debugging. - Check the modules `Process` and `Node` for other functions - to handle processes, including spawning functions in nodes. + Check the `Process` module for more process-related functions. Inlined by the compiler. @@ -569,17 +713,24 @@ defmodule Kernel do end @doc """ - Spawns the given function, links it to the current process and returns its pid. + Spawns the given function, links it to the current process, and returns its PID. + + Typically developers do not use the `spawn` functions, instead they use + abstractions such as `Task`, `GenServer` and `Agent`, built on top of + `spawn`, that spawns processes with more conveniences in terms of + introspection and debugging. + + Check the `Process` module for more process-related functions. For more + information on linking, check `Process.link/1`. - Check the modules `Process` and `Node` for other functions - to handle processes, including spawning functions in nodes. + The anonymous function receives 0 arguments, and may return any value. Inlined by the compiler. ## Examples - current = Kernel.self - child = spawn_link(fn -> send current, {Kernel.self, 1 + 2} end) + current = self() + child = spawn_link(fn -> send(current, {self(), 1 + 2}) end) receive do {^child, 3} -> IO.puts "Received 3 back" @@ -592,11 +743,16 @@ defmodule Kernel do end @doc """ - Spawns the given module and function passing the given args, - links it to the current process and returns its pid. + Spawns the given function `fun` from the given `module` passing it the given + `args`, links it to the current process, and returns its PID. 
+ + Typically developers do not use the `spawn` functions, instead they use + abstractions such as `Task`, `GenServer` and `Agent`, built on top of + `spawn`, that spawns processes with more conveniences in terms of + introspection and debugging. - Check the modules `Process` and `Node` for other functions - to handle processes, including spawning functions in nodes. + Check the `Process` module for more process-related functions. For more + information on linking, check `Process.link/1`. Inlined by the compiler. @@ -611,18 +767,24 @@ defmodule Kernel do end @doc """ - Spawns the given function, monitors it and returns its pid + Spawns the given function, monitors it and returns its PID and monitoring reference. - Check the modules `Process` and `Node` for other functions - to handle processes, including spawning functions in nodes. + Typically developers do not use the `spawn` functions, instead they use + abstractions such as `Task`, `GenServer` and `Agent`, built on top of + `spawn`, that spawns processes with more conveniences in terms of + introspection and debugging. + + Check the `Process` module for more process-related functions. + + The anonymous function receives 0 arguments, and may return any value. Inlined by the compiler. ## Examples - current = Kernel.self - spawn_monitor(fn -> send current, {Kernel.self, 1 + 2} end) + current = self() + spawn_monitor(fn -> send current, {self(), 1 + 2} end) """ @spec spawn_monitor((() -> any)) :: {pid, reference} @@ -632,10 +794,14 @@ defmodule Kernel do @doc """ Spawns the given module and function passing the given args, - monitors it and returns its pid and monitoring reference. + monitors it and returns its PID and monitoring reference. - Check the modules `Process` and `Node` for other functions - to handle processes, including spawning functions in nodes. + Typically developers do not use the `spawn` functions, instead they use + abstractions such as `Task`, `GenServer` and `Agent`, built on top of + `spawn`, that spawns processes with more conveniences in terms of + introspection and debugging. + + Check the `Process` module for more process-related functions. Inlined by the compiler. @@ -650,7 +816,9 @@ defmodule Kernel do end @doc """ - A non-local return from a function. Check `Kernel.SpecialForms.try/1` for more information. + A non-local return from a function. + + Check `Kernel.SpecialForms.try/1` for more information. Inlined by the compiler. """ @@ -662,25 +830,53 @@ defmodule Kernel do @doc """ Returns the tail of a list. Raises `ArgumentError` if the list is empty. + It works with improper lists. + Allowed in guard tests. Inlined by the compiler. + + ## Examples + + tl([1, 2, 3, :go]) + #=> [2, 3, :go] + + tl([]) + #=> ** (ArgumentError) argument error + + tl([:one]) + #=> [] + + tl([:a, :b | :c]) + #=> [:b | :c] + + tl([:a | %{b: 1}]) + #=> %{b: 1} + """ - @spec tl(maybe_improper_list) :: maybe_improper_list + @spec tl(nonempty_maybe_improper_list(elem, tail)) :: + maybe_improper_list(elem, tail) | tail when elem: term, tail: term def tl(list) do :erlang.tl(list) end @doc """ - Returns an integer by truncating the given number. + Returns the integer part of `number`. Allowed in guard tests. Inlined by the compiler. 
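  Tying together the `spawn_monitor/1` documentation above, the monitoring
  reference shows up in the `:DOWN` message once the spawned process exits
  (a sketch; the function returns immediately, so the reason is `:normal`):

      {pid, ref} = spawn_monitor(fn -> :ok end)
      receive do
        {:DOWN, ^ref, :process, ^pid, reason} -> reason
      end
      #=> :normal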
## Examples - iex> trunc(5.5) + iex> trunc(5.4) 5 + iex> trunc(-5.99) + -5 + + iex> trunc(-5) + -5 + """ - @spec trunc(number) :: integer + @spec trunc(value) :: value when value: integer + @spec trunc(float) :: integer def trunc(number) do :erlang.trunc(number) end @@ -691,6 +887,12 @@ defmodule Kernel do This operation happens in constant time. Allowed in guard tests. Inlined by the compiler. + + ## Examples + + iex> tuple_size {:a, :b, :c} + 3 + """ @spec tuple_size(tuple) :: non_neg_integer def tuple_size(tuple) do @@ -698,7 +900,7 @@ defmodule Kernel do end @doc """ - Arithmetic plus. + Arithmetic addition. Allowed in guard tests. Inlined by the compiler. @@ -708,13 +910,16 @@ defmodule Kernel do 3 """ - @spec (number + number) :: number + @spec (integer + integer) :: integer + @spec (float + float) :: float + @spec (integer + float) :: float + @spec (float + integer) :: float def left + right do :erlang.+(left, right) end @doc """ - Arithmetic minus. + Arithmetic subtraction. Allowed in guard tests. Inlined by the compiler. @@ -724,7 +929,10 @@ defmodule Kernel do -1 """ - @spec (number - number) :: number + @spec (integer - integer) :: integer + @spec (float - float) :: float + @spec (integer - float) :: float + @spec (float - integer) :: float def left - right do :erlang.-(left, right) end @@ -740,7 +948,7 @@ defmodule Kernel do 1 """ - @spec (+number) :: number + @spec (+value) :: value when value: number def (+value) do :erlang.+(value) end @@ -756,7 +964,10 @@ defmodule Kernel do -2 """ - @spec (-number) :: number + @spec (-0) :: 0 + @spec (-pos_integer) :: neg_integer + @spec (-neg_integer) :: pos_integer + @spec (-float) :: float def (-value) do :erlang.-(value) end @@ -772,7 +983,10 @@ defmodule Kernel do 2 """ - @spec (number * number) :: number + @spec (integer * integer) :: integer + @spec (float * float) :: float + @spec (integer * float) :: float + @spec (float * integer) :: float def left * right do :erlang.*(left, right) end @@ -780,18 +994,26 @@ defmodule Kernel do @doc """ Arithmetic division. - The result is always a float. Use `div` and `rem` if you want - a natural division or the remainder. + The result is always a float. Use `div/2` and `rem/2` if you want + an integer division or the remainder. + + Raises `ArithmeticError` if `right` is 0 or 0.0. Allowed in guard tests. Inlined by the compiler. ## Examples - iex> 1 / 2 - 0.5 + 1 / 2 + #=> 0.5 + + -3.0 / 2.0 + #=> -1.5 - iex> 2 / 1 - 2.0 + 5 / 1 + #=> 5.0 + + 7 / 0 + #=> ** (ArithmeticError) bad argument in arithmetic expression """ @spec (number / number) :: float @@ -800,18 +1022,37 @@ defmodule Kernel do end @doc """ - Concatenates two lists. + Concatenates a proper list and a term, returning a list. - Allowed in guard tests. Inlined by the compiler. + The complexity of `a ++ b` is proportional to `length(a)`, so avoid repeatedly + appending to lists of arbitrary length, e.g. `list ++ [item]`. + Instead, consider prepending via `[item | rest]` and then reversing. + + If the `right` operand is not a proper list, it returns an improper list. + If the `left` operand is not a proper list, it raises `ArgumentError`. + + Inlined by the compiler. 
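  To make the cost note above concrete, the usual linear-time way to build a list
  is to prepend and reverse once at the end, instead of appending with `++` inside
  a loop (a sketch):

      # appending copies the accumulated list on every step
      Enum.reduce(1..5, [], fn i, acc -> acc ++ [i * i] end)
      #=> [1, 4, 9, 16, 25]

      # prepending is constant time per step; reverse once at the end
      Enum.reduce(1..5, [], fn i, acc -> [i * i | acc] end) |> Enum.reverse()
      #=> [1, 4, 9, 16, 25]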
## Examples iex> [1] ++ [2, 3] - [1,2,3] + [1, 2, 3] iex> 'foo' ++ 'bar' 'foobar' + # returns an improper list + iex> [1] ++ 2 + [1 | 2] + + # returns a proper list + iex> [1] ++ [2] + [1, 2] + + # improper list on the right will return an improper list + iex> [1] ++ [2 | 3] + [1, 2 | 3] + """ @spec (list ++ term) :: maybe_improper_list def left ++ right do @@ -819,10 +1060,15 @@ defmodule Kernel do end @doc """ - Removes the first occurrence of an item on the left + Removes the first occurrence of an item on the left list for each item on the right. - Allowed in guard tests. Inlined by the compiler. + The complexity of `a -- b` is proportional to `length(a) * length(b)`, + meaning that it will be very slow if both `a` and `b` are long lists. + In such cases, consider converting each list to a `MapSet` and using + `MapSet.difference/2`. + + Inlined by the compiler. ## Examples @@ -830,7 +1076,7 @@ defmodule Kernel do [3] iex> [1, 2, 3, 2, 1] -- [1, 2, 2] - [3,1] + [3, 1] """ @spec (list -- list) :: list @@ -838,13 +1084,10 @@ defmodule Kernel do :erlang.--(left, right) end - @doc false - def left xor right do - :erlang.xor(left, right) - end - @doc """ - Boolean not. Argument must be a boolean. + Boolean not. + + `arg` must be a boolean; if it's not, an `ArgumentError` exception is raised. Allowed in guard tests. Inlined by the compiler. @@ -854,9 +1097,10 @@ defmodule Kernel do true """ - @spec not(boolean) :: boolean - def not(arg) do - :erlang.not(arg) + @spec not(true) :: false + @spec not(false) :: true + def not(value) do + :erlang.not(value) end @doc """ @@ -934,7 +1178,7 @@ defmodule Kernel do @doc """ Returns `true` if the two items are equal. - This operator considers 1 and 1.0 to be equal. For match + This operator considers 1 and 1.0 to be equal. For stricter semantics, use `===` instead. All terms in Elixir can be compared with each other. @@ -980,11 +1224,12 @@ defmodule Kernel do end @doc """ - Returns `true` if the two items are match. + Returns `true` if the two items are exactly equal. - This operator gives the same semantics as the one existing in - pattern matching, i.e., `1` and `1.0` are equal, but they do - not match. + The items are only considered to be exactly equal if they + have the same value and are of the same type. For example, + `1 == 1.0` returns true, but since they are of different + types, `1 === 1.0` returns false. All terms in Elixir can be compared with each other. @@ -1005,7 +1250,7 @@ defmodule Kernel do end @doc """ - Returns `true` if the two items do not match. + Returns `true` if the two items are not exactly equal. All terms in Elixir can be compared with each other. @@ -1026,15 +1271,23 @@ defmodule Kernel do end @doc """ - Get the element at the zero-based `index` in `tuple`. + Gets the element at the zero-based `index` in `tuple`. + + It raises `ArgumentError` when index is negative or it is out of range of the tuple elements. Allowed in guard tests. Inlined by the compiler. - ## Example + ## Examples - iex> tuple = {:foo, :bar, 3} - iex> elem(tuple, 1) - :bar + tuple = {:foo, :bar, 3} + elem(tuple, 1) + #=> :bar + + elem({}, 0) + #=> ** (ArgumentError) argument error + + elem({:foo, :bar}, 2) + #=> ** (ArgumentError) argument error """ @spec elem(tuple, non_neg_integer) :: term @@ -1043,11 +1296,11 @@ defmodule Kernel do end @doc """ - Puts the element in `tuple` at the zero-based `index` to the given `value`. + Inserts `value` at the given zero-based `index` in `tuple`. Inlined by the compiler. 
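  Following up on the `--/2` performance note above, the set-based alternative it
  suggests looks like this; note that unlike `--/2` it ignores duplicates and
  ordering (a sketch):

      left = Enum.to_list(1..10)
      right = [2, 4, 6]

      left -- right
      #=> [1, 3, 5, 7, 8, 9, 10]

      MapSet.difference(MapSet.new(left), MapSet.new(right)) |> Enum.sort()
      #=> [1, 3, 5, 7, 8, 9, 10]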
- ## Example + ## Examples iex> tuple = {:foo, :bar, 3} iex> put_elem(tuple, 0, :baz) @@ -1062,8 +1315,14 @@ defmodule Kernel do ## Implemented in Elixir @doc """ - Boolean or. Requires only the first argument to be a - boolean since it short-circuits. + Boolean or. + + If the first argument is `true`, `true` is returned; otherwise, the second + argument is returned. + + Requires only the first argument to be a boolean since it short-circuits. + If the first argument is not a boolean, an `ArgumentError` exception is + raised. Allowed in guard tests. @@ -1071,15 +1330,22 @@ defmodule Kernel do iex> true or false true + iex> false or 42 + 42 """ defmacro left or right do - quote do: __op__(:orelse, unquote(left), unquote(right)) + quote do: :erlang.orelse(unquote(left), unquote(right)) end @doc """ - Boolean and. Requires only the first argument to be a - boolean since it short-circuits. + Boolean and. + + If the first argument is `false`, `false` is returned; otherwise, the second + argument is returned. + + Requires only the first argument to be a boolean since it short-circuits. If + the first argument is not a boolean, an `ArgumentError` exception is raised. Allowed in guard tests. @@ -1087,16 +1353,21 @@ defmodule Kernel do iex> true and false false + iex> true and "yay!" + "yay!" """ defmacro left and right do - quote do: __op__(:andalso, unquote(left), unquote(right)) + quote do: :erlang.andalso(unquote(left), unquote(right)) end @doc """ - Receives any argument and returns `true` if it is `false` - or `nil`. Returns `false` otherwise. Not allowed in guard - clauses. + Boolean not. + + Receives any argument (not just booleans) and returns `true` if the argument + is `false` or `nil`; returns `false` otherwise. + + Not allowed in guard clauses. ## Examples @@ -1107,20 +1378,20 @@ defmodule Kernel do true """ - defmacro !(arg) + defmacro !(value) - defmacro !({:!, _, [arg]}) do + defmacro !({:!, _, [value]}) do optimize_boolean(quote do - case unquote(arg) do + case unquote(value) do x when x in [false, nil] -> false _ -> true end end) end - defmacro !(arg) do + defmacro !(value) do optimize_boolean(quote do - case unquote(arg) do + case unquote(value) do x when x in [false, nil] -> true _ -> false end @@ -1135,23 +1406,25 @@ defmodule Kernel do iex> "foo" <> "bar" "foobar" - The `<>` operator can also be used in guard clauses as + The `<>` operator can also be used in pattern matching (and guard clauses) as long as the first part is a literal binary: iex> "foo" <> x = "foobar" iex> x "bar" + `x <> "bar" = "foobar"` would have resulted in a `CompileError` exception. + """ defmacro left <> right do concats = extract_concatenations({:<>, [], [left, right]}) - quote do: << unquote_splicing(concats) >> + quote do: <> end # Extracts concatenations in order to optimize many # concatenations into one single clause. defp extract_concatenations({:<>, _, [left, right]}) do - [wrap_concatenation(left)|extract_concatenations(right)] + [wrap_concatenation(left) | extract_concatenations(right)] end defp extract_concatenations(other) do @@ -1169,16 +1442,21 @@ defmodule Kernel do @doc """ Raises an exception. - If the argument is a binary, it raises `RuntimeError` + If the argument `msg` is a binary, it raises a `RuntimeError` exception using the given argument as message. - If an atom, it will become a call to `raise(atom, [])`. + If `msg` is an atom, it just calls `raise/2` with the atom as the first + argument and `[]` as the second argument. 
- If anything else, it will just raise the given exception. + If `msg` is an exception struct, it is raised as is. + + If `msg` is anything else, `raise` will fail with an `ArgumentError` + exception. ## Examples - raise "Given values do not match" + iex> raise "oops" + ** (RuntimeError) oops try do 1 + :foo @@ -1189,22 +1467,22 @@ defmodule Kernel do end """ - defmacro raise(msg) do + defmacro raise(message) do # Try to figure out the type at compilation time - # to avoid dead code and make dialyzer happy. - msg = case not is_binary(msg) and bootstraped?(Macro) do - true -> Macro.expand(msg, __CALLER__) - false -> msg + # to avoid dead code and make Dialyzer happy. + message = case not is_binary(message) and bootstrapped?(Macro) do + true -> Macro.expand(message, __CALLER__) + false -> message end - case msg do - msg when is_binary(msg) -> + case message do + message when is_binary(message) -> quote do - :erlang.error RuntimeError.exception(unquote(msg)) + :erlang.error RuntimeError.exception(unquote(message)) end - {:<<>>, _, _} = msg -> + {:<<>>, _, _} = message -> quote do - :erlang.error RuntimeError.exception(unquote(msg)) + :erlang.error RuntimeError.exception(unquote(message)) end alias when is_atom(alias) -> quote do @@ -1212,14 +1490,7 @@ defmodule Kernel do end _ -> quote do - case unquote(msg) do - msg when is_binary(msg) -> - :erlang.error RuntimeError.exception(msg) - atom when is_atom(atom) -> - :erlang.error atom.exception([]) - %{__struct__: struct, __exception__: true} = other when is_atom(struct) -> - :erlang.error other - end + :erlang.error Kernel.Utils.raise(unquote(message)) end end end @@ -1227,12 +1498,13 @@ defmodule Kernel do @doc """ Raises an exception. - Calls `.exception` on the given argument passing - the attributes in order to retrieve the appropriate exception - structure. + Calls the `exception/1` function on the given argument (which has to be a + module name like `ArgumentError` or `RuntimeError`) passing `attrs` as the + attributes in order to retrieve the exception struct. - Any module defined via `defexception/1` automatically - implements `exception(attrs)` callback expected by `raise/2`. + Any module that contains a call to the `defexception/1` macro automatically + implements the `c:Exception.exception/1` callback expected by `raise/2`. + For more information, see `defexception/1`. ## Examples @@ -1240,9 +1512,9 @@ defmodule Kernel do ** (ArgumentError) Sample """ - defmacro raise(exception, attrs) do + defmacro raise(exception, attributes) do quote do - :erlang.error unquote(exception).exception(unquote(attrs)) + :erlang.error unquote(exception).exception(unquote(attributes)) end end @@ -1251,52 +1523,55 @@ defmodule Kernel do Works like `raise/1` but does not generate a new stacktrace. - Notice that `System.stacktrace` returns the stacktrace + Notice that `System.stacktrace/0` returns the stacktrace of the last exception. That said, it is common to assign the stacktrace as the first expression inside a `rescue` clause as any other exception potentially raised (and - rescued) in between the rescue clause and the raise call - may change the `System.stacktrace` value. + rescued) between the rescue clause and the raise call + may change the `System.stacktrace/0` value. 
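To complement the `raise/2` documentation above, a minimal sketch of raising a custom exception defined with `defexception/1`; the `MyApp.QueryError` module and its fields are hypothetical names used only for illustration:

    defmodule MyApp.QueryError do
      # hypothetical exception module: a default message plus an extra field
      defexception message: "query failed", query: nil
    end

    raise MyApp.QueryError, query: "SELECT 1"
    #=> ** (MyApp.QueryError) query failed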
## Examples try do - raise "Oops" + raise "oops" rescue exception -> stacktrace = System.stacktrace - if Exception.message(exception) == "Oops" do + if Exception.message(exception) == "oops" do reraise exception, stacktrace end end """ - defmacro reraise(msg, stacktrace) do + defmacro reraise(message, stacktrace) do # Try to figure out the type at compilation time - # to avoid dead code and make dialyzer happy. + # to avoid dead code and make Dialyzer happy. - case Macro.expand(msg, __CALLER__) do - msg when is_binary(msg) -> + case Macro.expand(message, __CALLER__) do + message when is_binary(message) -> quote do - :erlang.raise :error, RuntimeError.exception(unquote(msg)), unquote(stacktrace) + :erlang.raise :error, RuntimeError.exception(unquote(message)), unquote(stacktrace) end - {:<<>>, _, _} = msg -> + {:<<>>, _, _} = message -> quote do - :erlang.raise :error, RuntimeError.exception(unquote(msg)), unquote(stacktrace) + :erlang.raise :error, RuntimeError.exception(unquote(message)), unquote(stacktrace) end alias when is_atom(alias) -> quote do :erlang.raise :error, unquote(alias).exception([]), unquote(stacktrace) end - msg -> + message -> quote do stacktrace = unquote(stacktrace) - case unquote(msg) do - msg when is_binary(msg) -> - :erlang.raise :error, RuntimeError.exception(msg), stacktrace + case unquote(message) do + message when is_binary(message) -> + :erlang.raise :error, RuntimeError.exception(message), stacktrace atom when is_atom(atom) -> :erlang.raise :error, atom.exception([]), stacktrace %{__struct__: struct, __exception__: true} = other when is_atom(struct) -> :erlang.raise :error, other, stacktrace + other -> + message = "reraise/2 expects a module name, string or exception as the first argument, got: #{inspect other}" + :erlang.error ArgumentError.exception(message) end end end @@ -1305,29 +1580,31 @@ defmodule Kernel do @doc """ Raises an exception preserving a previous stacktrace. - Works like `raise/2` but does not generate a new stacktrace. - - See `reraise/2` for more details. + `reraise/3` works like `reraise/2`, except it passes arguments to the + `exception/1` function as explained in `raise/2`. ## Examples try do - raise "Oops" + raise "oops" rescue exception -> stacktrace = System.stacktrace reraise WrapperError, [exception: exception], stacktrace end + """ - defmacro reraise(exception, attrs, stacktrace) do + defmacro reraise(exception, attributes, stacktrace) do quote do - :erlang.raise :error, unquote(exception).exception(unquote(attrs)), unquote(stacktrace) + :erlang.raise :error, unquote(exception).exception(unquote(attributes)), unquote(stacktrace) end end @doc """ Matches the term on the left against the regular expression or string on the - right. Returns true if `left` matches `right` (if it's a regular expression) + right. + + Returns `true` if `left` matches `right` (if it's a regular expression) or contains `right` (if it's a string). ## Examples @@ -1344,7 +1621,13 @@ defmodule Kernel do iex> "abcd" =~ "ad" false + iex> "abcd" =~ "" + true + """ + @spec (String.t =~ (String.t | Regex.t)) :: boolean + def left =~ "" when is_binary(left), do: true + def left =~ right when is_binary(left) and is_binary(right) do :binary.match(left, right) != :nomatch end @@ -1354,8 +1637,8 @@ defmodule Kernel do end @doc ~S""" - Inspect the given argument according to the `Inspect` protocol. - The second argument is a keywords list with options to control + Inspects the given argument according to the `Inspect` protocol. 
+ The second argument is a keyword list with options to control inspection. ## Options @@ -1372,16 +1655,31 @@ defmodule Kernel do iex> inspect [1, 2, 3, 4, 5], limit: 3 "[1, 2, 3, ...]" - iex> inspect("josé" <> <<0>>) - "<<106, 111, 115, 195, 169, 0>>" + iex> inspect [1, 2, 3], pretty: true, width: 0 + "[1,\n 2,\n 3]" + + iex> inspect("olá" <> <<0>>) + "<<111, 108, 195, 161, 0>>" + + iex> inspect("olá" <> <<0>>, binaries: :as_strings) + "\"olá\\0\"" - iex> inspect("josé" <> <<0>>, binaries: :as_strings) - "\"josé\\000\"" + iex> inspect("olá", binaries: :as_binaries) + "<<111, 108, 195, 161>>" - iex> inspect("josé", binaries: :as_binaries) - "<<106, 111, 115, 195, 169>>" + iex> inspect('bar') + "'bar'" - Note that the inspect protocol does not necessarily return a valid + iex> inspect([0 | 'bar']) + "[0, 98, 97, 114]" + + iex> inspect(100, base: :octal) + "0o144" + + iex> inspect(100, base: :hex) + "0x64" + + Note that the `Inspect` protocol does not necessarily return a valid representation of an Elixir term. In such cases, the inspected result must start with `#`. For example, inspecting a function will return: @@ -1390,110 +1688,192 @@ defmodule Kernel do """ @spec inspect(Inspect.t, Keyword.t) :: String.t - def inspect(arg, opts \\ []) when is_list(opts) do - opts = struct(Inspect.Opts, opts) + def inspect(term, opts \\ []) when is_list(opts) do + opts = struct(Inspect.Opts, opts) limit = case opts.pretty do true -> opts.width false -> :infinity end - Inspect.Algebra.pretty(Inspect.Algebra.to_doc(arg, opts), limit) + IO.iodata_to_binary( + Inspect.Algebra.format(Inspect.Algebra.to_doc(term, opts), limit) + ) end @doc """ Creates and updates structs. - The struct argument may be an atom (which defines `defstruct`) - or a struct itself. The second argument is any Enumerable that - emits two-item tuples (key-value) during enumeration. + The `struct` argument may be an atom (which defines `defstruct`) + or a `struct` itself. The second argument is any `Enumerable` that + emits two-element tuples (key-value pairs) during enumeration. - If one of the keys in the Enumerable does not exist in the struct, - they are automatically discarded. + Keys in the `Enumerable` that don't exist in the struct are automatically + discarded. Note that keys must be atoms, as only atoms are allowed when + defining a struct. - This function is useful for dynamically creating and updating - structs. + This function is useful for dynamically creating and updating structs, as + well as for converting maps to structs; in the latter case, just inserting + the appropriate `:__struct__` field into the map may not be enough and + `struct/2` should be used instead. 
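Before the examples below, a short sketch contrasting `struct/2` with `struct!/2` when `@enforce_keys` is involved; `Point` is a hypothetical struct and the error message shown is approximate:

    defmodule Point do
      # hypothetical struct used only for illustration
      @enforce_keys [:x]
      defstruct [:x, :y]
    end

    # unknown keys are discarded and @enforce_keys is not checked
    struct(Point, x: 1, z: 3)
    #=> %Point{x: 1, y: nil}

    # building with struct!/2 does enforce the keys
    struct!(Point, y: 2)
    #=> ** (ArgumentError) the following keys must also be given when building struct Point: [:x]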
- ## Example + ## Examples defmodule User do - defstruct name: "jose" + defstruct name: "john" end struct(User) - #=> %User{name: "jose"} + #=> %User{name: "john"} - opts = [name: "eric"] + opts = [name: "meg"] user = struct(User, opts) - #=> %User{name: "eric"} + #=> %User{name: "meg"} struct(user, unknown: "value") - #=> %User{name: "eric"} + #=> %User{name: "meg"} - """ - @spec struct(module | map, Enum.t) :: map - def struct(struct, kv \\ []) + struct(User, %{name: "meg"}) + #=> %User{name: "meg"} - def struct(struct, []) when is_atom(struct) or is_tuple(struct) do - apply(struct, :__struct__, []) + # String keys are ignored + struct(User, %{"name" => "meg"}) + #=> %User{name: "john"} + + """ + @spec struct(module | struct, Enum.t) :: struct + def struct(struct, fields \\ []) do + struct(struct, fields, fn {key, val}, acc -> + case Map.has_key?(acc, key) and key != :__struct__ do + true -> Map.put(acc, key, val) + false -> acc + end + end) end - def struct(struct, kv) when is_atom(struct) or is_tuple(struct) do - struct(apply(struct, :__struct__, []), kv) + @doc """ + Similar to `struct/2` but checks for key validity. + + The function `struct!/2` emulates the compile time behaviour + of structs. This means that: + + * when building a struct, as in `struct!(SomeStruct, key: :value)`, + it is equivalent to `%SomeStruct{key: :value}` and therefore this + function will check if every given key-value belongs to the struct. + If the struct is enforcing any key via `@enforce_keys`, those will + be enforced as well; + + * when updating a struct, as in `struct!(%SomeStruct{}, key: :value)`, + it is equivalent to `%SomeStruct{struct | key: :value}` and therefore this + function will check if every given key-value belongs to the struct. + However, updating structs does not enforce keys, as keys are enforced + only when building; + + """ + @spec struct!(module | struct, Enum.t) :: struct | no_return + def struct!(struct, fields \\ []) + + def struct!(struct, fields) when is_atom(struct) do + struct.__struct__(fields) end - def struct(%{__struct__: _} = struct, kv) do - Enum.reduce(kv, struct, fn {k, v}, acc -> - case :maps.is_key(k, acc) and k != :__struct__ do - true -> :maps.put(k, v, acc) - false -> acc - end + def struct!(struct, fields) when is_map(struct) do + struct(struct, fields, fn + {:__struct__, _}, acc -> acc + {key, val}, acc -> + Map.replace!(acc, key, val) end) end + defp struct(struct, [], _fun) when is_atom(struct) do + struct.__struct__() + end + + defp struct(struct, fields, fun) when is_atom(struct) do + struct(struct.__struct__(), fields, fun) + end + + defp struct(%{__struct__: _} = struct, [], _fun) do + struct + end + + defp struct(%{__struct__: _} = struct, fields, fun) do + Enum.reduce(fields, struct, fun) + end + @doc """ Gets a value from a nested structure. - Uses the `Access` protocol to traverse the structures - according to the given `keys`. + Uses the `Access` module to traverse the structures + according to the given `keys`, unless the `key` is a + function. + + If a key is a function, the function will be invoked + passing three arguments, the operation (`:get`), the + data to be accessed, and a function to be invoked next. + + This means `get_in/2` can be extended to provide + custom lookups. The downside is that functions cannot be + stored as keys in the accessed data structures. 
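The function-key mechanism described above is what the ready-made accessors in the `Access` module build on; for instance, the hand-written `all` function shown in the examples below can be replaced by `Access.all/0`. A minimal sketch, assuming a plain list of maps:

    iex> users = [%{name: "john", age: 27}, %{name: "meg", age: 23}]
    iex> get_in(users, [Access.all(), :name])
    ["john", "meg"]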
## Examples - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - iex> get_in(users, ["josé", :age]) + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + iex> get_in(users, ["john", :age]) 27 In case any of entries in the middle returns `nil`, `nil` will be returned - as per the Access protocol: + as per the Access module: - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} iex> get_in(users, ["unknown", :age]) nil + When one of the keys is a function, the function is invoked. + In the example below, we use a function to get all the maps + inside a list: + + iex> users = [%{name: "john", age: 27}, %{name: "meg", age: 23}] + iex> all = fn :get, data, next -> Enum.map(data, next) end + iex> get_in(users, [all, :age]) + [27, 23] + + If the previous value before invoking the function is `nil`, + the function *will* receive `nil` as a value and must handle it + accordingly. """ @spec get_in(Access.t, nonempty_list(term)) :: term def get_in(data, keys) - def get_in(nil, list) when is_list(list), do: nil - def get_in(data, [h]), do: Access.get(data, h) - def get_in(data, [h|t]), do: get_in(Access.get(data, h), t) + + def get_in(data, [h]) when is_function(h), + do: h.(:get, data, &(&1)) + def get_in(data, [h | t]) when is_function(h), + do: h.(:get, data, &get_in(&1, t)) + + def get_in(nil, [_]), + do: nil + def get_in(nil, [_ | t]), + do: get_in(nil, t) + + def get_in(data, [h]), + do: Access.get(data, h) + def get_in(data, [h | t]), + do: get_in(Access.get(data, h), t) @doc """ Puts a value in a nested structure. - Uses the `Access` protocol to traverse the structures - according to the given `keys`. + Uses the `Access` module to traverse the structures + according to the given `keys`, unless the `key` is a + function. If the key is a function, it will be invoked + as specified in `get_and_update_in/3`. ## Examples - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - iex> put_in(users, ["josé", :age], 28) - %{"josé" => %{age: 28}, "eric" => %{age: 23}} - - In case any of entries in the middle returns `nil`, a map is dynamically - created: - - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - iex> put_in(users, ["dave", :age], 13) - %{"josé" => %{age: 27}, "eric" => %{age: 23}, "dave" => %{age: 13}} + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + iex> put_in(users, ["john", :age], 28) + %{"john" => %{age: 28}, "meg" => %{age: 23}} + In case any of entries in the middle returns `nil`, + an error will be raised when trying to access it next. """ @spec put_in(Access.t, nonempty_list(term), term) :: Access.t def put_in(data, keys, value) do @@ -1503,34 +1883,49 @@ defmodule Kernel do @doc """ Updates a key in a nested structure. - Uses the `Access` protocol to traverse the structures - according to the given `keys`. + Uses the `Access` module to traverse the structures + according to the given `keys`, unless the `key` is a + function. If the key is a function, it will be invoked + as specified in `get_and_update_in/3`. 
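Because missing intermediate keys now raise instead of being created on the fly, a common workaround is to supply a default via `Access.key/2`; a minimal sketch, assuming map data:

    iex> users = %{"john" => %{age: 27}}
    iex> put_in(users, [Access.key("dave", %{}), :age], 13)
    %{"dave" => %{age: 13}, "john" => %{age: 27}}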
## Examples - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - iex> update_in(users, ["josé", :age], &(&1 + 1)) - %{"josé" => %{age: 28}, "eric" => %{age: 23}} - - In case any of entries in the middle returns `nil`, a map is dynamically - created: - - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - iex> update_in(users, ["dave", :age], &((&1 || 0) + 1)) - %{"josé" => %{age: 27}, "eric" => %{age: 23}, "dave" => %{age: 1}} + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + iex> update_in(users, ["john", :age], &(&1 + 1)) + %{"john" => %{age: 28}, "meg" => %{age: 23}} + In case any of entries in the middle returns `nil`, + an error will be raised when trying to access it next. """ @spec update_in(Access.t, nonempty_list(term), (term -> term)) :: Access.t - def update_in(data, keys, fun) do + def update_in(data, keys, fun) when is_function(fun, 1) do elem(get_and_update_in(data, keys, fn x -> {nil, fun.(x)} end), 1) end @doc """ Gets a value and updates a nested structure. - It expects a tuple to be returned, containing the value retrieved - and the update one. Uses the `Access` protocol to traverse the - structures according to the given `keys`. + `data` is a nested structure (ie. a map, keyword + list, or struct that implements the `Access` behaviour). + + The `fun` argument receives the value of `key` (or `nil` if `key` + is not present) and must return a two-element tuple: the "get" value + (the retrieved value, which can be operated on before being returned) + and the new value to be stored under `key`. The `fun` may also + return `:pop`, implying the current value shall be removed + from the structure and returned. + + It uses the `Access` module to traverse the structures + according to the given `keys`, unless the `key` is a + function. + + If a key is a function, the function will be invoked + passing three arguments, the operation (`:get_and_update`), + the data to be accessed, and a function to be invoked next. + + This means `get_and_update_in/3` can be extended to provide + custom lookups. The downside is that functions cannot be stored + as keys in the accessed data structures. ## Examples @@ -1539,28 +1934,85 @@ defmodule Kernel do update it at the same time. For example, it could be used to increase the age of a user by one and return the previous age in one pass: - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - iex> get_and_update_in(users, ["josé", :age], &{&1, &1 + 1}) - {27, %{"josé" => %{age: 28}, "eric" => %{age: 23}}} + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + iex> get_and_update_in(users, ["john", :age], &{&1, &1 + 1}) + {27, %{"john" => %{age: 28}, "meg" => %{age: 23}}} - In case any of entries in the middle returns `nil`, a map is dynamically - created: + When one of the keys is a function, the function is invoked. 
+ In the example below, we use a function to get and increment all + ages inside a list: + + iex> users = [%{name: "john", age: 27}, %{name: "meg", age: 23}] + iex> all = fn :get_and_update, data, next -> + ...> Enum.map(data, next) |> :lists.unzip + ...> end + iex> get_and_update_in(users, [all, :age], &{&1, &1 + 1}) + {[27, 23], [%{name: "john", age: 28}, %{name: "meg", age: 24}]} - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - iex> get_and_update_in(users, ["dave", :age], &{&1, 13}) - {nil, %{"josé" => %{age: 27}, "eric" => %{age: 23}, "dave" => %{age: 13}}} + If the previous value before invoking the function is `nil`, + the function *will* receive `nil` as a value and must handle it + accordingly (be it by failing or providing a sane default). + The `Access` module ships with many convenience accessor functions, + like the `all` anonymous function defined above. See `Access.all/0`, + `Access.key/2` and others as examples. """ - @spec get_and_update_in(Access.t, nonempty_list(term), - (term -> {get, term})) :: {get, Access.t} when get: var + @spec get_and_update_in(structure :: Access.t, keys, (term -> {get_value, update_value} | :pop)) :: + {get_value, structure :: Access.t} when keys: nonempty_list(any), get_value: var, update_value: term def get_and_update_in(data, keys, fun) - def get_and_update_in(nil, list, fun), do: get_and_update_in(%{}, list, fun) - def get_and_update_in(data, [h], fun), do: Access.get_and_update(data, h, fun) - def get_and_update_in(data, [h|t], fun) do - Access.get_and_update(data, h, &get_and_update_in(&1, t, fun)) + def get_and_update_in(data, [head], fun) when is_function(head, 3), + do: head.(:get_and_update, data, fun) + + def get_and_update_in(data, [head | tail], fun) when is_function(head, 3), + do: head.(:get_and_update, data, &get_and_update_in(&1, tail, fun)) + + def get_and_update_in(data, [head], fun) when is_function(fun, 1), + do: Access.get_and_update(data, head, fun) + + def get_and_update_in(data, [head | tail], fun) when is_function(fun, 1), + do: Access.get_and_update(data, head, &get_and_update_in(&1, tail, fun)) + + @doc """ + Pops a key from the given nested structure. + + Uses the `Access` protocol to traverse the structures + according to the given `keys`, unless the `key` is a + function. If the key is a function, it will be invoked + as specified in `get_and_update_in/3`. + + ## Examples + + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + iex> pop_in(users, ["john", :age]) + {27, %{"john" => %{}, "meg" => %{age: 23}}} + + In case any entry returns `nil`, its key will be removed + and the deletion will be considered a success. + """ + @spec pop_in(data, nonempty_list(Access.get_and_update_fun(term, data) | term)) :: + {term, data} when data: Access.container + def pop_in(data, keys) + + def pop_in(nil, [key | _]) do + raise ArgumentError, "could not pop key #{inspect key} on a nil value" + end + + def pop_in(data, keys) when is_list(keys) do + pop_in_data(data, keys) end + defp pop_in_data(nil, [_ | _]), + do: :pop + defp pop_in_data(data, [fun]) when is_function(fun), + do: fun.(:get_and_update, data, fn _ -> :pop end) + defp pop_in_data(data, [fun | tail]) when is_function(fun), + do: fun.(:get_and_update, data, &pop_in_data(&1, tail)) + defp pop_in_data(data, [key]), + do: Access.pop(data, key) + defp pop_in_data(data, [key | tail]), + do: Access.get_and_update(data, key, &pop_in_data(&1, tail)) + @doc """ Puts a value in a nested structure via the given `path`. 
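One more illustration of the `pop_in/2` behaviour described above: popping a path whose first key is absent is treated as a successful no-op, returning `nil` and the unchanged data (a minimal sketch):

    iex> users = %{"john" => %{age: 27}}
    iex> pop_in(users, ["jane", :age])
    {nil, %{"john" => %{age: 27}}}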
@@ -1579,19 +2031,57 @@ defmodule Kernel do ## Examples - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - iex> put_in(users["josé"][:age], 28) - %{"josé" => %{age: 28}, "eric" => %{age: 23}} + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + iex> put_in(users["john"][:age], 28) + %{"john" => %{age: 28}, "meg" => %{age: 23}} - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - iex> put_in(users["josé"].age, 28) - %{"josé" => %{age: 28}, "eric" => %{age: 23}} + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + iex> put_in(users["john"].age, 28) + %{"john" => %{age: 28}, "meg" => %{age: 23}} """ defmacro put_in(path, value) do - [h|t] = unnest(path, [], "put_in/2") - expr = nest_get_and_update_in(h, t, quote(do: fn _ -> {nil, unquote(value)} end)) - quote do: :erlang.element(2, unquote(expr)) + case unnest(path, [], true, "put_in/2") do + {[h | t], true} -> + nest_update_in(h, t, quote(do: fn _ -> unquote(value) end)) + {[h | t], false} -> + expr = nest_get_and_update_in(h, t, quote(do: fn _ -> {nil, unquote(value)} end)) + quote do: :erlang.element(2, unquote(expr)) + end + end + + @doc """ + Pops a key from the nested structure via the given `path`. + + This is similar to `pop_in/2`, except the path is extracted via + a macro rather than passing a list. For example: + + pop_in(opts[:foo][:bar]) + + Is equivalent to: + + pop_in(opts, [:foo, :bar]) + + Note that in order for this macro to work, the complete path must always + be visible by this macro. For more information about the supported path + expressions, please check `get_and_update_in/2` docs. + + ## Examples + + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + iex> pop_in(users["john"][:age]) + {27, %{"john" => %{}, "meg" => %{age: 23}}} + + iex> users = %{john: %{age: 27}, meg: %{age: 23}} + iex> pop_in(users.john[:age]) + {27, %{john: %{}, meg: %{age: 23}}} + + In case any entry returns `nil`, its key will be removed + and the deletion will be considered a success. 
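The path syntax accepted by these macros can mix bracket access and dot access in a single expression; a minimal sketch with hypothetical data:

    iex> users = %{"john" => %{profile: %{age: 27}}}
    iex> put_in(users["john"].profile[:age], 28)
    %{"john" => %{profile: %{age: 28}}}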
+ """ + defmacro pop_in(path) do + {[h | t], _} = unnest(path, [], true, "pop_in/1") + nest_pop_in(:map, h, t) end @doc """ @@ -1612,19 +2102,23 @@ defmodule Kernel do ## Examples - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - iex> update_in(users["josé"][:age], &(&1 + 1)) - %{"josé" => %{age: 28}, "eric" => %{age: 23}} + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + iex> update_in(users["john"][:age], &(&1 + 1)) + %{"john" => %{age: 28}, "meg" => %{age: 23}} - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - iex> update_in(users["josé"].age, &(&1 + 1)) - %{"josé" => %{age: 28}, "eric" => %{age: 23}} + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + iex> update_in(users["john"].age, &(&1 + 1)) + %{"john" => %{age: 28}, "meg" => %{age: 23}} """ defmacro update_in(path, fun) do - [h|t] = unnest(path, [], "update_in/2") - expr = nest_get_and_update_in(h, t, quote(do: fn x -> {nil, unquote(fun).(x)} end)) - quote do: :erlang.element(2, unquote(expr)) + case unnest(path, [], true, "update_in/2") do + {[h | t], true} -> + nest_update_in(h, t, fun) + {[h | t], false} -> + expr = nest_get_and_update_in(h, t, quote(do: fn x -> {nil, unquote(fun).(x)} end)) + quote do: :erlang.element(2, unquote(expr)) + end end @doc """ @@ -1644,93 +2138,144 @@ defmodule Kernel do ## Examples - iex> users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - iex> get_and_update_in(users["josé"][:age], &{&1, &1 + 1}) - {27, %{"josé" => %{age: 28}, "eric" => %{age: 23}}} + iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + iex> get_and_update_in(users["john"].age, &{&1, &1 + 1}) + {27, %{"john" => %{age: 28}, "meg" => %{age: 23}}} ## Paths A path may start with a variable, local or remote call, and must be followed by one or more: - * `foo[bar]` - access a field; in case an intermediate field is not - present or returns nil, an empty map is used + * `foo[bar]` - accesses the key `bar` in `foo`; in case `foo` is nil, + `nil` is returned - * `foo.bar` - access a map/struct field; in case the field is not + * `foo.bar` - accesses a map/struct field; in case the field is not present, an error is raised Here are some valid paths: - users["josé"][:age] - users["josé"].age - User.all["josé"].age - all_users()["josé"].age + users["john"][:age] + users["john"].age + User.all["john"].age + all_users()["john"].age Here are some invalid ones: # Does a remote call after the initial value - users["josé"].do_something(arg1, arg2) + users["john"].do_something(arg1, arg2) - # Does not access any field + # Does not access any key or field users """ defmacro get_and_update_in(path, fun) do - [h|t] = unnest(path, [], "get_and_update_in/2") + {[h | t], _} = unnest(path, [], true, "get_and_update_in/2") nest_get_and_update_in(h, t, fun) end + defp nest_update_in([], fun), do: fun + defp nest_update_in(list, fun) do + quote do + fn x -> unquote(nest_update_in(quote(do: x), list, fun)) end + end + end + defp nest_update_in(h, [{:map, key} | t], fun) do + quote do + Map.update!(unquote(h), unquote(key), unquote(nest_update_in(t, fun))) + end + end + defp nest_get_and_update_in([], fun), do: fun defp nest_get_and_update_in(list, fun) do quote do fn x -> unquote(nest_get_and_update_in(quote(do: x), list, fun)) end end end - - defp nest_get_and_update_in(h, [{:access, key}|t], fun) do + defp nest_get_and_update_in(h, [{:access, key} | t], fun) do quote do Access.get_and_update( - case(unquote(h), do: (nil -> %{}; o -> o)), + unquote(h), unquote(key), unquote(nest_get_and_update_in(t, 
fun)) ) end end + defp nest_get_and_update_in(h, [{:map, key} | t], fun) do + quote do + Map.get_and_update!(unquote(h), unquote(key), unquote(nest_get_and_update_in(t, fun))) + end + end + + defp nest_pop_in(kind, list) do + quote do + fn x -> unquote(nest_pop_in(kind, quote(do: x), list)) end + end + end + + defp nest_pop_in(:map, h, [{:access, key}]) do + quote do + case unquote(h) do + nil -> {nil, nil} + h -> Access.pop(h, unquote(key)) + end + end + end + + defp nest_pop_in(_, _, [{:map, key}]) do + raise ArgumentError, "cannot use pop_in when the last segment is a map/struct field. " <> + "This would effectively remove the field #{inspect key} from the map/struct" + end + defp nest_pop_in(_, h, [{:map, key} | t]) do + quote do + Map.get_and_update!(unquote(h), unquote(key), unquote(nest_pop_in(:map, t))) + end + end - defp nest_get_and_update_in(h, [{:map, key}|t], fun) do + defp nest_pop_in(_, h, [{:access, key}]) do + quote do + case unquote(h) do + nil -> :pop + h -> Access.pop(h, unquote(key)) + end + end + end + defp nest_pop_in(_, h, [{:access, key} | t]) do quote do - Access.Map.get_and_update!(unquote(h), unquote(key), unquote(nest_get_and_update_in(t, fun))) + Access.get_and_update(unquote(h), unquote(key), unquote(nest_pop_in(:access, t))) end end - defp unnest({{:., _, [Access, :get]}, _, [expr, key]}, acc, kind) do - unnest(expr, [{:access, key}|acc], kind) + defp unnest({{:., _, [Access, :get]}, _, [expr, key]}, acc, _all_map?, kind) do + unnest(expr, [{:access, key} | acc], false, kind) end - defp unnest({{:., _, [expr, key]}, _, []}, acc, kind) - when is_tuple(expr) and elem(expr, 0) != :__aliases__ and elem(expr, 0) != :__MODULE__ do - unnest(expr, [{:map, key}|acc], kind) + defp unnest({{:., _, [expr, key]}, _, []}, acc, all_map?, kind) + when is_tuple(expr) and + :erlang.element(1, expr) != :__aliases__ and + :erlang.element(1, expr) != :__MODULE__ do + unnest(expr, [{:map, key} | acc], all_map?, kind) end - defp unnest(other, [], kind) do + defp unnest(other, [], _all_map?, kind) do raise ArgumentError, - "expected expression given to #{kind} to access at least one field, got: #{Macro.to_string other}" + "expected expression given to #{kind} to access at least one element, got: #{Macro.to_string other}" end - defp unnest(other, acc, kind) do + defp unnest(other, acc, all_map?, kind) do case proper_start?(other) do - true -> [other|acc] + true -> {[other | acc], all_map?} false -> raise ArgumentError, "expression given to #{kind} must start with a variable, local or remote call " <> - "and be followed by field access, got: #{Macro.to_string other}" + "and be followed by an element access, got: #{Macro.to_string other}" end end defp proper_start?({{:., _, [expr, _]}, _, _args}) when is_atom(expr) - when elem(expr, 0) == :__aliases__ - when elem(expr, 0) == :__MODULE__, do: true + when :erlang.element(1, expr) == :__aliases__ + when :erlang.element(1, expr) == :__MODULE__, do: true defp proper_start?({atom, _, _args}) when is_atom(atom), do: true @@ -1750,46 +2295,44 @@ defmodule Kernel do "foo" """ - # If it is a binary at compilation time, simply return it. - defmacro to_string(arg) when is_binary(arg), do: arg - - defmacro to_string(arg) do - quote do: String.Chars.to_string(unquote(arg)) + defmacro to_string(term) do + quote do: String.Chars.to_string(unquote(term)) end @doc """ - Convert the argument to a list according to the List.Chars protocol. + Converts the given term to a charlist according to the `List.Chars` protocol. 
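Circling back to `to_string/1` above: any struct can opt into it by implementing the `String.Chars` protocol. A minimal sketch with a hypothetical `Money` struct:

    defmodule Money do
      # hypothetical struct used only for illustration
      defstruct amount: 0, currency: "USD"
    end

    defimpl String.Chars, for: Money do
      def to_string(%Money{amount: amount, currency: currency}) do
        "#{amount} #{currency}"
      end
    end

    to_string(%Money{amount: 10})
    #=> "10 USD"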
## Examples - iex> to_char_list(:foo) + iex> to_charlist(:foo) 'foo' """ - defmacro to_char_list(arg) do - quote do: List.Chars.to_char_list(unquote(arg)) + defmacro to_charlist(term) do + quote do: List.Chars.to_charlist(unquote(term)) end @doc """ - Checks if the given argument is nil or not. + Returns `true` if `term` is `nil`, `false` otherwise. + Allowed in guard clauses. ## Examples - iex> nil?(1) + iex> is_nil(1) false - iex> nil?(nil) + iex> is_nil(nil) true """ - defmacro nil?(x) do - quote do: unquote(x) == nil + defmacro is_nil(term) do + quote do: unquote(term) == nil end @doc """ - A convenient macro that checks if the right side matches - the left side. The left side is allowed to be a match pattern. + A convenience macro that checks if the right side (an expression) matches the + left side (a pattern). ## Examples @@ -1802,37 +2345,40 @@ defmodule Kernel do iex> match?({1, _}, {1, 2}) true - Match can also be used to filter or find a value in an enumerable: + iex> map = %{a: 1, b: 2} + iex> match?(%{a: _}, map) + true - list = [{:a, 1}, {:b, 2}, {:a, 3}] - Enum.filter list, &match?({:a, _}, &1) + iex> a = 1 + iex> match?(^a, 1) + true + + `match?/2` is very useful when filtering of finding a value in an enumerable: + + list = [{:a, 1}, {:b, 2}, {:a, 3}] + Enum.filter list, &match?({:a, _}, &1) + #=> [{:a, 1}, {:a, 3}] Guard clauses can also be given to the match: list = [{:a, 1}, {:b, 2}, {:a, 3}] Enum.filter list, &match?({:a, x} when x < 2, &1) + #=> [{:a, 1}] However, variables assigned in the match will not be available - outside of the function call: - - iex> match?(x, 1) - true + outside of the function call (unlike regular pattern matching with the `=` + operator): - iex> binding([:x]) == [] + iex> match?(_x, 1) true + iex> binding() + [] """ - defmacro match?(pattern, expr) - - # Special case underscore since it always matches - defmacro match?({:_, _, atom}, _right) when is_atom(atom) do - true - end - - defmacro match?(left, right) do + defmacro match?(pattern, expr) do quote do - case unquote(right) do - unquote(left) -> + case unquote(expr) do + unquote(pattern) -> true _ -> false @@ -1841,28 +2387,28 @@ defmodule Kernel do end @doc """ - Read and write attributes of th current module. + Reads and writes attributes of the current module. The canonical example for attributes is annotating that a module - implements the OTP behaviour called `gen_server`: + implements an OTP behaviour, such as `GenServer`: defmodule MyServer do - @behaviour :gen_server + @behaviour GenServer # ... callbacks ... end - By default Elixir supports all Erlang module attributes, but any developer - can also add custom attributes: + By default Elixir supports all the module attributes supported by Erlang, but + custom attributes can be used as well: defmodule MyServer do @my_data 13 IO.inspect @my_data #=> 13 end - Unlike Erlang, such attributes are not stored in the module by - default since it is common in Elixir to use such attributes to store - temporary data. A developer can configure an attribute to behave closer - to Erlang by calling `Module.register_attribute/3`. + Unlike Erlang, such attributes are not stored in the module by default since + it is common in Elixir to use custom attributes to store temporary data that + will be available at compile-time. Custom attributes may be configured to + behave closer to Erlang by using `Module.register_attribute/3`. 
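As a follow-up to the `Module.register_attribute/3` pointer above, a minimal sketch of an accumulating attribute; the `Pipeline` module and the `:step` attribute are hypothetical:

    defmodule Pipeline do
      Module.register_attribute(__MODULE__, :step, accumulate: true)

      @step :parse
      @step :validate

      # accumulated attributes are read back most recent first
      def steps, do: @step
    end

    Pipeline.steps()
    #=> [:validate, :parse]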
Finally, notice that attributes can also be read inside functions: @@ -1878,94 +2424,121 @@ defmodule Kernel do It is important to note that reading an attribute takes a snapshot of its current value. In other words, the value is read at compilation - time and not at runtime. Check the module `Module` for other functions + time and not at runtime. Check the `Module` module for other functions to manipulate module attributes. """ defmacro @(expr) - # Typespecs attributes are special cased by the compiler so far - defmacro @({name, _, args}) do - # Check for Macro as it is compiled later than Module - case bootstraped?(Module) do - false -> nil - true -> - assert_module_scope(__CALLER__, :@, 1) - function? = __CALLER__.function != nil - - case is_list(args) and length(args) == 1 and typespec(name) do - false -> - case name == :typedoc and not bootstraped?(Kernel.Typespec) do - true -> nil - false -> do_at(args, name, function?, __CALLER__) - end - macro -> - case bootstraped?(Kernel.Typespec) do - false -> nil - true -> quote do: Kernel.Typespec.unquote(macro)(unquote(hd(args))) - end + defmacro @({name, meta, args}) do + assert_module_scope(__CALLER__, :@, 1) + function? = __CALLER__.function != nil + + cond do + # Check for Module as it is compiled later than Kernel + not bootstrapped?(Macro) -> + nil + + not function? and __CALLER__.context == :match -> + raise ArgumentError, "invalid write attribute syntax, you probably meant to use: @#{name} expression" + + # Typespecs attributes are currently special cased by the compiler + macro = is_list(args) and length(args) == 1 and typespec(name) -> + case bootstrapped?(Kernel.Typespec) do + false -> nil + true -> quote do: Kernel.Typespec.unquote(macro)(unquote(hd(args))) end + + true -> + do_at(args, meta, name, function?, __CALLER__) end end - # @attribute value - defp do_at([arg], name, function?, env) do - case function? do - true -> - raise ArgumentError, "cannot dynamically set attribute @#{name} inside function" - false -> - case name do - :behavior -> - :elixir_errors.warn warn_info(env_stacktrace(env)), - "@behavior attribute is not supported, please use @behaviour instead" - _ -> - :ok - end + # @attribute(value) + defp do_at([arg], meta, name, function?, env) do + line = + case :lists.keymember(:context, 1, meta) do + true -> nil + false -> env.line + end + + cond do + function? -> + raise ArgumentError, "cannot set attribute @#{name} inside function/macro" + + name == :behavior -> + :elixir_errors.warn env.line, env.file, + "@behavior attribute is not supported, please use @behaviour instead" - quote do: Module.put_attribute(__MODULE__, unquote(name), unquote(arg)) + # TODO: Remove :compile check once on 2.0 as we no longer + # need to warn on parse transforms in Module.put_attribute. 
+ name == :compile -> + {stack, _} = :elixir_quote.escape(env_stacktrace(env), false) + quote do: Module.put_attribute(__MODULE__, unquote(name), unquote(arg), + unquote(stack), unquote(line)) + + :lists.member(name, [:moduledoc, :typedoc, :doc]) -> + {stack, _} = :elixir_quote.escape(env_stacktrace(env), false) + arg = {env.line, arg} + quote do: Module.put_attribute(__MODULE__, unquote(name), unquote(arg), + unquote(stack), unquote(line)) + + true -> + quote do: Module.put_attribute(__MODULE__, unquote(name), unquote(arg), + nil, unquote(line)) end end # @attribute or @attribute() - defp do_at(args, name, function?, env) when is_atom(args) or args == [] do + defp do_at(args, _meta, name, function?, env) when is_atom(args) or args == [] do stack = env_stacktrace(env) + doc_attr? = :lists.member(name, [:moduledoc, :typedoc, :doc]) case function? do true -> - attr = Module.get_attribute(env.module, name, stack) - :erlang.element(1, :elixir_quote.escape(attr, false)) + value = + with {_, doc} when doc_attr? <- + Module.get_attribute(env.module, name, stack), + do: doc + try do + :elixir_quote.escape(value, false) + rescue + ex in [ArgumentError] -> + raise ArgumentError, "cannot inject attribute @#{name} into function/macro because " <> Exception.message(ex) + else + {val, _} -> val + end + false -> - escaped = case stack do - [] -> [] - _ -> Macro.escape(stack) + {escaped, _} = :elixir_quote.escape(stack, false) + quote do + with {_, doc} when unquote(doc_attr?) <- + Module.get_attribute(__MODULE__, unquote(name), unquote(escaped)), + do: doc end - quote do: Module.get_attribute(__MODULE__, unquote(name), unquote(escaped)) end end # All other cases - defp do_at(args, name, _function?, _env) do + defp do_at(args, _meta, name, _function?, _env) do raise ArgumentError, "expected 0 or 1 argument for @#{name}, got: #{length(args)}" end - defp warn_info([entry|_]) do - opts = elem(entry, tuple_size(entry) - 1) - Exception.format_file_line(Keyword.get(opts, :file), Keyword.get(opts, :line)) <> " " - end + defp typespec(:type), do: :deftype + defp typespec(:typep), do: :deftypep + defp typespec(:opaque), do: :defopaque + defp typespec(:spec), do: :defspec + defp typespec(:callback), do: :defcallback + defp typespec(:macrocallback), do: :defmacrocallback + defp typespec(_), do: false - defp warn_info([]) do - "" - end + @doc """ + Returns the binding for the given context as a keyword list. - defp typespec(:type), do: :deftype - defp typespec(:typep), do: :deftypep - defp typespec(:opaque), do: :defopaque - defp typespec(:spec), do: :defspec - defp typespec(:callback), do: :defcallback - defp typespec(_), do: false + In the returned result, keys are variable names and values are the + corresponding variable values. - @doc """ - Returns the binding as a keyword list where the variable name - is the key and the variable value is the value. + If the given `context` is `nil` (by default it is), the binding for the + current context is returned. ## Examples @@ -1976,59 +2549,18 @@ defmodule Kernel do iex> binding() [x: 2] - """ - defmacro binding() do - do_binding(nil, nil, __CALLER__.vars, Macro.Env.in_match?(__CALLER__)) - end - - @doc """ - Receives a list of atoms at compilation time and returns the - binding of the given variables as a keyword list where the - variable name is the key and the variable value is the value. - - In case a variable in the list does not exist in the binding, - it is not included in the returned result. 
- - ## Examples - - iex> x = 1 - iex> binding([:x, :y]) - [x: 1] - - """ - defmacro binding(list) when is_list(list) do - do_binding(list, nil, __CALLER__.vars, Macro.Env.in_match?(__CALLER__)) - end - - defmacro binding(context) when is_atom(context) do - do_binding(nil, context, __CALLER__.vars, Macro.Env.in_match?(__CALLER__)) - end - - @doc """ - Receives a list of atoms at compilation time and returns the - binding of the given variables in the given context as a keyword - list where the variable name is the key and the variable value - is the value. - - In case a variable in the list does not exist in the binding, - it is not included in the returned result. - - ## Examples - - iex> var!(x, :foo) = 1 - iex> binding([:x, :y]) + iex> binding(:foo) [] - iex> binding([:x, :y], :foo) + iex> var!(x, :foo) = 1 + 1 + iex> binding(:foo) [x: 1] """ - defmacro binding(list, context) when is_list(list) and is_atom(context) do - do_binding(list, context, __CALLER__.vars, Macro.Env.in_match?(__CALLER__)) - end - - defp do_binding(list, context, vars, in_match) do - for {v, c} <- vars, c == context, list == nil or :lists.member(v, list) do - {v, wrap_binding(in_match, {v, [], c})} + defmacro binding(context \\ nil) do + in_match? = Macro.Env.in_match?(__CALLER__) + for {v, c} <- __CALLER__.vars, c == context do + {v, wrap_binding(in_match?, {v, [generated: true], c})} end end @@ -2041,15 +2573,18 @@ defmodule Kernel do end @doc """ - Provides an `if` macro. This macro expects the first argument to - be a condition and the rest are keyword arguments. + Provides an `if/2` macro. + + This macro expects the first argument to be a condition and the second + argument to be a keyword list. ## One-liner examples if(foo, do: bar) In the example above, `bar` will be returned if `foo` evaluates to - `true` (i.e. it is neither `false` nor `nil`). Otherwise, `nil` will be returned. + `true` (i.e., it is neither `false` nor `nil`). Otherwise, `nil` will be + returned. An `else` option can be given to specify the opposite: @@ -2057,15 +2592,15 @@ defmodule Kernel do ## Blocks examples - Elixir also allows you to pass a block to the `if` macro. The first + It's also possible to pass a block to the `if/2` macro. The first example above would be translated to: if foo do bar end - Notice that `do/end` becomes delimiters. The second example would - then translate to: + Note that `do/end` become delimiters. The second example would + translate to: if foo do bar @@ -2073,13 +2608,17 @@ defmodule Kernel do baz end - If you want to compare more than two clauses, you can use the `cond/1` - macro. + In order to compare more than two clauses, the `cond/1` macro has to be used. """ defmacro if(condition, clauses) do - do_clause = Keyword.get(clauses, :do, nil) - else_clause = Keyword.get(clauses, :else, nil) + build_if(condition, clauses) + end + defp build_if(condition, do: do_clause) do + build_if(condition, do: do_clause, else: nil) + end + + defp build_if(condition, do: do_clause, else: else_clause) do optimize_boolean(quote do case unquote(condition) do x when x in [false, nil] -> unquote(else_clause) @@ -2088,34 +2627,62 @@ defmodule Kernel do end) end + defp build_if(_condition, _arguments) do + raise(ArgumentError, "invalid or duplicate keys for if, only \"do\" " <> + "and an optional \"else\" are permitted") + end + @doc """ - Evaluates and returns the do-block passed in as a second argument - unless clause evaluates to true. - Returns nil otherwise. - See also `if`. + Provides an `unless` macro. 
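Since `if/2` (documented above) is an expression rather than a statement, its result can be bound directly to a variable; a minimal sketch:

    iex> label = if rem(10, 2) == 0, do: "even", else: "odd"
    iex> label
    "even"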
+ + This macro evaluates and returns the `do` block passed in as the second + argument unless `clause` evaluates to `true`. Otherwise, it returns the value + of the `else` block if present or `nil` if not. + + See also `if/2`. ## Examples iex> unless(Enum.empty?([]), do: "Hello") nil - iex> unless(Enum.empty?([1,2,3]), do: "Hello") + iex> unless(Enum.empty?([1, 2, 3]), do: "Hello") "Hello" + iex> unless Enum.sum([2, 2]) == 5 do + ...> "Math still works" + ...> else + ...> "Math is broken" + ...> end + "Math still works" + """ - defmacro unless(clause, options) do - do_clause = Keyword.get(options, :do, nil) - else_clause = Keyword.get(options, :else, nil) + defmacro unless(condition, clauses) do + build_unless(condition, clauses) + end + + defp build_unless(condition, do: do_clause) do + build_unless(condition, do: do_clause, else: nil) + end + + defp build_unless(condition, do: do_clause, else: else_clause) do quote do - if(unquote(clause), do: unquote(else_clause), else: unquote(do_clause)) + if(unquote(condition), do: unquote(else_clause), else: unquote(do_clause)) end end + defp build_unless(_condition, _arguments) do + raise(ArgumentError, "invalid or duplicate keys for unless, only \"do\" " <> + "and an optional \"else\" are permitted") + end + @doc """ - Allows you to destructure two lists, assigning each term in the right to the - matching term in the left. Unlike pattern matching via `=`, if the sizes of - the left and right lists don't match, destructuring simply stops instead of - raising an error. + Destructures two lists, assigning each term in the + right one to the matching term in the left one. + + Unlike pattern matching via `=`, if the sizes of the left + and right lists don't match, destructuring simply stops + instead of raising an error. ## Examples @@ -2123,43 +2690,38 @@ defmodule Kernel do iex> {x, y, z} {1, 2, 3} - Notice in the example above, even though the right - size has more entries than the left, destructuring works - fine. If the right size is smaller, the remaining items - are simply assigned to nil: + In the example above, even though the right list has more entries than the + left one, destructuring works fine. If the right list is smaller, the + remaining items are simply set to `nil`: iex> destructure([x, y, z], [1]) iex> {x, y, z} {1, nil, nil} - The left side supports any expression you would use - on the left side of a match: + The left-hand side supports any expression you would use + on the left-hand side of a match: x = 1 destructure([^x, y, z], [1, 2, 3]) - The example above will only work if x matches - the first value from the right side. Otherwise, - it will raise a CaseClauseError. + The example above will only work if `x` matches the first value in the right + list. Otherwise, it will raise a `MatchError` (like the `=` operator would + do). """ defmacro destructure(left, right) when is_list(left) do - Enum.reduce left, right, fn item, acc -> - {:case, meta, args} = - quote do - case unquote(acc) do - [unquote(item)|t] -> - t - other when other == [] or other == nil -> - unquote(item) = nil - end - end - {:case, [{:export_head,true}|meta], args} + quote do + unquote(left) = + Kernel.Utils.destructure(unquote(right), unquote(length(left))) end end @doc """ - Returns a range with the specified start and end. - Includes both ends. + Returns a range with the specified `first` and `last` integers. + + If last is larger than first, the range will be increasing from + first to last. 
If first is larger than last, the range will be + decreasing from first to last. If first is equal to last, the range + will contain one element, which is the number itself. ## Examples @@ -2176,16 +2738,38 @@ defmodule Kernel do true """ - defmacro first .. last do + defmacro first..last when is_integer(first) and is_integer(last) do {:%{}, [], [__struct__: Elixir.Range, first: first, last: last]} end + defmacro first..last + when is_float(first) or is_float(last) or + is_atom(first) or is_atom(last) or + is_binary(first) or is_binary(last) or + is_list(first) or is_list(last) do + raise ArgumentError, + "ranges (first..last) expect both sides to be integers, " <> + "got: #{Macro.to_string({:.., [], [first, last]})}" + end + + defmacro first..last do + case __CALLER__.context do + nil -> + quote do: Elixir.Range.new(unquote(first), unquote(last)) + _ -> + {:%{}, [], [__struct__: Elixir.Range, first: first, last: last]} + end + end + + @doc """ Provides a short-circuit operator that evaluates and returns - the second expression only if the first one evaluates to true - (i.e. it is not nil nor false). Returns the first expression + the second expression only if the first one evaluates to `true` + (i.e., it is neither `nil` nor `false`). Returns the first expression otherwise. + Not allowed in guard clauses. + ## Examples iex> Enum.empty?([]) && Enum.empty?([]) @@ -2200,9 +2784,8 @@ defmodule Kernel do iex> false && throw(:bad) false - Notice that, unlike Erlang's `and` operator, - this operator accepts any expression as an argument, - not only booleans, however it is not allowed in guards. + Note that, unlike `and/2`, this operator accepts any expression + as the first argument, not only booleans. """ defmacro left && right do quote do @@ -2217,8 +2800,10 @@ defmodule Kernel do @doc """ Provides a short-circuit operator that evaluates and returns the second - expression only if the first one does not evaluate to true (i.e. it - is either nil or false). Returns the first expression otherwise. + expression only if the first one does not evaluate to `true` (i.e., it + is either `nil` or `false`). Returns the first expression otherwise. + + Not allowed in guard clauses. ## Examples @@ -2234,9 +2819,8 @@ defmodule Kernel do iex> Enum.empty?([]) || throw(:bad) true - Notice that, unlike Erlang's `or` operator, - this operator accepts any expression as an argument, - not only booleans, however it is not allowed in guards. + Note that, unlike `or/2`, this operator accepts any expression + as the first argument, not only booleans. """ defmacro left || right do quote do @@ -2250,36 +2834,39 @@ defmodule Kernel do end @doc """ - `|>` is the pipe operator. + Pipe operator. - This operator introduces the expression on the left as - the first argument to the function call on the right. + This operator introduces the expression on the left-hand side as + the first argument to the function call on the right-hand side. ## Examples iex> [1, [2], 3] |> List.flatten() [1, 2, 3] - The example above is the same as calling `List.flatten([1, [2], 3])`, - i.e. the argument on the left side of `|>` is introduced as the first - argument of the function call on the right side. + The example above is the same as calling `List.flatten([1, [2], 3])`. 
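A common use of the `||` operator documented above is supplying defaults for `nil` values coming out of optional data; a minimal sketch with hypothetical configuration:

    iex> config = %{port: nil}
    iex> config[:port] || 4000
    4000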
- This pattern is mostly useful when there is a desire to execute - a bunch of operations, resembling a pipeline: + The `|>` operator is mostly useful when there is a desire to execute a series + of operations resembling a pipeline: iex> [1, [2], 3] |> List.flatten |> Enum.map(fn x -> x * 2 end) [2, 4, 6] - The example above will pass the list to `List.flatten/1`, then get - the flattened list and pass to `Enum.map/2`, which will multiply - each entry in the list per two. + In the example above, the list `[1, [2], 3]` is passed as the first argument + to the `List.flatten/1` function, then the flattened list is passed as the + first argument to the `Enum.map/2` function which doubles each element of the + list. In other words, the expression above simply translates to: Enum.map(List.flatten([1, [2], 3]), fn x -> x * 2 end) - Beware of operator precedence when using the pipe operator. - For example, the following expression: + ## Pitfalls + + There are two common pitfalls when using the pipe operator. + + The first one is related to operator precedence. For example, + the following expression: String.graphemes "Hello" |> Enum.reverse @@ -2287,8 +2874,8 @@ defmodule Kernel do String.graphemes("Hello" |> Enum.reverse) - Which will result in an error as Enumerable protocol is not defined - for binaries. Adding explicit parenthesis resolves the ambiguity: + which results in an error as the `Enumerable` protocol is not defined + for binaries. Adding explicit parentheses resolves the ambiguity: String.graphemes("Hello") |> Enum.reverse @@ -2296,58 +2883,96 @@ defmodule Kernel do "Hello" |> String.graphemes |> Enum.reverse + The second pitfall is that the `|>` operator works on calls. + For example, when you write: + + "Hello" |> some_function() + + Elixir sees the right-hand side is a function call and pipes + to it. This means that, if you want to pipe to an anonymous + or captured function, it must also be explicitly called. + + Given the anonymous function: + + fun = fn x -> IO.puts(x) end + fun.("Hello") + + This won't work as it will rather try to invoke the local + function `fun`: + + "Hello" |> fun() + + This works: + + "Hello" |> fun.() + + As you can see, the `|>` operator retains the same semantics + as when the pipe is not used since both require the `fun.(...)` + notation. """ defmacro left |> right do - [{h, _}|t] = Macro.unpipe({:|>, [], [left, right]}) - :lists.foldl fn {x, pos}, acc -> Macro.pipe(acc, x, pos) end, h, t + [{h, _} | t] = Macro.unpipe({:|>, [], [left, right]}) + :lists.foldl fn {x, pos}, acc -> + # TODO: raise an error in `Macro.pipe/3` by 1.5 + case Macro.pipe_warning(x) do + nil -> :ok + message -> + :elixir_errors.warn(__CALLER__.line, __CALLER__.file, message) + end + Macro.pipe(acc, x, pos) + end, h, t end @doc """ - Returns true if the `module` is loaded and contains a - public `function` with the given `arity`, otherwise false. + Returns `true` if `module` is loaded and contains a + public `function` with the given `arity`, otherwise `false`. - Notice that this function does not load the module in case + Note that this function does not load the module in case it is not loaded. Check `Code.ensure_loaded/1` for more information. 
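Because `function_exported?/3` never loads the module on its own, pairing it with `Code.ensure_loaded?/1` is the usual pattern when the module may not have been loaded yet; a minimal sketch using `:crypto` as the example module:

    iex> Code.ensure_loaded?(:crypto)
    true
    iex> function_exported?(:crypto, :strong_rand_bytes, 1)
    true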
+ + ## Examples + + iex> function_exported?(Enum, :member?, 2) + true + """ - @spec function_exported?(atom | tuple, atom, integer) :: boolean + @spec function_exported?(module, atom, arity) :: boolean def function_exported?(module, function, arity) do :erlang.function_exported(module, function, arity) end @doc """ - Returns true if the `module` is loaded and contains a - public `macro` with the given `arity`, otherwise false. + Returns `true` if `module` is loaded and contains a + public `macro` with the given `arity`, otherwise `false`. - Notice that this function does not load the module in case + Note that this function does not load the module in case it is not loaded. Check `Code.ensure_loaded/1` for more information. - """ - @spec macro_exported?(atom, atom, integer) :: boolean - def macro_exported?(module, macro, arity) do - case :code.is_loaded(module) do - {:file, _} -> :lists.member({macro, arity}, module.__info__(:macros)) - _ -> false - end - end - @doc """ - Access the given element using the qualifier according - to the `Access` protocol. All calls in the form `foo[bar]` - are translated to `access(foo, bar)`. + If `module` is an Erlang module (as opposed to an Elixir module), this + function always returns `false`. - The usage of this protocol is to access a raw value in a - keyword list. + ## Examples - iex> sample = [a: 1, b: 2, c: 3] - iex> sample[:b] - 2 + iex> macro_exported?(Kernel, :use, 2) + true + + iex> macro_exported?(:erlang, :abs, 1) + false """ + @spec macro_exported?(module, atom, arity) :: boolean + def macro_exported?(module, macro, arity) + when is_atom(module) and is_atom(macro) and is_integer(arity) and + (arity >= 0 and arity <= 255) do + function_exported?(module, :__info__, 1) and + :lists.member({macro, arity}, module.__info__(:macros)) + end @doc """ - Checks if the element on the left side is member of the - collection on the right side. + Checks if the element on the left-hand side is a member of the + collection on the right-hand side. ## Examples @@ -2355,19 +2980,27 @@ defmodule Kernel do iex> x in [1, 2, 3] true - This macro simply translates the expression above to: + This operator (which is a macro) simply translates to a call to + `Enum.member?/2`. The example above would translate to: + + Enum.member?([1, 2, 3], x) - Enum.member?([1,2,3], x) + Elixir also supports `left not in right`, which evaluates to + `not(left in right)`: + + iex> x = 1 + iex> x not in [1, 2, 3] + false ## Guards - The `in` operator can be used on guard clauses as long as the - right side is a range or a list. Elixir will then expand the - operator to a valid guard expression. For example: + The `in/2` operator (as well as `not in`) can be used in guard clauses as + long as the right-hand side is a range or a list. In such cases, Elixir will + expand the operator to a valid guard expression. For example: - when x in [1,2,3] + when x in [1, 2, 3] - Translates to: + translates to: when x === 1 or x === 2 or x === 3 @@ -2375,58 +3008,113 @@ defmodule Kernel do when x in 1..3 - Translates to: + translates to: + + when is_integer(x) and x >= 1 and x <= 3 + + Note that only integers can be considered inside a range by `in`. - when x >= 1 and x <= 3 + ### AST considerations + `left not in right` is parsed by the compiler into the AST: + + {:not, _, [{:in, _, [left, right]}]} + + This is the same AST as `not(left in right)`. + + Additionally, `Macro.to_string/2` will translate all occurrences of + this AST to `left not in right`. 
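A short example of `in`/`not in` inside guards, following the expansion rules described above; the module and function names are purely illustrative:

    defmodule InGuardExample do
      def small?(x) when x in 1..3, do: true
      def small?(_), do: false

      def consonant?(char) when char not in [?a, ?e, ?i, ?o, ?u], do: true
      def consonant?(_), do: false
    end

    InGuardExample.small?(2)      #=> true
    InGuardExample.small?(2.0)    #=> false (only integers are considered inside a range)
    InGuardExample.consonant?(?b) #=> true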
""" defmacro left in right do - cache = (__CALLER__.context == nil) + in_module? = (__CALLER__.context == nil) - right = case bootstraped?(Macro) do + right = case bootstrapped?(Macro) do true -> Macro.expand(right, __CALLER__) false -> right end case right do - _ when cache -> - quote do: Elixir.Enum.member?(unquote(right), unquote(left)) - [] -> + [] when not in_module? -> false - [h|t] -> - :lists.foldr(fn x, acc -> - quote do - unquote(comp(left, x)) or unquote(acc) - end - end, comp(left, h), t) - {:%{}, [], [__struct__: Elixir.Range, first: first, last: last]} -> - in_range(left, Macro.expand(first, __CALLER__), Macro.expand(last, __CALLER__)) + + [h | t] -> + in_var(in_module?, left, &in_list(&1, h, t)) + + {:%{}, _meta, [__struct__: Elixir.Range, first: first, last: last]} -> + first = Macro.expand(first, __CALLER__) + last = Macro.expand(last, __CALLER__) + in_var(in_module?, left, &in_range(&1, first, last)) + + _ when in_module? -> + quote do: Elixir.Enum.member?(unquote(right), unquote(left)) + + %{__struct__: Elixir.Range, first: _, last: _} -> + raise ArgumentError, "non-literal range in guard should be escaped with Macro.escape/2" + _ -> - raise ArgumentError, <<"invalid args for operator in, it expects a compile time list ", - "or range on the right side when used in guard expressions, got: ", - Macro.to_string(right) :: binary>> + raise ArgumentError, <<"invalid args for operator \"in\", it expects a compile-time list ", + "or compile-time range on the right side when used in guard expressions, got: ", + Macro.to_string(right)::binary>> + end + end + + defp in_var(false, ast, fun), + do: fun.(ast) + defp in_var(true, {atom, _, context} = var, fun) when is_atom(atom) and is_atom(context), + do: fun.(var) + defp in_var(true, ast, fun) do + quote do + var = unquote(ast) + unquote(fun.(quote(do: var))) end end defp in_range(left, first, last) do - case opt_in?(first) and opt_in?(last) do + case is_integer(first) and is_integer(last) do true -> - case first <= last do - true -> increasing_compare(left, first, last) - false -> decreasing_compare(left, first, last) - end + in_range_literal(left, first, last) false -> quote do - (:erlang."=<"(unquote(first), unquote(last)) and - unquote(increasing_compare(left, first, last))) - or - (:erlang."<"(unquote(last), unquote(first)) and - unquote(decreasing_compare(left, first, last))) + (:erlang.is_integer(unquote(left)) and + :erlang.is_integer(unquote(first)) and + :erlang.is_integer(unquote(last))) and + ((:erlang."=<"(unquote(first), unquote(last)) and + unquote(increasing_compare(left, first, last))) or + (:erlang."<"(unquote(last), unquote(first)) and + unquote(decreasing_compare(left, first, last)))) end end end - defp opt_in?(x), do: is_integer(x) or is_float(x) or is_atom(x) + defp in_range_literal(left, first, first) do + quote do + :erlang."=:="(unquote(left), unquote(first)) + end + end + + defp in_range_literal(left, first, last) when first < last do + quote do + :erlang.is_integer(unquote(left)) and + unquote(increasing_compare(left, first, last)) + end + end + + defp in_range_literal(left, first, last) do + quote do + :erlang.is_integer(unquote(left)) and + unquote(decreasing_compare(left, first, last)) + end + end + + defp in_list(left, h, t) do + :lists.foldr(fn x, acc -> + quote do: :erlang.or(unquote(comp(left, x)), unquote(acc)) + end, comp(left, h), t) + end + + defp comp(left, {:|, _, [h, t]}) do + quote(do: :erlang.or(:erlang."=:="(unquote(left), unquote(h)), unquote(left) in unquote(t))) + end defp comp(left, 
right) do quote(do: :erlang."=:="(unquote(left), unquote(right))) @@ -2435,56 +3123,52 @@ defmodule Kernel do defp increasing_compare(var, first, last) do quote do :erlang.">="(unquote(var), unquote(first)) and - :erlang."=<"(unquote(var), unquote(last)) + :erlang."=<"(unquote(var), unquote(last)) end end defp decreasing_compare(var, first, last) do quote do :erlang."=<"(unquote(var), unquote(first)) and - :erlang.">="(unquote(var), unquote(last)) + :erlang.">="(unquote(var), unquote(last)) end end @doc """ - When used inside quoting, marks that the variable should - not be hygienized. The argument can be either a variable - unquoted or in standard tuple form `{name, meta, context}`. + When used inside quoting, marks that the given variable should + not be hygienized. + + The argument can be either a variable unquoted or in standard tuple form + `{name, meta, context}`. Check `Kernel.SpecialForms.quote/2` for more information. """ defmacro var!(var, context \\ nil) defmacro var!({name, meta, atom}, context) when is_atom(name) and is_atom(atom) do - do_var!(name, meta, context, __CALLER__) - end - - defmacro var!(x, _context) do - raise ArgumentError, "expected a var to be given to var!, got: #{Macro.to_string(x)}" - end - - defp do_var!(name, meta, context, env) do # Remove counter and force them to be vars meta = :lists.keydelete(:counter, 1, meta) meta = :lists.keystore(:var, 1, meta, {:var, true}) - case Macro.expand(context, env) do - x when is_atom(x) -> - {name, meta, x} - x -> - raise ArgumentError, "expected var! context to expand to an atom, got: #{Macro.to_string(x)}" + case Macro.expand(context, __CALLER__) do + context when is_atom(context) -> + {name, meta, context} + other -> + raise ArgumentError, "expected var! context to expand to an atom, got: #{Macro.to_string(other)}" end end + defmacro var!(other, _context) do + raise ArgumentError, "expected a variable to be given to var!, got: #{Macro.to_string(other)}" + end + @doc """ - When used inside quoting, marks that the alias should not - be hygienezed. This means the alias will be expanded when + When used inside quoting, marks that the given alias should not + be hygienized. This means the alias will be expanded when the macro is expanded. Check `Kernel.SpecialForms.quote/2` for more information. """ - defmacro alias!(alias) - defmacro alias!(alias) when is_atom(alias) do alias end @@ -2500,8 +3184,13 @@ defmodule Kernel do @doc ~S""" Defines a module given by name with the given contents. - It returns the module name, the module binary and the - block contents result. + This macro defines a module with the given `alias` as its name and with the + given contents. It returns a tuple with four elements: + + * `:module` + * the module name + * the binary contents of the module + * the result of evaluating the contents block ## Examples @@ -2513,39 +3202,61 @@ defmodule Kernel do ## Nesting - Nesting a module inside another module affects its name: + Nesting a module inside another module affects the name of the nested module: defmodule Foo do defmodule Bar do end end - In the example above, two modules `Foo` and `Foo.Bar` are created. - When nesting, Elixir automatically creates an alias, allowing the - second module `Foo.Bar` to be accessed as `Bar` in the same lexical - scope. + In the example above, two modules - `Foo` and `Foo.Bar` - are created. 
+ When nesting, Elixir automatically creates an alias to the inner module, + allowing the second module `Foo.Bar` to be accessed as `Bar` in the same + lexical scope where it's defined (the `Foo` module). + + If the `Foo.Bar` module is moved somewhere else, the references to `Bar` in + the `Foo` module need to be updated to the fully-qualified name (`Foo.Bar`) or + an alias has to be explicitly set in the `Foo` module with the help of + `Kernel.SpecialForms.alias/2`. + + defmodule Foo.Bar do + # code + end + + defmodule Foo do + alias Foo.Bar + # code here can refer to "Foo.Bar" as just "Bar" + end + + ## Module names - This means that, if the module `Bar` is moved to another file, - the references to `Bar` needs to be updated or an alias needs to - be explicitly set with the help of `Kernel.SpecialForms.alias/2`. + A module name can be any atom, but Elixir provides a special syntax which is + usually used for module names. What is called a module name is an + _uppercase ASCII letter_ followed by any number of _lowercase or + uppercase ASCII letters_, _numbers_, or _underscores_. + This identifier is equivalent to an atom prefixed by `Elixir.`. So in the + `defmodule Foo` example `Foo` is equivalent to `:"Elixir.Foo"` ## Dynamic names Elixir module names can be dynamically generated. This is very - useful for macros. For instance, one could write: + useful when working with macros. For instance, one could write: defmodule String.to_atom("Foo#{1}") do # contents ... end - Elixir will accept any module name as long as the expression - returns an atom. Note that, when a dynamic name is used, Elixir - won't nest the name under the current module nor automatically - set up an alias. + Elixir will accept any module name as long as the expression passed as the + first argument to `defmodule/2` evaluates to an atom. + Note that, when a dynamic name is used, Elixir won't nest the name under the + current module nor automatically set up an alias. + """ + defmacro defmodule(alias, do_block) + defmacro defmodule(alias, do: block) do env = __CALLER__ - boot? = bootstraped?(Macro) + boot? = bootstrapped?(Macro) expanded = case boot? do @@ -2561,13 +3272,23 @@ defmodule Kernel do # Generate the alias for this module definition {new, old} = module_nesting(env.module, full) - meta = [defined: full, context: true] ++ alias_meta(alias) + meta = [defined: full, context: env.module] ++ alias_meta(alias) {full, {:alias, meta, [old, [as: new, warn: false]]}} false -> {expanded, nil} end + # We do this so that the block is not tail-call optimized and stacktraces + # are not messed up. Basically, we just insert something between the return + # value of the block and what is returned by defmodule. Using just ":ok" or + # similar doesn't work because it's likely optimized away by the compiler. 
+ block = quote do + result = unquote(block) + :elixir_utils.noop() + result + end + {escaped, _} = :elixir_quote.escape(block, false) module_vars = module_vars(env.vars, 0) @@ -2586,7 +3307,7 @@ defmodule Kernel do do: raw # defmodule Elixir.Alias - defp expand_module({:__aliases__, _, [:Elixir|t]}, module, _env) when t != [], + defp expand_module({:__aliases__, _, [:Elixir | t]}, module, _env) when t != [], do: module # defmodule Alias in root @@ -2595,71 +3316,71 @@ defmodule Kernel do # defmodule Alias nested defp expand_module({:__aliases__, _, t}, _module, env), - do: :elixir_aliases.concat([env.module|t]) + do: :elixir_aliases.concat([env.module | t]) # defmodule _ defp expand_module(_raw, module, env), do: :elixir_aliases.concat([env.module, module]) # quote vars to be injected into the module definition - defp module_vars([{key, kind}|vars], counter) do + defp module_vars([{key, kind} | vars], counter) do var = case is_atom(kind) do - true -> {key, [], kind} - false -> {key, [counter: kind], nil} + true -> {key, [generated: true], kind} + false -> {key, [counter: kind, generated: true], nil} end under = String.to_atom(<<"_@", :erlang.integer_to_binary(counter)::binary>>) args = [key, kind, under, var] - [{:{}, [], args}|module_vars(vars, counter+1)] + [{:{}, [], args} | module_vars(vars, counter + 1)] end defp module_vars([], _counter) do [] end - # Gets two modules names and return an alias + # Gets two modules' names and returns an alias # which can be passed down to the alias directive # and it will create a proper shortcut representing # the given nesting. # # Examples: # - # module_nesting('Elixir.Foo.Bar', 'Elixir.Foo.Bar.Baz.Bat') - # {'Elixir.Baz', 'Elixir.Foo.Bar.Baz'} + # module_nesting(:"Elixir.Foo.Bar", :"Elixir.Foo.Bar.Baz.Bat") + # {:"Elixir.Baz", :"Elixir.Foo.Bar.Baz"} # # In case there is no nesting/no module: # - # module_nesting(nil, 'Elixir.Foo.Bar.Baz.Bat') - # {false, 'Elixir.Foo.Bar.Baz.Bat'} + # module_nesting(nil, :"Elixir.Foo.Bar.Baz.Bat") + # {nil, :"Elixir.Foo.Bar.Baz.Bat"} # defp module_nesting(nil, full), - do: {false, full} + do: {nil, full} defp module_nesting(prefix, full) do case split_module(prefix) do - [] -> {false, full} + [] -> {nil, full} prefix -> module_nesting(prefix, split_module(full), [], full) end end - defp module_nesting([x|t1], [x|t2], acc, full), - do: module_nesting(t1, t2, [x|acc], full) - defp module_nesting([], [h|_], acc, _full), + defp module_nesting([x | t1], [x | t2], acc, full), + do: module_nesting(t1, t2, [x | acc], full) + defp module_nesting([], [h | _], acc, _full), do: {String.to_atom(<<"Elixir.", h::binary>>), - :elixir_aliases.concat(:lists.reverse([h|acc]))} + :elixir_aliases.concat(:lists.reverse([h | acc]))} defp module_nesting(_, _, _acc, full), - do: {false, full} + do: {nil, full} defp split_module(atom) do case :binary.split(Atom.to_string(atom), ".", [:global]) do - ["Elixir"|t] -> t + ["Elixir" | t] -> t _ -> [] end end - @doc """ - Defines a function with the given name and contents. + @doc ~S""" + Defines a function with the given name and body. ## Examples @@ -2669,7 +3390,7 @@ defmodule Kernel do Foo.bar #=> :baz - A function that expects arguments can be defined as follow: + A function that expects arguments can be defined as follows: defmodule Foo do def sum(a, b) do @@ -2677,8 +3398,89 @@ defmodule Kernel do end end - In the example above, we defined a function `sum` that receives - two arguments and sums them. 
+ In the example above, a `sum/2` function is defined; this function receives + two arguments and returns their sum. + + ## Default arguments + + `\\` is used to specify a default value for a parameter of a function. For + example: + + defmodule MyMath do + def multiply_by(number, factor \\ 2) do + number * factor + end + end + + MyMath.multiply_by(4, 3) #=> 12 + MyMath.multiply_by(4) #=> 8 + + The compiler translates this into multiple functions with different arities, + here `Foo.multiply_by/1` and `Foo.multiply_by/2`, that represent cases when + arguments for parameters with default values are passed or not passed. + + When defining a function with default arguments as well as multiple + explicitly declared clauses, you must write a function head that declares the + defaults. For example: + + defmodule MyString do + def join(string1, string2 \\ nil, separator \\ " ") + + def join(string1, nil, _separator) do + string1 + end + + def join(string1, string2, separator) do + string1 <> separator <> string2 + end + end + + Note that `\\` can't be used with anonymous functions because they + can only have a single arity. + + ## Function and variable names + + Function and variable names have the following syntax: + A _lowercase ASCII letter_ or an _underscore_, followed by any number of + _lowercase or uppercase ASCII letters_, _numbers_, or _underscores_. + Optionally they can end in either an _exclamation mark_ or a _question mark_. + + For variables, any identifier starting with an underscore should indicate an + unused variable. For example: + + def foo(bar) do + [] + end + #=> warning: variable bar is unused + + def foo(_bar) do + [] + end + #=> no warning + + def foo(_bar) do + _bar + end + #=> warning: the underscored variable "_bar" is used after being set + + ## rescue/catch/after + + Function bodies support `rescue`, `catch` and `after` as `SpecialForms.try/1` + does. The following two functions are equivalent: + + def format(value) do + try do + format!(value) + catch + :exit, reason -> {:error, reason} + end + end + + def format(value) do + format!(value) + catch + :exit, reason -> {:error, reason} + end """ defmacro def(call, expr \\ nil) do @@ -2686,10 +3488,13 @@ defmodule Kernel do end @doc """ - Defines a function that is private. Private functions are - only accessible from within the module in which they are defined. + Defines a private function with the given name and body. + + Private functions are only accessible from within the module in which they are + defined. Trying to access a private function from outside the module it's + defined in results in an `UndefinedFunctionError` exception. - Check `def/2` for more information + Check `def/2` for more information. ## Examples @@ -2701,15 +3506,18 @@ defmodule Kernel do defp sum(a, b), do: a + b end - In the example above, `sum` is private and accessing it - through `Foo.sum` will raise an error. + Foo.bar #=> 3 + Foo.sum(1, 2) #=> ** (UndefinedFunctionError) undefined function Foo.sum/2 + """ defmacro defp(call, expr \\ nil) do define(:defp, call, expr, __CALLER__) end @doc """ - Defines a macro with the given name and contents. + Defines a macro with the given name and body. + + Check `def/2` for rules on naming and default arguments. ## Examples @@ -2732,10 +3540,14 @@ defmodule Kernel do end @doc """ - Defines a macro that is private. Private macros are - only accessible from the same module in which they are defined. + Defines a private macro with the given name and body. 
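Stepping back to the default-arguments section of `def/2` above: the "multiple functions with different arities" generated by the compiler can be observed through reflection. A sketch with an illustrative module name:

    defmodule Defaults do
      def multiply_by(number, factor \\ 2), do: number * factor
    end

    Defaults.__info__(:functions) #=> [multiply_by: 1, multiply_by: 2]
    Defaults.multiply_by(4)       #=> 8
    Defaults.multiply_by(4, 3)    #=> 12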
+ + Private macros are only accessible from the same module in which they are + defined. + + Check `defmacro/2` for more information, and check `def/2` for rules on + naming and default arguments. - Check `defmacro/2` for more information """ defmacro defmacrop(call, expr \\ nil) do define(:defmacrop, call, expr, __CALLER__) @@ -2744,36 +3556,32 @@ defmodule Kernel do defp define(kind, call, expr, env) do assert_module_scope(env, kind, 2) assert_no_function_scope(env, kind, 2) - line = env.line - {call, uc} = :elixir_quote.escape(call, true) - {expr, ue} = :elixir_quote.escape(expr, true) + {call, unquoted_call} = :elixir_quote.escape(call, true) + {expr, unquoted_expr} = :elixir_quote.escape(expr, true) # Do not check clauses if any expression was unquoted - check_clauses = not(ue or uc) + check_clauses = not(unquoted_expr or unquoted_call) pos = :elixir_locals.cache_env(env) quote do - :elixir_def.store_definition(unquote(line), unquote(kind), unquote(check_clauses), + :elixir_def.store_definition(unquote(kind), unquote(check_clauses), unquote(call), unquote(expr), unquote(pos)) end end @doc """ - Defines a struct for the current module. + Defines a struct. A struct is a tagged map that allows developers to provide default values for keys, tags to be used in polymorphic dispatches and compile time assertions. - To define a struct, a developer needs to only define - a function named `__struct__/0` that returns a map with the - structs field. This macro is a convenience for defining such - function, with the addition of a type `t` and deriving - conveniences. + To define a struct, a developer must define both `__struct__/0` and + `__struct__/1` functions. `defstruct/1` is a convenience macro which + defines such functions with some conveniences. - For more information about structs, please check - `Kernel.SpecialForms.%/2`. + For more information about structs, please check `Kernel.SpecialForms.%/2`. ## Examples @@ -2781,170 +3589,189 @@ defmodule Kernel do defstruct name: nil, age: nil end - Struct fields are evaluated at definition time, which allows - them to be dynamic. In the example below, `10 + 11` will be - evaluated at compilation time and the age field will be stored + Struct fields are evaluated at compile-time, which allows + them to be dynamic. In the example below, `10 + 11` is + evaluated at compile-time and the age field is stored with value `21`: defmodule User do defstruct name: nil, age: 10 + 11 end + The `fields` argument is usually a keyword list with field names + as atom keys and default values as corresponding values. `defstruct/1` + also supports a list of atoms as its argument: in that case, the atoms + in the list will be used as the struct's field names and they will all + default to `nil`. + + defmodule Post do + defstruct [:title, :content, :author] + end + ## Deriving Although structs are maps, by default structs do not implement - any of the protocols implemented for maps. For example, if you - attempt to use the access protocol with the User struct, it - will lead to an error: + any of the protocols implemented for maps. 
For example, attempting + to use a protocol with the `User` struct leads to an error: - %User{}[:age] - ** (Protocol.UndefinedError) protocol Access not implemented for %User{...} + john = %User{name: "John"} + MyProtocol.call(john) + ** (Protocol.UndefinedError) protocol MyProtocol not implemented for %User{...} - However, `defstruct/2` allows implementation for protocols to - derived by defining a `@derive` attribute as a list before `defstruct/2` - is invoked: + `defstruct/1`, however, allows protocol implementations to be + *derived*. This can be done by defining a `@derive` attribute as a + list before invoking `defstruct/1`: defmodule User do - @derive [Access] + @derive [MyProtocol] defstruct name: nil, age: 10 + 11 end - %User{}[:age] #=> 21 + MyProtocol.call(john) #=> works - For each protocol given to `@derive`, Elixir will assert there is an - implementation of that protocol for maps and check if the map - implementation defines a `__deriving__/3` callback. If so, the callback - is invoked, otherwise an implementation that simply points to the map - one is automatically derived. + For each protocol in the `@derive` list, Elixir will assert there is an + implementation of that protocol for any (regardless if fallback to any + is `true`) and check if the any implementation defines a `__deriving__/3` + callback. If so, the callback is invoked, otherwise an implementation + that simply points to the any implementation is automatically derived. - ## Types + ## Enforcing keys + + When building a struct, Elixir will automatically guarantee all keys + belongs to the struct: + + %User{name: "john", unknown: :key} + ** (KeyError) key :unknown not found in: %User{age: 21, name: nil} - `defstruct` automatically generates a type `t` unless one exists. - The following definition: + Elixir also allows developers to enforce certain keys must always be + given when building the struct: defmodule User do - defstruct name: "José" :: String.t, - age: 25 :: integer + @enforce_keys [:name] + defstruct name: nil, age: 10 + 11 end - Generates a type as follows: - - @type t :: %User{name: String.t, age: integer} + Now trying to build a struct without the name key will fail: - In case a struct does not declare a field type, it defaults to `term`. - """ - defmacro defstruct(kv) do - {fields, types} = split_fields_and_types(kv) + %User{age: 21} + ** (ArgumentError) the following keys must also be given when building struct User: [:name] - fields = - quote bind_quoted: [fields: fields] do - fields = :lists.map(fn - { key, _ } = pair when is_atom(key) -> pair - key when is_atom(key) -> { key, nil } - other -> raise ArgumentError, "struct field names must be atoms, got: #{inspect other}" - end, fields) + Keep in mind `@enforce_keys` is a simple compile-time guarantee + to aid developers when building structs. It is not enforced on + updates and it does not provide any sort of value-validation. - @struct :maps.put(:__struct__, __MODULE__, :maps.from_list(fields)) + ## Types - case Module.get_attribute(__MODULE__, :derive) do - [] -> - :ok - derive -> - Protocol.__derive__(derive, __MODULE__, __ENV__) - end + It is recommended to define types for structs. By convention such type + is called `t`. 
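Before the type example below, a quick illustration of the `@enforce_keys` caveat above: the check applies only when building a struct, not when updating one. The `Order` module is illustrative:

    defmodule Order do
      @enforce_keys [:id]
      defstruct [:id, status: :new]
    end

    %Order{id: 1}              #=> %Order{id: 1, status: :new}
    # %Order{}                 #=> ** (ArgumentError) the following keys must also be given ...: [:id]

    # Updates are not checked, so an enforced key can still be unset afterwards:
    %{%Order{id: 1} | id: nil} #=> %Order{id: nil, status: :new}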
To define a struct inside a type, the struct literal syntax + is used: - @spec __struct__() :: t - def __struct__() do - @struct - end + defmodule User do + defstruct name: "John", age: 25 + @type t :: %User{name: String.t, age: non_neg_integer} end - types = - case bootstraped?(Kernel.Typespec) do - true when types == [] -> - quote unquote: false do - unless Kernel.Typespec.defines_type?(__MODULE__, :t, 0) do - types = :lists.map(fn {key, _} -> - {key, quote(do: term)} - end, fields) - @type t :: %{unquote_splicing(types), __struct__: __MODULE__} - end - end + It is recommended to only use the struct syntax when defining the struct's + type. When referring to another struct it's better to use `User.t` instead of + `%User{}`. + + The types of the struct fields that are not included in `%User{}` default to + `term`. + + Structs whose internal structure is private to the local module (pattern + matching them or directly accessing their fields should not be allowed) should + use the `@opaque` attribute. Structs whose internal structure is public should + use `@type`. + """ + defmacro defstruct(fields) do + builder = + case bootstrapped?(Enum) do true -> quote do - unless Kernel.Typespec.defines_type?(__MODULE__, :t, 0) do - @type t :: %{unquote_splicing(types), __struct__: __MODULE__} + def __struct__(kv) do + {map, keys} = + Enum.reduce(kv, {__struct__(), @enforce_keys}, fn {key, val}, {map, keys} -> + {Map.replace!(map, key, val), List.delete(keys, key)} + end) + case keys do + [] -> map + _ -> raise ArgumentError, "the following keys must also be given when building " <> + "struct #{inspect __MODULE__}: #{inspect keys}" + end end end false -> - nil + quote do + _ = @enforce_keys + def __struct__(kv) do + :lists.foldl(fn {key, val}, acc -> + Map.replace!(acc, key, val) + end, __struct__(), kv) + end + end end quote do - unquote(fields) - unquote(types) - fields - end - end - - defp split_fields_and_types(kv) do - case Keyword.keyword?(kv) do - true -> split_fields_and_types(kv, [], []) - false -> {kv, []} - end - end + if Module.get_attribute(__MODULE__, :struct) do + raise ArgumentError, "defstruct has already been called for " <> + "#{Kernel.inspect(__MODULE__)}, defstruct can only be called once per module" + end - defp split_fields_and_types([{field, {:::, _, [default, type]}}|t], fields, types) do - split_fields_and_types(t, [{field, default}|fields], [{field, type}|types]) - end + {struct, keys, derive} = Kernel.Utils.defstruct(__MODULE__, unquote(fields)) + @struct struct + @enforce_keys keys - defp split_fields_and_types([{field, default}|t], fields, types) do - split_fields_and_types(t, [{field, default}|fields], [{field, quote(do: term)}|types]) - end + case derive do + [] -> :ok + _ -> Protocol.__derive__(derive, __MODULE__, __ENV__) + end - defp split_fields_and_types([field|t], fields, types) do - split_fields_and_types(t, [field|fields], [{field, quote(do: term)}|types]) - end + def __struct__() do + @struct + end - defp split_fields_and_types([], fields, types) do - {:lists.reverse(fields), :lists.reverse(types)} + unquote(builder) + Kernel.Utils.announce_struct(__MODULE__) + struct + end end @doc ~S""" Defines an exception. Exceptions are structs backed by a module that implements - the Exception behaviour. The Exception behaviour requires + the `Exception` behaviour. The `Exception` behaviour requires two functions to be implemented: - * `exception/1` - that receives the arguments given to `raise/2` - and returns the exception struct. 
The default implementation - accepts a set of keyword arguments that is merged into the - struct. + * `exception/1` - receives the arguments given to `raise/2` + and returns the exception struct. The default implementation + accepts either a set of keyword arguments that is merged into + the struct or a string to be used as the exception's message. * `message/1` - receives the exception struct and must return its message. Most commonly exceptions have a message field which - by default is accessed by this function. However, if your exception + by default is accessed by this function. However, if an exception does not have a message field, this function must be explicitly implemented. - Since exceptions are structs, all the API supported by `defstruct/1` + Since exceptions are structs, the API supported by `defstruct/1` is also available in `defexception/1`. ## Raising exceptions - The most common way to raise an exception is via the `raise/2` - function: + The most common way to raise an exception is via `raise/2`: defmodule MyAppError do defexception [:message] end + value = [:hello] + raise MyAppError, message: "did not get what was expected, got: #{inspect value}" In many cases it is more convenient to pass the expected value to - `raise` and generate the message in the `exception/1` callback: + `raise/2` and generate the message in the `c:Exception.exception/1` callback: defmodule MyAppError do defexception [:message] @@ -2957,38 +3784,54 @@ defmodule Kernel do raise MyAppError, value - The example above is the preferred mechanism for customizing + The example above shows the preferred strategy for customizing exception messages. """ defmacro defexception(fields) do - fields = case is_list(fields) do - true -> [{:__exception__, true}|fields] - false -> quote(do: [{:__exception__, true}] ++ unquote(fields)) - end - - quote do + quote bind_quoted: [fields: fields] do @behaviour Exception - fields = defstruct unquote(fields) + struct = defstruct([__exception__: true] ++ fields) - @spec exception(term) :: t - def exception(args) when is_list(args) do - Kernel.struct(__struct__, args) - end - - defoverridable exception: 1 - - if Keyword.has_key?(fields, :message) do - @spec message(t) :: String.t + if Map.has_key?(struct, :message) do + @spec message(Exception.t) :: String.t def message(exception) do exception.message end defoverridable message: 1 + + @spec exception(String.t) :: Exception.t + def exception(msg) when is_binary(msg) do + exception(message: msg) + end + end + + @spec exception(Keyword.t) :: Exception.t + # TODO: Only call Kernel.struct! by Elixir v1.5 + def exception(args) when is_list(args) do + struct = __struct__() + {valid, invalid} = Enum.split_with(args, fn {k, _} -> Map.has_key?(struct, k) end) + + case invalid do + [] -> + :ok + _ -> + IO.warn "the following fields are unknown when raising " <> + "#{inspect __MODULE__}: #{inspect invalid}. " <> + "Please make sure to only give known fields when raising " <> + "or redefine #{inspect __MODULE__}.exception/1 to " <> + "discard unknown fields. Future Elixir versions will raise on " <> + "unknown fields given to raise/2" + end + + Kernel.struct!(struct, valid) end + + defoverridable exception: 1 end end - @doc """ + @doc ~S""" Defines a protocol. A protocol specifies an API that should be defined by its @@ -2996,43 +3839,46 @@ defmodule Kernel do ## Examples - In Elixir, only `false` and `nil` are considered falsy values. - Everything else evaluates to true in `if` clauses. 
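As an aside on `defexception/1` above: the binary clause of `exception/1` means a plain string can be given to `raise/2`, which becomes the `:message` field. Reusing the `MyAppError` module from the example:

    try do
      raise MyAppError, "did not get what was expected"
    rescue
      error in MyAppError -> error.message
    end
    #=> "did not get what was expected"

    # Equivalent to the keyword form:
    # raise MyAppError, message: "did not get what was expected"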
Depending - on the application, it may be important to specify a `blank?` - protocol that returns a boolean for other data types that should - be considered `blank?`. For instance, an empty list or an empty - binary could be considered blanks. - - We could implement this protocol as follow: - - defprotocol Blank do - @doc "Returns true if data is considered blank/empty" - def blank?(data) + In Elixir, we have two verbs for checking how many items there + are in a data structure: `length` and `size`. `length` means the + information must be computed. For example, `length(list)` needs to + traverse the whole list to calculate its length. On the other hand, + `tuple_size(tuple)` and `byte_size(binary)` do not depend on the + tuple and binary size as the size information is precomputed in + the data structure. + + Although Elixir includes specific functions such as `tuple_size`, + `binary_size` and `map_size`, sometimes we want to be able to + retrieve the size of a data structure regardless of its type. + In Elixir we can write polymorphic code, i.e. code that works + with different shapes/types, by using protocols. A size protocol + could be implemented as follows: + + defprotocol Size do + @doc "Calculates the size (and not the length!) of a data structure" + def size(data) end - Now that the protocol is defined, we can implement it. We need - to implement the protocol for each Elixir type. For example: + Now that the protocol can be implemented for every data structure + the protocol may have a compliant implementation for: - # Integers are never blank - defimpl Blank, for: Integer do - def blank?(number), do: false + defimpl Size, for: BitString do + def size(binary), do: byte_size(binary) end - # Just empty list is blank - defimpl Blank, for: List do - def blank?([]), do: true - def blank?(_), do: false + defimpl Size, for: Map do + def size(map), do: map_size(map) end - # Just the atoms false and nil are blank - defimpl Blank, for: Atom do - def blank?(false), do: true - def blank?(nil), do: true - def blank?(_), do: false + defimpl Size, for: Tuple do + def size(tuple), do: tuple_size(tuple) end - And we would have to define the implementation for all types. - The supported types available are: + Notice we didn't implement it for lists as we don't have the + `size` information on lists, rather its value needs to be + computed with `length`. + + It is possible to implement protocols for all Elixir types: * Structs (see below) * `Tuple` @@ -3048,58 +3894,72 @@ defmodule Kernel do * `Reference` * `Any` (see below) - ## Protocols + Structs + ## Protocols and Structs The real benefit of protocols comes when mixed with structs. For instance, Elixir ships with many data types implemented as - structs, like `HashDict` and `HashSet`. We can implement the - `Blank` protocol for those types as well: + structs, like `MapSet`. We can implement the `Size` protocol + for those types as well: - defimpl Blank, for: [HashDict, HashSet] do - def blank?(enum_like), do: Enum.empty?(enum_like) + defimpl Size, for: MapSet do + def size(map_set), do: MapSet.size(map_set) end - If a protocol is not found for a given type, it will fallback to - `Any`. 
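Assuming the `Size` protocol and the implementations sketched above have been compiled, dispatching works as follows (the exact error message may differ slightly):

    Size.size("hello")            #=> 5
    Size.size({:ok, :error})      #=> 2
    Size.size(%{name: "John"})    #=> 1
    Size.size(MapSet.new([1, 2])) #=> 2
    Size.size([1, 2, 3])          #=> ** (Protocol.UndefinedError) protocol Size not implemented for [1, 2, 3]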
+ When implementing a protocol for a struct, the `:for` option can + be omitted if the `defimpl` call is inside the module that defines + the struct: + + defmodule User do + defstruct [:email, :name] + + defimpl Size do + def size(%User{}), do: 2 # two fields + end + end + + If a protocol implementation is not found for a given type, + invoking the protocol will raise unless it is configured to + fallback to `Any`. Conveniences for building implementations + on top of existing ones are also available, look at `defstruct/1` + for more information about deriving + protocols. ## Fallback to any In some cases, it may be convenient to provide a default - implementation for all types. This can be achieved by - setting `@fallback_to_any` to `true` in the protocol + implementation for all types. This can be achieved by setting + the `@fallback_to_any` attribute to `true` in the protocol definition: - defprotocol Blank do + defprotocol Size do @fallback_to_any true - def blank?(data) + def size(data) end - Which can now be implemented as: + The `Size` protocol can now be implemented for `Any`: - defimpl Blank, for: Any do - def blank?(_), do: true + defimpl Size, for: Any do + def size(_), do: 0 end - One may wonder why such fallback is not true by default. - - It is two-fold: first, the majority of protocols cannot - implement an action in a generic way for all types. In fact, - providing a default implementation may be harmful, because users - may rely on the default implementation instead of providing a - specialized one. - - Second, falling back to `Any` adds an extra lookup to all types, - which is unnecessary overhead unless an implementation for Any is - required. + Although the implementation above is arguably not a reasonable + one. For example, it makes no sense to say a PID or an Integer + have a size of 0. That's one of the reasons why `@fallback_to_any` + is an opt-in behaviour. For the majority of protocols, raising + an error when a protocol is not implemented is the proper behaviour. ## Types Defining a protocol automatically defines a type named `t`, which - can be used as: - - @spec present?(Blank.t) :: boolean - def present?(blank) do - not Blank.blank?(blank) + can be used as follows: + + @spec print_size(Size.t) :: :ok + def print_size(data) do + IO.puts(case Size.size(data) do + 0 -> "data has no items" + 1 -> "data has one item" + n -> "data has #{n} items" + end) end The `@spec` above expresses that all types allowed to implement the @@ -3109,9 +3969,9 @@ defmodule Kernel do Any protocol module contains three extra functions: - * `__protocol__/1` - returns the protocol name when `:name` is given, and a - keyword list with the protocol functions when `:functions` is given + keyword list with the protocol functions and their arities when + `:functions` is given * `impl_for/1` - receives a structure and returns the module that implements the protocol for the structure, `nil` otherwise @@ -3119,6 +3979,15 @@ defmodule Kernel do * `impl_for!/1` - same as above but raises an error if an implementation is not found + Enumerable.__protocol__(:functions) + #=> [count: 1, member?: 2, reduce: 3] + + Enumerable.impl_for([]) + #=> Enumerable.List + + Enumerable.impl_for(42) + #=> nil + ## Consolidation In order to cope with code loading in development, protocols in @@ -3128,21 +3997,24 @@ defmodule Kernel do In order to speed up dispatching in production environments, where all implementations are known up-front, Elixir provides a feature called protocol consolidation. 
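Tying the derivation mechanics described earlier to the `Any` implementation above: once `Size` has an implementation for `Any`, structs can opt into it with `@derive`. The `OtherUser` module is illustrative:

    defmodule OtherUser do
      @derive [Size]
      defstruct [:name, :age]
    end

    Size.size(%OtherUser{}) #=> 0, dispatching to the implementation derived from Any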
For this reason, all protocols are - compiled with `debug_info` set to true, regardless of the option - set by `elixirc` compiler. The debug info though may be removed - after consolidation. + compiled with `debug_info` set to `true`, regardless of the option + set by `elixirc` compiler. The debug info though may be removed after + consolidation. - For more information on how to apply protocol consolidation to - a given project, please check the functions in the `Protocol` - module or the `mix compile.protocols` task. + Protocol consolidation is applied by default to all Mix projects. + For applying consolidation manually, please check the functions in + the `Protocol` module or the `mix compile.protocols` task. """ + defmacro defprotocol(name, do_block) + defmacro defprotocol(name, do: block) do Protocol.__protocol__(name, do: block) end @doc """ - Defines an implementation for the given protocol. See - `defprotocol/2` for examples. + Defines an implementation for the given protocol. + + See `defprotocol/2` for more information and examples on protocols. Inside an implementation, the name of the protocol can be accessed via `@protocol` and the current target as `@for`. @@ -3154,8 +4026,10 @@ defmodule Kernel do end @doc """ - Makes the given functions in the current module overridable. An overridable - function is lazily defined, allowing a developer to customize it. + Makes the given functions in the current module overridable. + + An overridable function is lazily defined, allowing a developer to override + it. ## Example @@ -3179,23 +4053,64 @@ defmodule Kernel do end end - As seen as in the example `super` can be used to call the default + As seen as in the example above, `super` can be used to call the default implementation. + + If `@behaviour` has been defined, `defoverridable` can also be called with a + module as an argument. All implemented callbacks from the behaviour above the + call to `defoverridable` will be marked as overridable. + + ## Example + + defmodule Behaviour do + @callback foo :: any + end + + defmodule DefaultMod do + defmacro __using__(_opts) do + quote do + @behaviour Behaviour + + def foo do + "Override me" + end + + defoverridable Behaviour + end + end + end + + defmodule InheritMod do + use DefaultMod + + def foo do + "Overriden" + end + end + """ - defmacro defoverridable(tuples) do + defmacro defoverridable(keywords_or_behaviour) do quote do - Module.make_overridable(__MODULE__, unquote(tuples)) + Module.make_overridable(__MODULE__, unquote(keywords_or_behaviour)) end end @doc """ - `use` is a simple mechanism for using a given module into - the current context. + Uses the given module in the current context. + + When calling: + + use MyModule, some: :options + + the `__using__/1` macro from the `MyModule` module is invoked with the second + argument passed to `use` as its argument. Since `__using__/1` is a macro, all + the usual macro rules apply, and its return value should be quoted code + that is then inserted where `use/2` is called. ## Examples - For example, in order to write tests using the ExUnit framework, - a developer should use the `ExUnit.Case` module: + For example, in order to write test cases using the `ExUnit` framework + provided with Elixir, a developer should `use` the `ExUnit.Case` module: defmodule AssertionTest do use ExUnit.Case, async: true @@ -3205,10 +4120,8 @@ defmodule Kernel do end end - By calling `use`, a hook called `__using__` will be invoked in - `ExUnit.Case` which will then do the proper setup. 
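One consequence of the new `expand_aliases/2` helper further down in this hunk is that `use/2` also accepts the multi-alias shorthand. A sketch with illustrative module names (each module is assumed to define `__using__/1`):

    use MyApp.{Foo, Bar}

    # behaves like:
    use MyApp.Foo
    use MyApp.Bar

Each module in the braces is required and its `__using__/1` macro is invoked with the same options.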
- - Simply put, `use` is simply a translation to: + In this example, `ExUnit.Case.__using__/1` is called with the keyword list + `[async: true]` as its argument; `use/2` translates to: defmodule AssertionTest do require ExUnit.Case @@ -3219,89 +4132,156 @@ defmodule Kernel do end end + `ExUnit.Case` will then define the `__using__/1` macro: + + defmodule ExUnit.Case do + defmacro __using__(opts) do + # do something with opts + quote do + # return some code to inject in the caller + end + end + end + + ## Best practices + + `__using__/1` is typically used when there is a need to set some state (via + module attributes) or callbacks (like `@before_compile`, see the documentation + for `Module` for more information) into the caller. + + `__using__/1` may also be used to alias, require, or import functionality + from different modules: + + defmodule MyModule do + defmacro __using__(_opts) do + quote do + import MyModule.Foo + import MyModule.Bar + import MyModule.Baz + + alias MyModule.Repo + end + end + end + + However, do not provide `__using__/1` if all it does is to import, + alias or require the module itself. For example, avoid this: + + defmodule MyModule do + defmacro __using__(_opts) do + quote do + import MyModule + end + end + end + + In such cases, developers should instead import or alias the module + directly, so that they can customize those as they wish, + without the indirection behind `use/2`. + + Finally, developers should also avoid defining functions inside + the `__using__/1` callback, unless those functions are the default + implementation of a previously defined `@callback` or are functions + meant to be overridden (see `defoverridable/1`). Even in these cases, + defining functions should be seen as a "last resort". + + In case you want to provide some existing functionality to the user module, + please define it in a module which will be imported accordingly; for example, + `ExUnit.Case` doesn't define the `test/3` macro in the module that calls + `use ExUnit.Case`, but it defines `ExUnit.Case.test/3` and just imports that + into the caller when used. """ defmacro use(module, opts \\ []) do - expanded = Macro.expand(module, __CALLER__) - - case is_atom(expanded) do - false -> - raise ArgumentError, "invalid arguments for use, expected an atom or alias as argument" - true -> + calls = Enum.map(expand_aliases(module, __CALLER__), fn + expanded when is_atom(expanded) -> quote do require unquote(expanded) unquote(expanded).__using__(unquote(opts)) end - end + _otherwise -> + raise ArgumentError, "invalid arguments for use, expected a compile time atom or alias, got: #{Macro.to_string(module)}" + end) + quote(do: (unquote_splicing calls)) + end + + defp expand_aliases({{:., _, [base, :{}]}, _, refs}, env) do + base = Macro.expand(base, env) + Enum.map(refs, fn + {:__aliases__, _, ref} -> + Module.concat([base | ref]) + ref when is_atom(ref) -> + Module.concat(base, ref) + other -> other + end) + end + + defp expand_aliases(module, env) do + [Macro.expand(module, env)] end @doc """ - Defines the given functions in the current module that will - delegate to the given `target`. Functions defined with - `defdelegate` are public and are allowed to be invoked - from external. If you find yourself wishing to define a - delegation as private, you should likely use import - instead. + Defines a function that delegates to another module. 
+ + Functions defined with `defdelegate/2` are public and can be invoked from + outside the module they're defined in (like if they were defined using + `def/2`). When the desire is to delegate as private functions, `import/2` should + be used. - Delegation only works with functions, delegating to macros - is not supported. + Delegation only works with functions; delegating macros is not supported. + + Check `def/2` for rules on naming and default arguments. ## Options - * `:to` - the expression to delegate to. Any expression - is allowed and its results will be calculated on runtime. + * `:to` - the module to dispatch to. * `:as` - the function to call on the target given in `:to`. This parameter is optional and defaults to the name being - delegated. - - * `:append_first` - if true, when delegated, first argument - passed to the delegate will be relocated to the end of the - arguments when dispatched to the target. - - The motivation behind this is because Elixir normalizes - the "handle" as a first argument and some Erlang modules - expect it as last argument. + delegated (`funs`). ## Examples defmodule MyList do defdelegate reverse(list), to: :lists - defdelegate [reverse(list), map(callback, list)], to: :lists defdelegate other_reverse(list), to: :lists, as: :reverse end MyList.reverse([1, 2, 3]) - #=> [3,2,1] + #=> [3, 2, 1] MyList.other_reverse([1, 2, 3]) - #=> [3,2,1] + #=> [3, 2, 1] """ defmacro defdelegate(funs, opts) do funs = Macro.escape(funs, unquote: true) quote bind_quoted: [funs: funs, opts: opts] do target = Keyword.get(opts, :to) || - raise ArgumentError, "Expected to: to be given as argument" + raise ArgumentError, "expected to: to be given as argument" + + # TODO: Raise on 2.0 + %{file: file, line: line} = __ENV__ + if is_list(funs) do + :elixir_errors.warn(line, file, + "passing a list to Kernel.defdelegate/2 is deprecated, " <> + "please define each delegate separately") + end - append_first = Keyword.get(opts, :append_first, false) + # TODO: Remove on 2.0 + if Keyword.has_key?(opts, :append_first) do + :elixir_errors.warn(line, file, + "Kernel.defdelegate/2 :append_first option is deprecated") + end for fun <- List.wrap(funs) do - {name, args} = - case Macro.decompose_call(fun) do - {_, _} = pair -> pair - _ -> raise ArgumentError, "invalid syntax in defdelegate #{Macro.to_string(fun)}" - end - - actual_args = - case append_first and args != [] do - true -> tl(args) ++ [hd(args)] - false -> args - end + {name, args, as, as_args} = Kernel.Utils.defdelegate(fun, opts) - fun = Keyword.get(opts, :as, name) + unless Module.get_attribute(__MODULE__, :doc) do + @doc "See `#{inspect target}.#{as}/#{:erlang.length args}`." + end def unquote(name)(unquote_splicing(args)) do - unquote(target).unquote(fun)(unquote_splicing(actual_args)) + unquote(target).unquote(as)(unquote_splicing(as_args)) end end end @@ -3309,129 +4289,222 @@ defmodule Kernel do ## Sigils - @doc """ - Handles the sigil ~S. It simply returns a string - without escaping characters and without interpolations. + @doc ~S""" + Handles the sigil `~S`. + + It simply returns a string without escaping characters and without + interpolations. ## Examples iex> ~S(foo) "foo" - iex> ~S(f\#{o}o) - "f\\\#{o}o" + iex> ~S(f#{o}o) + "f\#{o}o" """ - defmacro sigil_S(string, []) do - string - end + defmacro sigil_S(term, modifiers) + defmacro sigil_S({:<<>>, _, [binary]}, []) when is_binary(binary), do: binary - @doc """ - Handles the sigil ~s. 
It returns a string as if it was double quoted - string, unescaping characters and replacing interpolations. + @doc ~S""" + Handles the sigil `~s`. + + It returns a string as if it was a double quoted string, unescaping characters + and replacing interpolations. ## Examples iex> ~s(foo) "foo" - iex> ~s(f\#{:o}o) + iex> ~s(f#{:o}o) "foo" + iex> ~s(f\#{:o}o) + "f\#{:o}o" + """ + defmacro sigil_s(term, modifiers) + defmacro sigil_s({:<<>>, _, [piece]}, []) when is_binary(piece) do + Macro.unescape_string(piece) + end defmacro sigil_s({:<<>>, line, pieces}, []) do {:<<>>, line, Macro.unescape_tokens(pieces)} end - @doc """ - Handles the sigil ~C. It simply returns a char list - without escaping characters and without interpolations. + @doc ~S""" + Handles the sigil `~C`. + + It simply returns a charlist without escaping characters and without + interpolations. ## Examples iex> ~C(foo) 'foo' - iex> ~C(f\#{o}o) - 'f\\\#{o}o' + iex> ~C(f#{o}o) + 'f\#{o}o' """ - defmacro sigil_C({:<<>>, _line, [string]}, []) when is_binary(string) do - String.to_char_list(string) + defmacro sigil_C(term, modifiers) + defmacro sigil_C({:<<>>, _meta, [string]}, []) when is_binary(string) do + String.to_charlist(string) end - @doc """ - Handles the sigil ~c. It returns a char list as if it were a single - quoted string, unescaping characters and replacing interpolations. + @doc ~S""" + Handles the sigil `~c`. + + It returns a charlist as if it were a single quoted string, unescaping + characters and replacing interpolations. ## Examples iex> ~c(foo) 'foo' - iex> ~c(f\#{:o}o) + iex> ~c(f#{:o}o) 'foo' + iex> ~c(f\#{:o}o) + 'f\#{:o}o' + """ + defmacro sigil_c(term, modifiers) # We can skip the runtime conversion if we are # creating a binary made solely of series of chars. - defmacro sigil_c({:<<>>, _line, [string]}, []) when is_binary(string) do - String.to_char_list(Macro.unescape_string(string)) + defmacro sigil_c({:<<>>, _meta, [string]}, []) when is_binary(string) do + String.to_charlist(Macro.unescape_string(string)) end - defmacro sigil_c({:<<>>, line, pieces}, []) do - binary = {:<<>>, line, Macro.unescape_tokens(pieces)} - quote do: String.to_char_list(unquote(binary)) + defmacro sigil_c({:<<>>, meta, pieces}, []) do + binary = {:<<>>, meta, Macro.unescape_tokens(pieces)} + quote do: String.to_charlist(unquote(binary)) end @doc """ - Handles the sigil ~r. It returns a Regex pattern. + Handles the sigil `~r`. + + It returns a regular expression pattern, unescaping characters and replacing + interpolations. + + More information on regexes can be found in the `Regex` module. ## Examples iex> Regex.match?(~r(foo), "foo") true + iex> Regex.match?(~r/a#{:b}c/, "abc") + true + """ - defmacro sigil_r({:<<>>, _line, [string]}, options) when is_binary(string) do + defmacro sigil_r(term, modifiers) + defmacro sigil_r({:<<>>, _meta, [string]}, options) when is_binary(string) do binary = Macro.unescape_string(string, fn(x) -> Regex.unescape_map(x) end) regex = Regex.compile!(binary, :binary.list_to_bin(options)) Macro.escape(regex) end - defmacro sigil_r({:<<>>, line, pieces}, options) do - binary = {:<<>>, line, Macro.unescape_tokens(pieces, fn(x) -> Regex.unescape_map(x) end)} + defmacro sigil_r({:<<>>, meta, pieces}, options) do + binary = {:<<>>, meta, Macro.unescape_tokens(pieces, fn(x) -> Regex.unescape_map(x) end)} quote do: Regex.compile!(unquote(binary), unquote(:binary.list_to_bin(options))) end - @doc """ - Handles the sigil ~R. It returns a Regex pattern without escaping + @doc ~S""" + Handles the sigil `~R`. 
+ + It returns a regular expression pattern without escaping nor interpreting interpolations. + More information on regexes can be found in the `Regex` module. + ## Examples - iex> Regex.match?(~R(f\#{1,3}o), "f\#o") + iex> Regex.match?(~R(f#{1,3}o), "f#o") true """ - defmacro sigil_R({:<<>>, _line, [string]}, options) when is_binary(string) do + defmacro sigil_R(term, modifiers) + defmacro sigil_R({:<<>>, _meta, [string]}, options) when is_binary(string) do regex = Regex.compile!(string, :binary.list_to_bin(options)) Macro.escape(regex) end - @doc """ - Handles the sigil ~w. It returns a list of "words" split by whitespace. + @doc ~S""" + Handles the sigil `~D` for dates. + + The lower case `~d` variant does not exist as interpolation + and escape characters are not useful for date sigils. + + ## Examples + + iex> ~D[2015-01-13] + ~D[2015-01-13] + """ + defmacro sigil_D(date, modifiers) + defmacro sigil_D({:<<>>, _, [string]}, []) do + Macro.escape(Date.from_iso8601!(string)) + end + + @doc ~S""" + Handles the sigil `~T` for times. + + The lower case `~t` variant does not exist as interpolation + and escape characters are not useful for time sigils. + + ## Examples + + iex> ~T[13:00:07] + ~T[13:00:07] + iex> ~T[13:00:07.001] + ~T[13:00:07.001] + + """ + defmacro sigil_T(date, modifiers) + defmacro sigil_T({:<<>>, _, [string]}, []) do + Macro.escape(Time.from_iso8601!(string)) + end + + @doc ~S""" + Handles the sigil `~N` for naive date times. + + The lower case `~n` variant does not exist as interpolation + and escape characters are not useful for datetime sigils. + + ## Examples + + iex> ~N[2015-01-13 13:00:07] + ~N[2015-01-13 13:00:07] + iex> ~N[2015-01-13T13:00:07.001] + ~N[2015-01-13 13:00:07.001] + + """ + defmacro sigil_N(date, modifiers) + defmacro sigil_N({:<<>>, _, [string]}, []) do + Macro.escape(NaiveDateTime.from_iso8601!(string)) + end + + @doc ~S""" + Handles the sigil `~w`. + + It returns a list of "words" split by whitespace. Character unescaping and + interpolation happens for each word. ## Modifiers - * `s`: strings (default) - * `a`: atoms - * `c`: char lists + * `s`: words in the list are strings (default) + * `a`: words in the list are atoms + * `c`: words in the list are charlists ## Examples - iex> ~w(foo \#{:bar} baz) + iex> ~w(foo #{:bar} baz) + ["foo", "bar", "baz"] + + iex> ~w(foo #{" bar baz "}) ["foo", "bar", "baz"] iex> ~w(--source test/enum_test.exs) @@ -3441,75 +4514,80 @@ defmodule Kernel do [:foo, :bar, :baz] """ - - defmacro sigil_w({:<<>>, _line, [string]}, modifiers) when is_binary(string) do + defmacro sigil_w(term, modifiers) + defmacro sigil_w({:<<>>, _meta, [string]}, modifiers) when is_binary(string) do split_words(Macro.unescape_string(string), modifiers) end - defmacro sigil_w({:<<>>, line, pieces}, modifiers) do - binary = {:<<>>, line, Macro.unescape_tokens(pieces)} + defmacro sigil_w({:<<>>, meta, pieces}, modifiers) do + binary = {:<<>>, meta, Macro.unescape_tokens(pieces)} split_words(binary, modifiers) end - @doc """ - Handles the sigil ~W. It returns a list of "words" split by whitespace - without escaping nor interpreting interpolations. + @doc ~S""" + Handles the sigil `~W`. + + It returns a list of "words" split by whitespace without escaping nor + interpreting interpolations. 
## Modifiers - * `s`: strings (default) - * `a`: atoms - * `c`: char lists + * `s`: words in the list are strings (default) + * `a`: words in the list are atoms + * `c`: words in the list are charlists ## Examples - iex> ~W(foo \#{bar} baz) - ["foo", "\\\#{bar}", "baz"] + iex> ~W(foo #{bar} baz) + ["foo", "\#{bar}", "baz"] """ - defmacro sigil_W({:<<>>, _line, [string]}, modifiers) when is_binary(string) do + defmacro sigil_W(term, modifiers) + defmacro sigil_W({:<<>>, _meta, [string]}, modifiers) when is_binary(string) do split_words(string, modifiers) end - defp split_words("", _modifiers), do: [] - - defp split_words(string, modifiers) do - mod = - case modifiers do - [] -> ?s - [mod] when mod == ?s or mod == ?a or mod == ?c -> mod - _else -> raise ArgumentError, "modifier must be one of: s, a, c" - end + defp split_words(string, []) do + split_words(string, [?s]) + end + defp split_words(string, [mod]) + when mod == ?s or mod == ?a or mod == ?c do case is_binary(string) do true -> + parts = String.split(string) case mod do - ?s -> String.split(string) - ?a -> for p <- String.split(string), do: String.to_atom(p) - ?c -> for p <- String.split(string), do: String.to_char_list(p) + ?s -> parts + ?a -> :lists.map(&String.to_atom/1, parts) + ?c -> :lists.map(&String.to_charlist/1, parts) end false -> + parts = quote(do: String.split(unquote(string))) case mod do - ?s -> quote do: String.split(unquote(string)) - ?a -> quote do: for(p <- String.split(unquote(string)), do: String.to_atom(p)) - ?c -> quote do: for(p <- String.split(unquote(string)), do: String.to_char_list(p)) + ?s -> parts + ?a -> quote(do: :lists.map(&String.to_atom/1, unquote(parts))) + ?c -> quote(do: :lists.map(&String.to_charlist/1, unquote(parts))) end end end + defp split_words(_string, _mods) do + raise ArgumentError, "modifier must be one of: s, a, c" + end + ## Shared functions defp optimize_boolean({:case, meta, args}) do - {:case, [{:optimize_boolean, true}|meta], args} + {:case, [{:optimize_boolean, true} | meta], args} end # We need this check only for bootstrap purposes. # Once Kernel is loaded and we recompile, it is a no-op. case :code.ensure_loaded(Kernel) do {:module, _} -> - defp bootstraped?(_), do: true + defp bootstrapped?(_), do: true {:error, _} -> - defp bootstraped?(module), do: :code.ensure_loaded(module) == {:module, module} + defp bootstrapped?(module), do: :code.ensure_loaded(module) == {:module, module} end defp assert_module_scope(env, fun, arity) do @@ -3527,9 +4605,16 @@ defmodule Kernel do end defp env_stacktrace(env) do - case bootstraped?(Path) do + case bootstrapped?(Path) do true -> Macro.Env.stacktrace(env) false -> [] end end + + @doc false + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + defmacro to_char_list(arg) do + quote do: Kernel.to_charlist(unquote(arg)) + end end diff --git a/lib/elixir/lib/kernel/cli.ex b/lib/elixir/lib/kernel/cli.ex index 1e36a9a176c..e524ec33b14 100644 --- a/lib/elixir/lib/kernel/cli.ex +++ b/lib/elixir/lib/kernel/cli.ex @@ -3,7 +3,7 @@ defmodule Kernel.CLI do @blank_config %{commands: [], output: ".", compile: [], halt: true, compiler_options: [], errors: [], - verbose_compile: false} + pa: [], pz: [], verbose_compile: false} @doc """ This is the API invoked by Elixir boot process. @@ -14,7 +14,7 @@ defmodule Kernel.CLI do {config, argv} = parse_argv(argv) System.argv(argv) - run fn -> + run fn _ -> errors = process_commands(config) if errors != [] do @@ -33,65 +33,99 @@ defmodule Kernel.CLI do by escripts generated by Elixir. 
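  For example, wrapping a program's entry point in `run/2` looks roughly like
  the sketch below (`MyApp.main/1` is only a placeholder for illustration):

      Kernel.CLI.run(fn _status -> MyApp.main(System.argv()) end)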
""" def run(fun, halt \\ true) do - try do - fun.() - if halt do - at_exit(0) - System.halt(0) + {ok_or_shutdown, status} = exec_fun(fun, {:ok, 0}) + if ok_or_shutdown == :shutdown or halt do + {_, status} = at_exit({ok_or_shutdown, status}) + + # Ensure Logger messages are flushed before halting + case :erlang.whereis(Logger) do + pid when is_pid(pid) -> Logger.flush() + _ -> :ok end - catch - :exit, reason when is_integer(reason) -> - at_exit(reason) - System.halt(reason) - :exit, :normal -> - at_exit(0) - System.halt(0) - kind, reason -> - at_exit(1) - print_error(kind, reason, System.stacktrace) - System.halt(1) + + System.halt(status) end end - @doc """ - Parses ARGV returning the CLI config and trailing args. - """ + @doc false def parse_argv(argv) do parse_argv(argv, @blank_config) end - @doc """ - Process commands according to the parsed config from `parse_argv/1`. - Returns all errors. - """ + @doc false def process_commands(config) do results = Enum.map(Enum.reverse(config.commands), &process_command(&1, config)) errors = for {:error, msg} <- results, do: msg Enum.reverse(config.errors, errors) end - ## Helpers + @doc false + def format_error(kind, reason, stacktrace) do + {blamed, stacktrace} = Exception.blame(kind, reason, stacktrace) + iodata = + case blamed do + %FunctionClauseError{} -> + [Exception.format_banner(kind, reason, stacktrace), + pad(FunctionClauseError.blame(blamed, &inspect/1, &blame_match/2))] + _ -> + Exception.format_banner(kind, blamed, stacktrace) + end + [iodata, ?\n, Exception.format_stacktrace(prune_stacktrace(stacktrace))] + end - defp at_exit(status) do - hooks = :elixir_code_server.call(:flush_at_exit) + ## Helpers - for hook <- hooks do - try do - hook.(status) - catch - kind, reason -> - print_error(kind, reason, System.stacktrace) - end + defp at_exit(res) do + hooks = :elixir_config.get_and_put(:at_exit, []) + res = Enum.reduce(hooks, res, &exec_fun/2) + if hooks == [], do: res, else: at_exit(res) + end + + defp exec_fun(fun, res) when is_function(fun, 1) and is_tuple(res) do + parent = self() + + {pid, ref} = + spawn_monitor(fn -> + try do + fun.(elem(res, 1)) + catch + :exit, {:shutdown, int} when is_integer(int) -> + send parent, {self(), {:shutdown, int}} + exit({:shutdown, int}) + :exit, reason + when reason == :normal + when reason == :shutdown + when tuple_size(reason) == 2 and elem(reason, 0) == :shutdown -> + send parent, {self(), {:shutdown, 0}} + exit(reason) + kind, reason -> + stack = System.stacktrace + print_error(kind, reason, stack) + send parent, {self(), {:shutdown, 1}} + exit(to_exit(kind, reason, stack)) + else + _ -> + send parent, {self(), res} + end + end) + + receive do + {^pid, res} -> + :erlang.demonitor(ref, [:flush]) + res + {:DOWN, ^ref, _, _, other} -> + print_error({:EXIT, pid}, other, []) + {:shutdown, 1} end - - # If an at_exit callback adds a - # new hook we need to invoke it. 
- unless hooks == [], do: at_exit(status) end + defp to_exit(:throw, reason, stack), do: {{:nocatch, reason}, stack} + defp to_exit(:error, reason, stack), do: {reason, stack} + defp to_exit(:exit, reason, _stack), do: reason + defp shared_option?(list, config, callback) do case parse_shared(list, config) do - {[h|hs], _} when h == hd(list) -> + {[h | hs], _} when h == hd(list) -> new_config = %{config | errors: ["#{h} : Unknown option" | config.errors]} callback.(hs, new_config) {new_list, new_config} -> @@ -99,22 +133,47 @@ defmodule Kernel.CLI do end end - defp print_error(kind, reason, trace) do - IO.puts :stderr, Exception.format(kind, reason, prune_stacktrace(trace)) + ## Error handling + + defp print_error(kind, reason, stacktrace) do + IO.write :stderr, format_error(kind, reason, stacktrace) end - @elixir_internals [:elixir_compiler, :elixir_module, :elixir_translator, :elixir_expand] + defp blame_match(%{match?: true, node: node}, _), + do: blame_ansi(:normal, "+", node) + defp blame_match(%{match?: false, node: node}, _), + do: blame_ansi(:red, "-", node) + defp blame_match(_, string), + do: string - defp prune_stacktrace([{mod, _, _, _}|t]) when mod in @elixir_internals do + defp blame_ansi(color, no_ansi, node) do + if IO.ANSI.enabled? do + [color | Macro.to_string(node)] + |> IO.ANSI.format(true) + |> IO.iodata_to_binary() + else + no_ansi <> Macro.to_string(node) <> no_ansi + end + end + + defp pad(string) do + " " <> String.replace(string, "\n", "\n ") + end + + @elixir_internals [:elixir, :elixir_expand, :elixir_compiler, :elixir_module, + :elixir_clauses, :elixir_lexical, :elixir_def, :elixir_map, + :elixir_erl, :elixir_erl_clauses, :elixir_erl_pass, Kernel.ErrorHandler] + + defp prune_stacktrace([{mod, _, _, _} | t]) when mod in @elixir_internals do prune_stacktrace(t) end - defp prune_stacktrace([{__MODULE__, :wrapper, 1, _}|_]) do + defp prune_stacktrace([{__MODULE__, :wrapper, 1, _} | _]) do [] end - defp prune_stacktrace([h|t]) do - [h|prune_stacktrace(t)] + defp prune_stacktrace([h | t]) do + [h | prune_stacktrace(t)] end defp prune_stacktrace([]) do @@ -123,46 +182,54 @@ defmodule Kernel.CLI do # Parse shared options - defp parse_shared([opt|_t], _config) when opt in ["-v", "--version"] do - IO.puts "Elixir #{System.version}" + defp parse_shared([opt | _t], _config) when opt in ["-v", "--version"] do + if function_exported?(IEx, :started?, 0) and IEx.started? 
do + IO.puts "IEx " <> System.build_info[:build] + else + IO.puts :erlang.system_info(:system_version) + IO.puts "Elixir " <> System.build_info[:build] + end + System.halt 0 end - defp parse_shared(["-pa", h|t], config) do - add_code_path(h, &Code.prepend_path/1) - parse_shared t, config + defp parse_shared(["-pa", h | t], config) do + paths = expand_code_path(h) + Enum.each(paths, &:code.add_patha/1) + parse_shared t, %{config | pa: config.pa ++ paths} end - defp parse_shared(["-pz", h|t], config) do - add_code_path(h, &Code.append_path/1) - parse_shared t, config + defp parse_shared(["-pz", h | t], config) do + paths = expand_code_path(h) + Enum.each(paths, &:code.add_pathz/1) + parse_shared t, %{config | pz: config.pz ++ paths} end - defp parse_shared(["--app", h|t], config) do + defp parse_shared(["--app", h | t], config) do parse_shared t, %{config | commands: [{:app, h} | config.commands]} end - defp parse_shared(["--no-halt"|t], config) do + defp parse_shared(["--no-halt" | t], config) do parse_shared t, %{config | halt: false} end - defp parse_shared(["-e", h|t], config) do + defp parse_shared(["-e", h | t], config) do parse_shared t, %{config | commands: [{:eval, h} | config.commands]} end - defp parse_shared(["-r", h|t], config) do + defp parse_shared(["-r", h | t], config) do parse_shared t, %{config | commands: [{:require, h} | config.commands]} end - defp parse_shared(["-pr", h|t], config) do + defp parse_shared(["-pr", h | t], config) do parse_shared t, %{config | commands: [{:parallel_require, h} | config.commands]} end - defp parse_shared([erl, _|t], config) when erl in ["--erl", "--sname", "--name", "--cookie"] do + defp parse_shared([erl, _ | t], config) when erl in ["--erl", "--sname", "--name", "--cookie", "--logger-otp-reports", "--logger-sasl-reports"] do parse_shared t, config end - defp parse_shared([erl|t], config) when erl in ["--detached", "--hidden"] do + defp parse_shared([erl | t], config) when erl in ["--detached", "--hidden", "--werl"] do parse_shared t, config end @@ -170,34 +237,33 @@ defmodule Kernel.CLI do {list, config} end - - defp add_code_path(path, fun) do + defp expand_code_path(path) do path = Path.expand(path) case Path.wildcard(path) do - [] -> fun.(path) - list -> Enum.each(list, fun) + [] -> [to_charlist(path)] + list -> Enum.map(list, &to_charlist/1) end end # Process init options - defp parse_argv(["--"|t], config) do + defp parse_argv(["--" | t], config) do {config, t} end - defp parse_argv(["+elixirc"|t], config) do + defp parse_argv(["+elixirc" | t], config) do parse_compiler t, config end - defp parse_argv(["+iex"|t], config) do + defp parse_argv(["+iex" | t], config) do parse_iex t, config end - defp parse_argv(["-S", h|t], config) do + defp parse_argv(["-S", h | t], config) do {%{config | commands: [{:script, h} | config.commands]}, t} end - defp parse_argv([h|t] = list, config) do + defp parse_argv([h | t] = list, config) do case h do "-" <> _ -> shared_option? 
list, config, &parse_argv(&1, &2) @@ -216,35 +282,35 @@ defmodule Kernel.CLI do # Parse compiler options - defp parse_compiler(["--"|t], config) do + defp parse_compiler(["--" | t], config) do {config, t} end - defp parse_compiler(["-o", h|t], config) do + defp parse_compiler(["-o", h | t], config) do parse_compiler t, %{config | output: h} end - defp parse_compiler(["--no-docs"|t], config) do + defp parse_compiler(["--no-docs" | t], config) do parse_compiler t, %{config | compiler_options: [{:docs, false} | config.compiler_options]} end - defp parse_compiler(["--no-debug-info"|t], config) do + defp parse_compiler(["--no-debug-info" | t], config) do parse_compiler t, %{config | compiler_options: [{:debug_info, false} | config.compiler_options]} end - defp parse_compiler(["--ignore-module-conflict"|t], config) do + defp parse_compiler(["--ignore-module-conflict" | t], config) do parse_compiler t, %{config | compiler_options: [{:ignore_module_conflict, true} | config.compiler_options]} end - defp parse_compiler(["--warnings-as-errors"|t], config) do + defp parse_compiler(["--warnings-as-errors" | t], config) do parse_compiler t, %{config | compiler_options: [{:warnings_as_errors, true} | config.compiler_options]} end - defp parse_compiler(["--verbose"|t], config) do + defp parse_compiler(["--verbose" | t], config) do parse_compiler t, %{config | verbose_compile: true} end - defp parse_compiler([h|t] = list, config) do + defp parse_compiler([h | t] = list, config) do case h do "-" <> _ -> shared_option? list, config, &parse_compiler(&1, &2) @@ -255,30 +321,30 @@ defmodule Kernel.CLI do end defp parse_compiler([], config) do - {%{config | commands: [{:compile, config.compile}|config.commands]}, []} + {%{config | commands: [{:compile, config.compile} | config.commands]}, []} end - # Parse iex options + # Parse IEx options - defp parse_iex(["--"|t], config) do + defp parse_iex(["--" | t], config) do {config, t} end # This clause is here so that Kernel.CLI does not # error out with "unknown option" - defp parse_iex(["--dot-iex", _|t], config) do + defp parse_iex(["--dot-iex", _ | t], config) do parse_iex t, config end - defp parse_iex([opt, _|t], config) when opt in ["--remsh"] do + defp parse_iex([opt, _ | t], config) when opt in ["--remsh"] do parse_iex t, config end - defp parse_iex(["-S", h|t], config) do + defp parse_iex(["-S", h | t], config) do {%{config | commands: [{:script, h} | config.commands]}, t} end - defp parse_iex([h|t] = list, config) do + defp parse_iex([h | t] = list, config) do case h do "-" <> _ -> shared_option? list, config, &parse_iex(&1, &2) @@ -352,7 +418,8 @@ defmodule Kernel.CLI do end defp process_command({:compile, patterns}, config) do - :filelib.ensure_dir(:filename.join(config.output, ".")) + # If ensuring the dir returns an error no files will be found. 
+ _ = :filelib.ensure_dir(:filename.join(config.output, ".")) case filter_multiple_patterns(patterns) do {:ok, []} -> @@ -360,8 +427,13 @@ defmodule Kernel.CLI do {:ok, files} -> wrapper fn -> Code.compiler_options(config.compiler_options) - Kernel.ParallelCompiler.files_to_path(files, config.output, - each_file: fn file -> if config.verbose_compile do IO.puts "Compiled #{file}" end end) + opts = + if config.verbose_compile do + [each_long_compilation: &IO.puts("Compiling #{&1} (it's taking more than 5s)")] + else + [] + end + Kernel.ParallelCompiler.files_to_path(files, config.output, opts) end {:missing, missing} -> {:error, "No files matched pattern(s) #{Enum.join(missing, ",")}"} @@ -369,34 +441,29 @@ defmodule Kernel.CLI do end defp filter_patterns(pattern) do - Enum.filter(Enum.uniq(Path.wildcard(pattern)), &File.regular?(&1)) + pattern + |> Path.wildcard + |> :lists.usort + |> Enum.filter(&File.regular?/1) end defp filter_multiple_patterns(patterns) do - matched_files = Enum.map patterns, fn(pattern) -> - case filter_patterns(pattern) do - [] -> {:missing, pattern} - files -> {:ok, files} + {files, missing} = + Enum.reduce patterns, {[], []}, fn pattern, {files, missing} -> + case filter_patterns(pattern) do + [] -> {files, [pattern | missing]} + match -> {match ++ files, missing} + end end - end - - files = Enum.filter_map matched_files, - fn(match) -> elem(match, 0) == :ok end, - &elem(&1, 1) - missing_patterns = Enum.filter_map matched_files, - fn(match) -> elem(match, 0) == :missing end, - &elem(&1, 1) - - if missing_patterns == [] do - {:ok, Enum.uniq(Enum.concat(files))} - else - {:missing, Enum.uniq(missing_patterns)} + case missing do + [] -> {:ok, :lists.usort(files)} + _ -> {:missing, :lists.usort(missing)} end end defp wrapper(fun) do - fun.() + _ = fun.() :ok end @@ -407,8 +474,8 @@ defmodule Kernel.CLI do # the actual Elixir executable. case :os.type() do {:win32, _} -> - exec = Path.rootname(exec) - if File.regular?(exec), do: exec + base = Path.rootname(exec) + if File.regular?(base), do: base, else: exec _ -> exec end diff --git a/lib/elixir/lib/kernel/error_handler.ex b/lib/elixir/lib/kernel/error_handler.ex index dfc1dbb58e8..70f74377d84 100644 --- a/lib/elixir/lib/kernel/error_handler.ex +++ b/lib/elixir/lib/kernel/error_handler.ex @@ -3,38 +3,40 @@ defmodule Kernel.ErrorHandler do @moduledoc false + @spec undefined_function(module, atom, list) :: term def undefined_function(module, fun, args) do - ensure_loaded(module) + ensure_loaded(module) or ensure_compiled(module, :module) :error_handler.undefined_function(module, fun, args) end + @spec undefined_lambda(module, fun, list) :: term def undefined_lambda(module, fun, args) do - ensure_loaded(module) + ensure_loaded(module) or ensure_compiled(module, :module) :error_handler.undefined_lambda(module, fun, args) end - def release() do - # On release, no further allow elixir_ensure_compiled - # directives and revert to the original error handler. - # Note we should not delete the elixir_compiler_pid though, - # as we still want to send notifications to the compiler. - :erlang.erase(:elixir_ensure_compiled) - :erlang.process_flag(:error_handler, :error_handler) - :ok + @spec ensure_loaded(module) :: boolean + def ensure_loaded(module) do + case :code.ensure_loaded(module) do + {:module, _} -> true + {:error, _} -> false + end + end + + @spec ensure_compiled(module, atom) :: boolean + # Never wait on nil because it should never be defined. 
+ def ensure_compiled(nil, _kind) do + false end - defp ensure_loaded(module) do - case Code.ensure_loaded(module) do - {:module, _} -> [] - {:error, _} -> - parent = :erlang.get(:elixir_compiler_pid) - ref = :erlang.make_ref - send parent, {:waiting, module, self(), ref, module} - :erlang.garbage_collect(self) - receive do - {^ref, :ready} -> :ok - {^ref, :release} -> release() - end + def ensure_compiled(module, kind) do + parent = :erlang.get(:elixir_compiler_pid) + ref = :erlang.make_ref + send parent, {:waiting, kind, self(), ref, module, :elixir_module.compiler_modules()} + :erlang.garbage_collect(self()) + receive do + {^ref, :found} -> true + {^ref, :not_found} -> false end end end diff --git a/lib/elixir/lib/kernel/lexical_tracker.ex b/lib/elixir/lib/kernel/lexical_tracker.ex index 09e205b5214..1af038bc660 100644 --- a/lib/elixir/lib/kernel/lexical_tracker.ex +++ b/lib/elixir/lib/kernel/lexical_tracker.ex @@ -1,8 +1,6 @@ -# This is a module Elixir responsible for tracking -# the usage of aliases, imports and requires in the Elixir scope. -# -# The implementation simply stores dispatch information in an -# ETS table and then consults this table once compilation is done. +# This is an Elixir module responsible for tracking references +# to modules, remote dispatches, and the usage of +# aliases/imports/requires in the Elixir scope. # # Note that since this is required for bootstrap, we can't use # any of the `GenServer.Behaviour` conveniences. @@ -11,37 +9,41 @@ defmodule Kernel.LexicalTracker do @timeout 30_000 @behaviour :gen_server - @import 2 - @alias 3 + @doc """ + Returns all remotes referenced in this lexical scope. + """ + def remote_references(arg) do + :gen_server.call(to_pid(arg), :remote_references, @timeout) + end @doc """ - Returns all remotes linked to in this lexical scope. + Returns all remote dispatches in this lexical scope. """ - def remotes(arg) do - # If the module is compiled from a function, its lexical - # scope may be long gone, so it has no associated PID. - if pid = to_pid(arg) do - ets = :gen_server.call(pid, :ets, @timeout) - :ets.match(ets, {:"$1", :_, :_}) |> List.flatten - else - [] - end + def remote_dispatches(arg) do + :gen_server.call(to_pid(arg), :remote_dispatches, @timeout) + end + + @doc """ + Gets the destination the lexical scope is meant to + compile to. + """ + def dest(arg) do + :gen_server.call(to_pid(arg), :dest, @timeout) end defp to_pid(pid) when is_pid(pid), do: pid defp to_pid(mod) when is_atom(mod) do table = :elixir_module.data_table(mod) - [{_, val}] = :ets.lookup(table, :__lexical_tracker) + [{_, val}] = :ets.lookup(table, {:elixir, :lexical_tracker}) val end # Internal API - # Starts the tracker and returns its pid. + # Starts the tracker and returns its PID. 
@doc false - def start_link do - {:ok, pid} = :gen_server.start_link(__MODULE__, [], []) - pid + def start_link(dest) do + :gen_server.start_link(__MODULE__, dest, []) end @doc false @@ -50,127 +52,195 @@ defmodule Kernel.LexicalTracker do end @doc false - def add_import(pid, module, line, warn) do - :gen_server.cast(pid, {:add_import, module, line, warn}) + def add_import(pid, module, fas, line, warn) when is_atom(module) do + :gen_server.cast(pid, {:add_import, module, fas, line, warn}) end @doc false - def add_alias(pid, module, line, warn) do + def add_alias(pid, module, line, warn) when is_atom(module) do :gen_server.cast(pid, {:add_alias, module, line, warn}) end @doc false - def remote_dispatch(pid, module) do - :gen_server.cast(pid, {:remote_dispatch, module}) + def remote_reference(pid, module, mode) when is_atom(module) do + :gen_server.cast(pid, {:remote_reference, module, mode}) end @doc false - def import_dispatch(pid, module) do - :gen_server.cast(pid, {:import_dispatch, module}) + def remote_dispatch(pid, module, fa, line, mode) when is_atom(module) do + :gen_server.cast(pid, {:remote_dispatch, module, fa, line, mode}) end @doc false - def alias_dispatch(pid, module) do + def import_dispatch(pid, module, fa, line, mode) when is_atom(module) do + :gen_server.cast(pid, {:import_dispatch, module, fa, line, mode}) + end + + @doc false + def alias_dispatch(pid, module) when is_atom(module) do :gen_server.cast(pid, {:alias_dispatch, module}) end @doc false def collect_unused_imports(pid) do - unused(pid, @import) + unused(pid, :import) end @doc false def collect_unused_aliases(pid) do - unused(pid, @alias) + unused(pid, :alias) end - defp unused(pid, pos) do - ets = :gen_server.call(pid, :ets, @timeout) - :ets.foldl(fn - {module, _, _} = tuple, acc when is_integer(:erlang.element(pos, tuple)) -> - [{module, :erlang.element(pos, tuple)}|acc] - _, acc -> - acc - end, [], ets) |> Enum.sort + defp unused(pid, tag) do + :gen_server.call(pid, {:unused, tag}, @timeout) end # Callbacks + def init(dest) do + {:ok, %{directives: %{}, references: %{}, compile: %{}, + runtime: %{}, dest: dest}} + end + + @doc false + def handle_call({:unused, tag}, _from, state) do + directives = + for {{^tag, module_or_mfa}, marker} <- state.directives, + is_integer(marker), + do: {module_or_mfa, marker} - def init([]) do - {:ok, :ets.new(:lexical, [:protected])} + {:reply, Enum.sort(directives), state} end - def handle_call(:ets, _from, d) do - {:reply, d, d} + def handle_call(:remote_references, _from, state) do + {:reply, partition(Enum.to_list(state.references), [], []), state} end - def handle_call(request, _from, d) do - {:stop, {:bad_call, request}, d} + def handle_call(:remote_dispatches, _from, state) do + {:reply, {state.compile, state.runtime}, state} end - def handle_cast({:remote_dispatch, module}, d) do - add_module(d, module) - {:noreply, d} + def handle_call(:dest, _from, state) do + {:reply, state.dest, state} end - def handle_cast({:import_dispatch, module}, d) do - add_dispatch(d, module, @import) - {:noreply, d} + def handle_cast({:remote_reference, module, mode}, state) do + {:noreply, %{state | references: add_reference(state.references, module, mode)}} end - def handle_cast({:alias_dispatch, module}, d) do - add_dispatch(d, module, @alias) - {:noreply, d} + def handle_cast({:remote_dispatch, module, fa, line, mode}, state) do + references = add_reference(state.references, module, mode) + state = add_remote_dispatch(state, module, fa, line, mode) + {:noreply, %{state | references: 
references}} end - def handle_cast({:add_import, module, line, warn}, d) do - add_directive(d, module, line, warn, @import) - {:noreply, d} + def handle_cast({:import_dispatch, module, {function, arity} = fa, line, mode}, state) do + state = + state + |> add_import_dispatch(module, function, arity) + |> add_remote_dispatch(module, fa, line, mode) + + {:noreply, state} end - def handle_cast({:add_alias, module, line, warn}, d) do - add_directive(d, module, line, warn, @alias) - {:noreply, d} + def handle_cast({:alias_dispatch, module}, state) do + {:noreply, %{state | directives: add_dispatch(state.directives, module, :alias)}} end - def handle_cast(:stop, d) do - {:stop, :normal, d} + def handle_cast({:add_import, module, fas, line, warn}, state) do + directives = + state.directives + |> Enum.reject(&match?({{:import, {^module, _, _}}, _}, &1)) + |> :maps.from_list + |> add_directive(module, line, warn, :import) + + directives = + Enum.reduce(fas, directives, fn {function, arity}, directives -> + add_directive(directives, {module, function, arity}, line, warn, :import) + end) + + {:noreply, %{state | directives: directives}} end - def handle_cast(msg, d) do - {:stop, {:bad_cast, msg}, d} + def handle_cast({:add_alias, module, line, warn}, state) do + {:noreply, %{state | directives: add_directive(state.directives, module, line, warn, :alias)}} end - def handle_info(_msg, d) do - {:noreply, d} + def handle_cast(:stop, state) do + {:stop, :normal, state} end - def terminate(_reason, _d) do + @doc false + def handle_info(_msg, state) do + {:noreply, state} + end + + @doc false + def terminate(_reason, _state) do :ok end - def code_change(_old, d, _extra) do - {:ok, d} + @doc false + def code_change(_old, state, _extra) do + {:ok, state} end + defp partition([{remote, :compile} | t], compile, runtime), + do: partition(t, [remote | compile], runtime) + defp partition([{remote, :runtime} | t], compile, runtime), + do: partition(t, compile, [remote | runtime]) + defp partition([], compile, runtime), + do: {compile, runtime} + # Callbacks helpers - # In the table we keep imports and aliases. - # If the value is false, it was not imported/aliased - # If the value is true, it was imported/aliased - # If the value is a line, it was imported/aliased and has a pending warning - defp add_module(d, module) do - :ets.insert_new(d, {module, false, false}) + defp add_reference(references, module, :runtime) when is_atom(module), + do: map_put_new(module, :runtime, references) + defp add_reference(references, module, :compile) when is_atom(module), + do: :maps.put(module, :compile, references) + + defp add_remote_dispatch(state, module, fa, line, mode) when is_atom(module) do + map_update mode, %{module => %{fa => [line]}}, state, fn mode_dispatches -> + map_update module, %{fa => [line]}, mode_dispatches, fn module_dispatches -> + map_update fa, [line], module_dispatches, &[line | List.delete(&1, line)] + end + end end - defp add_dispatch(d, module, pos) do - :ets.update_element(d, module, {pos, true}) + defp add_import_dispatch(state, module, function, arity) do + directives = + add_dispatch(state.directives, module, :import) + |> add_dispatch({module, function, arity}, :import) + # Always compile time because we depend + # on the module at compile time + references = add_reference(state.references, module, :compile) + + %{state | directives: directives, references: references} end - defp add_directive(d, module, line, warn, pos) do - add_module(d, module) + # In the map we keep imports and aliases. 
+ # If the value is a line, it was imported/aliased and has a pending warning + # If the value is true, it was imported/aliased and used + defp add_directive(directives, module_or_mfa, line, warn, tag) do marker = if warn, do: line, else: true - :ets.update_element(d, module, {pos, marker}) + :maps.put({tag, module_or_mfa}, marker, directives) + end + + defp add_dispatch(directives, module_or_mfa, tag) do + :maps.put({tag, module_or_mfa}, true, directives) + end + + defp map_update(key, initial, map, fun) do + case :maps.find(key, map) do + {:ok, val} -> :maps.put(key, fun.(val), map) + :error -> :maps.put(key, initial, map) + end + end + + defp map_put_new(key, value, map) do + case :maps.find(key, map) do + {:ok, _} -> map + :error -> :maps.put(key, value, map) + end end end diff --git a/lib/elixir/lib/kernel/parallel_compiler.ex b/lib/elixir/lib/kernel/parallel_compiler.ex index ee1e58f48df..5170541c6e1 100644 --- a/lib/elixir/lib/kernel/parallel_compiler.ex +++ b/lib/elixir/lib/kernel/parallel_compiler.ex @@ -15,235 +15,314 @@ defmodule Kernel.ParallelCompiler do is set to `true` and there is a warning, this function will fail with an exception. - This function receives a set of callbacks as options: + This function accepts the following options: * `:each_file` - for each file compiled, invokes the callback passing the file + * `:each_long_compilation` - for each file that takes more than a given + timeout (see the `:long_compilation_threshold` option) to compile, invoke + this callback passing the file as its argument + + * `:long_compilation_threshold` - the timeout (in seconds) after the + `:each_long_compilation` callback is invoked; defaults to `10` + * `:each_module` - for each module compiled, invokes the callback passing the file, module and the module bytecode - The compiler doesn't care about the return values of the callbacks. + * `:dest` - the destination directory for the BEAM files. When using `files/2`, + this information is only used to properly annotate the BEAM files before + they are loaded into memory. If you want a file to actually be written to + `dest`, use `files_to_path/3` instead. + Returns the modules generated by each compiled file. """ - def files(files, callbacks \\ []) + def files(files, options \\ []) - def files(files, callbacks) when is_list(callbacks) do - spawn_compilers(files, nil, callbacks) + def files(files, options) when is_list(options) do + spawn_compilers(files, nil, options) end @doc """ Compiles the given files to the given path. Read `files/2` for more information. 
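  For example (an illustrative sketch; the file and directory names below are
  made up):

      Kernel.ParallelCompiler.files_to_path(
        ["lib/foo.ex", "lib/bar.ex"],
        "_build/dev/lib/my_app/ebin",
        each_file: &IO.puts("Compiled #{&1}")
      )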
""" - def files_to_path(files, path, callbacks \\ []) + def files_to_path(files, path, options \\ []) - def files_to_path(files, path, callbacks) when is_binary(path) and is_list(callbacks) do - spawn_compilers(files, path, callbacks) + def files_to_path(files, path, options) when is_binary(path) and is_list(options) do + spawn_compilers(files, path, options) end - defp spawn_compilers(files, path, callbacks) do - Code.ensure_loaded(Kernel.ErrorHandler) + defp spawn_compilers(files, path, options) do + true = Code.ensure_loaded?(Kernel.ErrorHandler) compiler_pid = self() :elixir_code_server.cast({:reset_warnings, compiler_pid}) schedulers = max(:erlang.system_info(:schedulers_online), 2) - result = spawn_compilers(files, files, path, callbacks, [], [], schedulers, []) + result = spawn_compilers(%{ + entries: files, + original: files, + output: path, + options: options, + waiting: [], + queued: [], + schedulers: schedulers, + result: [], + }) # In case --warning-as-errors is enabled and there was a warning, - # compilation status will be set to error and we fail with CompileError + # compilation status will be set to error. case :elixir_code_server.call({:compilation_status, compiler_pid}) do - :ok -> result - :error -> exit(1) + :ok -> + result + :error -> + IO.puts :stderr, "Compilation failed due to warnings while using the --warnings-as-errors option" + exit({:shutdown, 1}) end end - # We already have 4 currently running, don't spawn new ones - defp spawn_compilers(entries, original, output, callbacks, waiting, queued, schedulers, result) when - length(queued) - length(waiting) >= schedulers do - wait_for_messages(entries, original, output, callbacks, waiting, queued, schedulers, result) + # We already have n=schedulers currently running, don't spawn new ones + defp spawn_compilers(%{queued: queued, waiting: waiting, schedulers: schedulers} = state) + when length(queued) - length(waiting) >= schedulers do + wait_for_messages(state) end # Release waiting processes - defp spawn_compilers([h|t], original, output, callbacks, waiting, queued, schedulers, result) when is_pid(h) do - {_kind, ^h, ref, _module} = List.keyfind(waiting, h, 1) - send h, {ref, :ready} - waiting = List.keydelete(waiting, h, 1) - spawn_compilers(t, original, output, callbacks, waiting, queued, schedulers, result) + defp spawn_compilers(%{entries: [{ref, found} | t], waiting: waiting} = state) do + waiting = + case List.keytake(waiting, ref, 2) do + {{_kind, pid, ^ref, _on, _defining}, waiting} -> + send pid, {ref, found} + waiting + nil -> + waiting + end + spawn_compilers(%{state | entries: t, waiting: waiting}) end - # Spawn a compiler for each file in the list until we reach the limit - defp spawn_compilers([h|t], original, output, callbacks, waiting, queued, schedulers, result) do + defp spawn_compilers(%{entries: [file | files], queued: queued, output: output, options: options} = state) do parent = self() {pid, ref} = :erlang.spawn_monitor fn -> - # Notify Code.ensure_compiled/2 that we should - # attempt to compile the module by doing a dispatch. - :erlang.put(:elixir_ensure_compiled, true) - # Set the elixir_compiler_pid used by our custom Kernel.ErrorHandler. 
:erlang.put(:elixir_compiler_pid, parent) + :erlang.put(:elixir_compiler_file, file) :erlang.process_flag(:error_handler, Kernel.ErrorHandler) exit(try do - if output do - :elixir_compiler.file_to_path(h, output) + _ = if output do + :elixir_compiler.file_to_path(file, output) else - :elixir_compiler.file(h) + :elixir_compiler.file(file, Keyword.get(options, :dest)) end - {:compiled, h} + {:shutdown, file} catch kind, reason -> {:failure, kind, reason, System.stacktrace} end) end - spawn_compilers(t, original, output, callbacks, waiting, - [{pid, ref, h}|queued], schedulers, result) + timeout = Keyword.get(options, :long_compilation_threshold, 10) * 1_000 + timer_ref = Process.send_after(self(), {:timed_out, pid}, timeout) + + new_queued = [{pid, ref, file, timer_ref} | queued] + spawn_compilers(%{state | entries: files, queued: new_queued}) end # No more files, nothing waiting, queue is empty, we are done - defp spawn_compilers([], _original, _output, _callbacks, [], [], _schedulers, result) do + defp spawn_compilers(%{entries: [], waiting: [], queued: [], result: result}) do for {:module, mod} <- result, do: mod end # Queued x, waiting for x: POSSIBLE ERROR! Release processes so we get the failures - defp spawn_compilers([], original, output, callbacks, waiting, queued, schedulers, result) when length(waiting) == length(queued) do - Enum.each queued, fn {child, _, _} -> - {_kind, ^child, ref, _module} = List.keyfind(waiting, child, 1) - send child, {ref, :release} + defp spawn_compilers(%{entries: [], waiting: waiting, queued: queued} = state) when length(waiting) == length(queued) do + entries = for {pid, _, _, _} <- queued, + entry = waiting_on_without_definition(waiting, pid), + {_, _, ref, on, _} = entry, + do: {on, {ref, :not_found}} + + # Instead of releasing all files at once, we release them in groups + # based on the module they are waiting on. We pick the module being + # depended on with less edges, as it is the mostly likely source of + # error (for example, someone made a typo). This may not always be + # true though: for example, if there is a macro injecting code into + # multiple modules and such code becomes faulty, now multiple modules + # are waiting on the same module required by the faulty code. However, + # since we need to pick something to be first, the one with fewer edges + # sounds like a sane choice. 
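    # As a rough illustration (not real compiler state): if grouping the
    # waiting entries yields something like
    #
    #     [{ModA, [ref_a]}, {ModB, [ref_b1, ref_b2]}]
    #
    # then ModA has fewer waiters, so the processes waiting on ModA are the
    # first to receive `:not_found`.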
+ entries + |> Enum.group_by(&elem(&1, 0), &elem(&1, 1)) + |> Enum.sort_by(&length(elem(&1, 1))) + |> case do + [{_on, refs} | _] -> spawn_compilers(%{state | entries: refs}) + [] -> handle_deadlock(waiting, queued) end - wait_for_messages([], original, output, callbacks, waiting, queued, schedulers, result) end # No more files, but queue and waiting are not full or do not match - defp spawn_compilers([], original, output, callbacks, waiting, queued, schedulers, result) do - wait_for_messages([], original, output, callbacks, waiting, queued, schedulers, result) + defp spawn_compilers(%{entries: []} = state) do + wait_for_messages(state) + end + + defp waiting_on_without_definition(waiting, pid) do + {_, ^pid, _, on, _} = entry = List.keyfind(waiting, pid, 1) + if Enum.any?(waiting, fn {_, _, _, _, defining} -> on in defining end) do + nil + else + entry + end end # Wait for messages from child processes - defp wait_for_messages(entries, original, output, callbacks, waiting, queued, schedulers, result) do + defp wait_for_messages(state) do + %{entries: entries, options: options, waiting: waiting, queued: queued, result: result} = state + receive do {:struct_available, module} -> - available = for {:struct, pid, _, waiting_module} <- waiting, + available = for {:struct, _, ref, waiting_module, _defining} <- waiting, module == waiting_module, - not pid in entries, - do: pid + do: {ref, :found} - spawn_compilers(available ++ entries, original, output, callbacks, - waiting, queued, schedulers, [{:struct, module}|result]) + spawn_compilers(%{state | entries: available ++ entries, result: [{:struct, module} | result]}) {:module_available, child, ref, file, module, binary} -> - if callback = Keyword.get(callbacks, :each_module) do + if callback = Keyword.get(options, :each_module) do callback.(file, module, binary) end # Release the module loader which is waiting for an ack send child, {ref, :ack} - available = for {_kind, pid, _, waiting_module} <- waiting, + available = for {:module, _, ref, waiting_module, _defining} <- waiting, module == waiting_module, - not pid in entries, - do: pid + do: {ref, :found} - spawn_compilers(available ++ entries, original, output, callbacks, - waiting, queued, schedulers, [{:module, module}|result]) + cancel_waiting_timer(queued, child) - {:waiting, kind, child, ref, on} -> - defined = fn {k, m} -> on == m and k in [kind, :module] end + spawn_compilers(%{state | entries: available ++ entries, result: [{:module, module} | result]}) + {:waiting, kind, child, ref, on, defining} -> # Oops, we already got it, do not put it on waiting. - if :lists.any(defined, result) do - send child, {ref, :ready} - else - waiting = [{kind, child, ref, on}|waiting] - end + # Alternatively, we're waiting on ourselves, + # send :found so that we can crash with a better error. 
+ waiting = + if :lists.any(&match?({^kind, ^on}, &1), result) or on in defining do + send child, {ref, :found} + waiting + else + [{kind, child, ref, on, defining} | waiting] + end - spawn_compilers(entries, original, output, callbacks, waiting, queued, schedulers, result) + spawn_compilers(%{state | waiting: waiting}) + + {:timed_out, child} -> + callback = Keyword.get(options, :each_long_compilation) + case List.keyfind(queued, child, 0) do + {^child, _, file, _} when not is_nil(callback) -> + callback.(file) + _ -> + :ok + end + spawn_compilers(state) - {:DOWN, _down_ref, :process, down_pid, {:compiled, file}} -> - if callback = Keyword.get(callbacks, :each_file) do + {:DOWN, _down_ref, :process, down_pid, {:shutdown, file}} -> + if callback = Keyword.get(options, :each_file) do callback.(file) end + cancel_waiting_timer(queued, down_pid) + # Sometimes we may have spurious entries in the waiting # list because someone invoked try/rescue UndefinedFunctionError new_entries = List.delete(entries, down_pid) new_queued = List.keydelete(queued, down_pid, 0) new_waiting = List.keydelete(waiting, down_pid, 1) - spawn_compilers(new_entries, original, output, callbacks, new_waiting, new_queued, schedulers, result) + spawn_compilers(%{state | entries: new_entries, waiting: new_waiting, queued: new_queued}) {:DOWN, down_ref, :process, _down_pid, reason} -> - handle_failure(down_ref, reason, entries, waiting, queued) - wait_for_messages(entries, original, output, callbacks, waiting, queued, schedulers, result) + handle_failure(down_ref, reason, queued) + wait_for_messages(state) end end - defp handle_failure(ref, reason, entries, waiting, queued) do + defp handle_deadlock(waiting, queued) do + deadlock = + for {pid, _, file, _} <- queued do + {:current_stacktrace, stacktrace} = Process.info(pid, :current_stacktrace) + Process.exit(pid, :kill) + + {_kind, ^pid, _, on, _} = List.keyfind(waiting, pid, 1) + error = CompileError.exception(description: "deadlocked waiting on module #{inspect on}", + file: nil, line: nil) + print_failure(file, {:failure, :error, error, stacktrace}) + + {file, on} + end + + IO.puts """ + + Compilation failed because of a deadlock between files. 
+ The following files depended on the following modules: + """ + + max = + deadlock + |> Enum.map(& &1 |> elem(0) |> String.length) + |> Enum.max + + for {file, mod} <- deadlock do + IO.puts [" ", String.pad_leading(file, max), " => " | inspect(mod)] + end + + IO.puts "" + exit({:shutdown, 1}) + end + + defp handle_failure(ref, reason, queued) do if file = find_failure(ref, queued) do print_failure(file, reason) - if all_missing?(entries, waiting, queued) do - collect_failures(queued, length(queued) - 1) + for {pid, _, _, _} <- queued do + Process.exit(pid, :kill) end - exit(1) + exit({:shutdown, 1}) end end defp find_failure(ref, queued) do case List.keyfind(queued, ref, 1) do - {_child, ^ref, file} -> file + {_child, ^ref, file, _timer_ref} -> file _ -> nil end end - defp print_failure(_file, {:compiled, _}) do + defp print_failure(_file, {:shutdown, _}) do :ok end defp print_failure(file, {:failure, kind, reason, stacktrace}) do - IO.puts "\n== Compilation error on file #{Path.relative_to_cwd(file)} ==" - IO.puts Exception.format(kind, reason, prune_stacktrace(stacktrace)) + IO.write ["\n== Compilation error in file #{Path.relative_to_cwd(file)} ==\n", + Kernel.CLI.format_error(kind, reason, stacktrace)] end defp print_failure(file, reason) do - IO.puts "\n== Compilation error on file #{Path.relative_to_cwd(file)} ==" - IO.puts Exception.format(:exit, reason, []) - end - - @elixir_internals [:elixir_compiler, :elixir_module, :elixir_translator, :elixir_expand] - - defp prune_stacktrace([{mod, _, _, _}|t]) when mod in @elixir_internals do - prune_stacktrace(t) + IO.write ["\n== Compilation error in file #{Path.relative_to_cwd(file)} ==\n", + Kernel.CLI.print_error(:exit, reason, [])] end - defp prune_stacktrace([h|t]) do - [h|prune_stacktrace(t)] - end - - defp prune_stacktrace([]) do - [] - end - - defp all_missing?(entries, waiting, queued) do - entries == [] and waiting != [] and - length(waiting) == length(queued) - end - - defp collect_failures(_queued, 0), do: :ok - - defp collect_failures(queued, remaining) do - receive do - {:DOWN, down_ref, :process, _down_pid, reason} -> - if file = find_failure(down_ref, queued) do - print_failure(file, reason) - collect_failures(queued, remaining - 1) - else - collect_failures(queued, remaining) + defp cancel_waiting_timer(queued, child_pid) do + case List.keyfind(queued, child_pid, 0) do + {^child_pid, _ref, _file, timer_ref} -> + Process.cancel_timer(timer_ref) + # Let's flush the message in case it arrived before we canceled the + # timeout. + receive do + {:timed_out, ^child_pid} -> :ok + after + 0 -> :ok end - after - # Give up if no failure appears in 5 seconds - 5000 -> :ok + nil -> + :ok end end end diff --git a/lib/elixir/lib/kernel/parallel_require.ex b/lib/elixir/lib/kernel/parallel_require.ex index a9132618b33..5ea5999b11c 100644 --- a/lib/elixir/lib/kernel/parallel_require.ex +++ b/lib/elixir/lib/kernel/parallel_require.ex @@ -3,78 +3,107 @@ defmodule Kernel.ParallelRequire do A module responsible for requiring files in parallel. """ - defmacrop default_callback, do: quote(do: fn x -> x end) - @doc """ Requires the given files. - A callback that is invoked every time a file is required - can be optionally given as argument. + A callback that will be invoked with each file, or a keyword list of `callbacks` can be provided: + + * `:each_file` - invoked with each file + + * `:each_module` - invoked with file, module name, and binary code Returns the modules generated by each required file. 
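  For example (an illustrative sketch; the file names below are made up):

      Kernel.ParallelRequire.files(
        ["test/foo_test.exs", "test/bar_test.exs"],
        each_file: &IO.puts("Required #{&1}")
      )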
""" - def files(files, callback \\ default_callback) do - schedulers = max(:erlang.system_info(:schedulers_online), 2) - spawn_requires(files, [], callback, schedulers, []) + def files(files, callbacks \\ []) + + def files(files, callback) when is_function(callback, 1) do + files(files, [each_file: callback]) end - defp spawn_requires([], [], _callback, _schedulers, result), do: result + def files(files, callbacks) when is_list(callbacks) do + compiler_pid = self() + :elixir_code_server.cast({:reset_warnings, compiler_pid}) + schedulers = max(:erlang.system_info(:schedulers_online), 2) + result = spawn_requires(files, [], callbacks, schedulers, []) - defp spawn_requires([], waiting, callback, schedulers, result) do - wait_for_messages([], waiting, callback, schedulers, result) + # In case --warning-as-errors is enabled and there was a warning, + # compilation status will be set to error. + case :elixir_code_server.call({:compilation_status, compiler_pid}) do + :ok -> + result + :error -> + IO.puts :stderr, "\nExecution failed due to warnings while using the --warnings-as-errors option" + exit({:shutdown, 1}) + end end - defp spawn_requires(files, waiting, callback, schedulers, result) when length(waiting) >= schedulers do - wait_for_messages(files, waiting, callback, schedulers, result) - end + defp spawn_requires([], [], _callbacks, _schedulers, result), do: result - defp spawn_requires([h|t], waiting, callback, schedulers, result) do - parent = self + defp spawn_requires([], waiting, callbacks, schedulers, result) do + wait_for_messages([], waiting, callbacks, schedulers, result) + end - compiler_pid = :erlang.get(:elixir_compiler_pid) - ensure_compiled = :erlang.get(:elixir_ensure_compiled) - {:error_handler, handler} = :erlang.process_info(parent, :error_handler) + defp spawn_requires(files, waiting, callbacks, schedulers, result) when length(waiting) >= schedulers do + wait_for_messages(files, waiting, callbacks, schedulers, result) + end + defp spawn_requires([file | files], waiting, callbacks, schedulers, result) do + parent = self() {pid, ref} = :erlang.spawn_monitor fn -> - if compiler_pid != :undefined do - :erlang.put(:elixir_compiler_pid, compiler_pid) - end - - if ensure_compiled != :undefined do - :erlang.put(:elixir_ensure_compiled, ensure_compiled) - end - - :erlang.process_flag(:error_handler, handler) + :erlang.put(:elixir_compiler_pid, parent) + :erlang.put(:elixir_compiler_file, file) exit(try do - new = Code.require_file(h) || [] - {:required, Enum.map(new, &elem(&1, 0)), h} + new = Code.require_file(file) || [] + {:required, Enum.map(new, &elem(&1, 0)), file} catch kind, reason -> {:failure, kind, reason, System.stacktrace} end) end - spawn_requires(t, [{pid, ref}|waiting], callback, schedulers, result) + spawn_requires(files, [{pid, ref} | waiting], callbacks, schedulers, result) end - defp wait_for_messages(files, waiting, callback, schedulers, result) do + defp wait_for_messages(files, waiting, callbacks, schedulers, result) do receive do {:DOWN, ref, :process, pid, status} -> tuple = {pid, ref} if tuple in waiting do + waiting = List.delete(waiting, tuple) + case status do {:required, mods, file} -> - callback.(file) - result = mods ++ result - waiting = List.delete(waiting, tuple) + if each_file_callback = callbacks[:each_file] do + each_file_callback.(file) + end + + spawn_requires(files, waiting, callbacks, schedulers, mods ++ result) + {:failure, kind, reason, stacktrace} -> :erlang.raise(kind, reason, stacktrace) + other -> :erlang.raise(:exit, other, []) end 
+ else + spawn_requires(files, waiting, callbacks, schedulers, result) end - spawn_requires(files, waiting, callback, schedulers, result) + + {:module_available, child, ref, file, module, binary} -> + if each_module_callback = callbacks[:each_module] do + each_module_callback.(file, module, binary) + end + + send(child, {ref, :ack}) + spawn_requires(files, waiting, callbacks, schedulers, result) + + {:struct_available, _} -> + spawn_requires(files, waiting, callbacks, schedulers, result) + + {:waiting, _, child, ref, _, _} -> + send(child, {ref, :not_found}) + spawn_requires(files, waiting, callbacks, schedulers, result) end end end diff --git a/lib/elixir/lib/kernel/special_forms.ex b/lib/elixir/lib/kernel/special_forms.ex index b3b97bcf8b1..d0ab17a0f58 100644 --- a/lib/elixir/lib/kernel/special_forms.ex +++ b/lib/elixir/lib/kernel/special_forms.ex @@ -1,106 +1,75 @@ defmodule Kernel.SpecialForms do @moduledoc """ - In this module we define Elixir special forms. Special forms - cannot be overridden by the developer and are the basic - building blocks of Elixir code. + Special forms are the basic building blocks of Elixir, and therefore + cannot be overridden by the developer. - Some of those forms are lexical (like `alias`, `case`, etc). - The macros `{}` and `<<>>` are also special forms used to define - tuple and binary data structures respectively. + We define them in this module. Some of these forms are lexical (like + `alias/2`, `case/2`, etc). The macros `{}` and `<<>>` are also special + forms used to define tuple and binary data structures respectively. - This module also documents Elixir's pseudo variables (`__ENV__`, - `__MODULE__`, `__DIR__` and `__CALLER__`). Pseudo variables return - information about Elixir's compilation environment and can only - be read, never assigned to. + This module also documents macros that return information about Elixir's + compilation environment, such as (`__ENV__/0`, `__MODULE__/0`, `__DIR__/0` and `__CALLER__/0`). - Finally, it also documents 2 special forms, `__block__` and - `__aliases__`, which are not intended to be called directly by the + Finally, it also documents two special forms, `__block__/1` and + `__aliases__/1`, which are not intended to be called directly by the developer but they appear in quoted contents since they are essential in Elixir's constructs. """ + defmacrop error!(args) do + quote do + _ = unquote(args) + message = "Elixir's special forms are expanded by the compiler and must not be invoked directly" + :erlang.error(RuntimeError.exception(message)) + end + end + @doc """ Creates a tuple. - Only two item tuples are considered literals in Elixir. - Therefore all other tuples are represented in the AST - as a call to the special form `:{}`. + More information about the tuple data type and about functions to manipulate + tuples can be found in the `Tuple` module; some functions for working with + tuples are also available in `Kernel` (such as `Kernel.elem/2` or + `Kernel.tuple_size/1`). - Conveniences for manipulating tuples can be found in the - `Tuple` module. Some functions for working with tuples are - also available in `Kernel`, namely `Kernel.elem/2`, - `Kernel.put_elem/3` and `Kernel.tuple_size/1`. + ## AST representation - ## Examples + Only two-item tuples are considered literals in Elixir and return themselves + when quoted. Therefore, all other tuples are represented in the AST as calls to + the `:{}` special form. 
- iex> {1, 2, 3} - {1, 2, 3} + iex> quote do + ...> {1, 2} + ...> end + {1, 2} - iex> quote do: {1, 2, 3} - {:{}, [], [1,2,3]} + iex> quote do + ...> {1, 2, 3} + ...> end + {:{}, [], [1, 2, 3]} """ - defmacro unquote(:{})(args) + defmacro unquote(:{})(args), do: error!([args]) @doc """ Creates a map. - Maps are key-value stores where keys are compared - using the match operator (`===`). Maps can be created with - the `%{}` special form where keys are associated via `=>`: - - %{1 => 2} - - Maps also support the keyword notation, as other special forms, - as long as they are at the end of the argument list: - - %{hello: :world, with: :keywords} - %{:hello => :world, with: :keywords} - - If a map has duplicated keys, the last key will always have - higher precedence: - - iex> %{a: :b, a: :c} - %{a: :c} - - Conveniences for manipulating maps can be found in the - `Map` module. - - ## Access syntax - - Besides the access functions available in the `Map` module, - like `Map.get/3` and `Map.fetch/2`, a map can be accessed using the - `.` operator: - - iex> map = %{a: :b} - iex> map.a - :b - - Note that the `.` operator expects the field to exist in the map. - If not, an `ArgumentError` is raised. - - ## Update syntax - - Maps also support an update syntax: - - iex> map = %{:a => :b} - iex> %{map | :a => :c} - %{:a => :c} - - Notice the update syntax requires the given keys to exist. - Trying to update a key that does not exist will raise an `ArgumentError`. + See the `Map` module for more information about maps, their syntax, and ways to + access and manipulate them. ## AST representation - Regardless if `=>` or the keywords syntax is used, Maps are - always represented internally as a list of two-items tuples - for simplicity: + Regardless of whether `=>` or the keyword syntax is used, key-value pairs in + maps are always represented internally as a list of two-element tuples for + simplicity: - iex> quote do: %{:a => :b, c: :d} - {:%{}, [], [{:a, :b}, {:c, :d}]} + iex> quote do + ...> %{"a" => :b, c: :d} + ...> end + {:%{}, [], [{"a", :b}, {:c, :d}]} """ - defmacro unquote(:%{})(args) + defmacro unquote(:%{})(args), do: error!([args]) @doc """ Creates a struct. @@ -109,43 +78,26 @@ defmodule Kernel.SpecialForms do default values for keys, tags to be used in polymorphic dispatches and compile time assertions. - To define a struct, you just need to implement the `__struct__/0` - function in a module: - - defmodule User do - def __struct__ do - %{name: "josé", age: 27} - end - end - - In practice though, structs are usually defined with the - `Kernel.defstruct/1` macro: + Structs are usually defined with the `Kernel.defstruct/1` macro: defmodule User do - defstruct name: "josé", age: 27 + defstruct name: "john", age: 27 end Now a struct can be created as follows: %User{} - Underneath a struct is just a map with a `__struct__` field + Underneath a struct is just a map with a `:__struct__` key pointing to the `User` module: - %User{} == %{__struct__: User, name: "josé", age: 27} + %User{} == %{__struct__: User, name: "john", age: 27} A struct also validates that the given keys are part of the defined struct. The example below will fail because there is no key `:full_name` in the `User` struct: - %User{full_name: "José Valim"} - - Note that a struct specifies a minimum set of keys required - for operations. 
Other keys can be added to structs via the - regular map operations: - - user = %User{} - Map.put(user, :a_non_struct_key, :value) + %User{full_name: "john doe"} An update operation specific for structs is also available: @@ -159,147 +111,212 @@ defmodule Kernel.SpecialForms do any of the protocols implemented for maps. Check `Kernel.defprotocol/2` for more information on how structs can be used with protocols for polymorphic dispatch. Also - see `Kernel.struct/2` for examples on how to create and update - structs dynamically. + see `Kernel.struct/2` and `Kernel.struct!/2` for examples on + how to create and update structs dynamically. """ - defmacro unquote(:%)(struct, map) + defmacro unquote(:%)(struct, map), do: error!([struct, map]) @doc """ Defines a new bitstring. ## Examples - iex> << 1, 2, 3 >> - << 1, 2, 3 >> + iex> <<1, 2, 3>> + <<1, 2, 3>> + + ## Types - ## Bitstring types + A bitstring is made of many segments and each segment has a + type. There are 9 types used in bitstrings: - A bitstring is made of many segments. Each segment has a - type, which defaults to integer: + - `integer` + - `float` + - `bits` (alias for `bitstring`) + - `bitstring` + - `binary` + - `bytes` (alias for `binary`) + - `utf8` + - `utf16` + - `utf32` + + When no type is specified, the default is `integer`: iex> <<1, 2, 3>> <<1, 2, 3>> Elixir also accepts by default the segment to be a literal - string or a literal char list, which are by expanded to integers: + string or a literal charlist, which are by default expanded to integers: iex> <<0, "foo">> <<0, 102, 111, 111>> - Any other type needs to be explicitly tagged. For example, - in order to store a float type in the binary, one has to do: - - iex> <<3.14 :: float>> - <<64, 9, 30, 184, 81, 235, 133, 31>> - - This also means that variables need to be explicitly tagged, - otherwise Elixir defaults to integer: + Variables or any other type need to be explicitly tagged: iex> rest = "oo" iex> <<102, rest>> ** (ArgumentError) argument error - We can solve this by explicitly tagging it as a binary: + We can solve this by explicitly tagging it as `binary`: - <<102, rest :: binary>> + iex> rest = "oo" + iex> <<102, rest::binary>> + "foo" - The type can be integer, float, bitstring/bits, binary/bytes, - utf8, utf16 or utf32, e.g.: + The `utf8`, `utf16`, and `utf32` types are for Unicode codepoints. They + can also be applied to literal strings and charlists: - <<102 :: float, rest :: binary>> + iex> <<"foo"::utf16>> + <<0, 102, 0, 111, 0, 111>> + iex> <<"foo"::utf32>> + <<0, 0, 0, 102, 0, 0, 0, 111, 0, 0, 0, 111>> - An integer can be any arbitrary precision integer. A float is an - IEEE 754 binary32 or binary64 floating point number. A bitstring - is an arbitrary series of bits. A binary is a special case of - bitstring that has a total size divisible by 8. + ## Options - The utf8, utf16, and utf32 types are for unicode codepoints. They - can also be applied to literal strings and char lists: + Many options can be given by using `-` as separator. Order is + arbitrary, so the following are all equivalent: - iex> <<"foo" :: utf16>> - <<0,102,0,111,0,111>> + <<102::integer-native, rest::binary>> + <<102::native-integer, rest::binary>> + <<102::unsigned-big-integer, rest::binary>> + <<102::unsigned-big-integer-size(8), rest::binary>> + <<102::unsigned-big-integer-8, rest::binary>> + <<102::8-integer-big-unsigned, rest::binary>> + <<102, rest::binary>> - The bits type is an alias for bitstring. The bytes type is an - alias for binary. 
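  For instance, the `102` segment denotes the same single byte under all of
  the spellings above, which can be checked directly:

      iex> <<102::unsigned-big-integer-size(8)>> == <<102>>
      true
      iex> <<102::8-integer-big-unsigned>> == <<102>>
      true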
+
+  ### Unit and Size

-  The signedness can also be given as signed or unsigned. The
-  signedness only matters for matching and relevant only for
-  integers. If unspecified, it defaults to unsigned. Example:
+  The length of the match is equal to the `unit` (a number of bits) times the
+  `size` (the number of repeated segments of length `unit`).

-      iex> <<-100 :: signed, _rest :: binary>> = <<-100, "foo">>
-      <<156,102,111,111>>
+  Type      | Default Unit
+  --------- | ------------
+  `integer` | 1 bit
+  `float`   | 1 bit
+  `binary`  | 8 bits

-  This match would have failed if we did not specify that the
-  value -100 is signed. If we're matching into a variable instead
-  of a value, the signedness won't be checked; rather, the number
-  will simply be interpreted as having the given (or implied)
-  signedness, e.g.:
+  Sizes for types are a bit more nuanced. The default size for integers is 8.

-      iex> <<val, _rest :: binary>> = <<-100, "foo">>
-      iex> val
-      156
+  For floats, it is 64. For floats, `size * unit` must result in 32 or 64,
+  corresponding to [IEEE 754](https://en.wikipedia.org/wiki/IEEE_floating_point)
+  binary32 and binary64, respectively.

-  Here, `val` is interpreted as unsigned.
+  For binaries, the default is the size of the binary. Only the last binary in a
+  match can use the default size. All others must have their size specified
+  explicitly, even if the match is unambiguous. For example:

-  The endianness of a segment can be big, little or native (the
-  latter meaning it will be resolved at VM load time). Passing
-  many options can be done by giving a list:
+      iex> <<name::binary-size(5), " the ", species::binary>> = <<"Frank the Walrus">>
+      "Frank the Walrus"
+      iex> {name, species}
+      {"Frank", "Walrus"}

-      <<102 :: [integer, native], rest :: binary>>
+  Failing to specify the size for the non-last causes compilation to fail:

-  Or:
+      <<name::binary, " the ", species::binary>> = <<"Frank the Walrus">>
+      ** (CompileError): a binary field without size is only allowed at the end of a binary pattern

-      <<102 :: [unsigned, big, integer], rest :: binary>>
+  #### Shortcut Syntax

-  And so on.
+  Size and unit can also be specified using a syntax shortcut
+  when passing integer values:

-  Endianness only makes sense for integers and some UTF code
-  point types (utf16 and utf32).
+      iex> x = 1
+      iex> <<x::8>> == <<x::size(8)>>
+      true
+      iex> <<x::8*4>> == <<x::size(8)-unit(4)>>
+      true

-  Finally, we can also specify size and unit for each segment. The
-  unit is multiplied by the size to give the effective size of
-  the segment in bits. The default unit for integers, floats,
-  and bitstrings is 1. For binaries, it is 8.
+  This syntax reflects the fact the effective size is given by
+  multiplying the size by the unit.

-  Since integers are default, the default unit is 1. The example below
-  matches because the string "foo" takes 24 bits and we match it
-  against a segment of 24 bits, 8 of which are taken by the integer
-  102 and the remaining 16 bits are specified on the rest.
+  ### Modifiers

-      iex> <<102, _rest :: size(16)>> = "foo"
-      "foo"
+  Some types have associated modifiers to clear up ambiguity in byte
+  representation.

-  We can also match by specifying size and unit explicitly:
+  Modifier             | Relevant Type(s)
+  -------------------- | ----------------
+  `signed`             | `integer`
+  `unsigned` (default) | `integer`
+  `little`             | `integer`, `float`, `utf16`, `utf32`
+  `big` (default)      | `integer`, `float`, `utf16`, `utf32`
+  `native`             | `integer`, `utf16`, `utf32`

-      iex> <<102, _rest :: [size(2), unit(8)]>> = "foo"
-      "foo"
+  ### Sign
+
+  Integers can be `signed` or `unsigned`, defaulting to `unsigned`.
-  However, if we expect a size of 32, it won't match:
+      iex> <<int::integer>> = <<-100>>
+      <<156>>
+      iex> int
+      156
+      iex> <<int::integer-signed>> = <<-100>>
+      <<156>>
+      iex> int
+      -100

-      iex> <<102, _rest :: size(32)>> = "foo"
-      ** (MatchError) no match of right hand side value: "foo"
+  `signed` and `unsigned` are only used for matching binaries (see below) and
+  are only used for integers.

-  Size and unit are not applicable to utf8, utf16, and utf32.
+      iex> <<-100::signed, _rest::binary>> = <<-100, "foo">>
+      <<156, 102, 111, 111>>

-  The default size for integers is 8. For floats, it is 64. For
-  binaries, it is the size of the binary. Only the last binary
-  in a binary match can use the default size (all others must
-  have their size specified explicitly).
+  ### Endianness

-  Size can also be specified using a syntax shortcut. Instead of
-  writing `size(8)`, one can write just `8` and it will be interpreted
-  as `size(8)`
+  Elixir has three options for endianness: `big`, `little`, and `native`.
+  The default is `big`:

-      iex> << 1 :: 3 >> == << 1 :: size(3) >>
-      true
+      iex> <<number::little-integer-size(16)>> = <<0, 1>>
+      <<0, 1>>
+      iex> number
+      256
+      iex> <<number::big-integer-size(16)>> = <<0, 1>>
+      <<0, 1>>
+      iex> number
+      1
+
+  `native` is determined by the VM at startup and will depend on the
+  host operating system.
+
+  ## Binary/Bitstring Matching
+
+  Binary matching is a powerful feature in Elixir that is useful for extracting
+  information from binaries as well as pattern matching.
+
+  Binary matching can be used by itself to extract information from binaries:
+
+      iex> <<"Hello, ", place::binary>> = "Hello, World"
+      "Hello, World"
+      iex> place
+      "World"
+
+  Or as a part of function definitions to pattern match:
+
+      defmodule ImageTyper do
+        @png_signature <<137::size(8), 80::size(8), 78::size(8), 71::size(8),
+                  13::size(8), 10::size(8), 26::size(8), 10::size(8)>>
+        @jpg_signature <<255::size(8), 216::size(8)>>
+
+        def type(<<@png_signature, rest::binary>>), do: :png
+        def type(<<@jpg_signature, rest::binary>>), do: :jpg
+        def type(_), do: :unknown
+      end
+
+  ### Performance & Optimizations
+
+  The Erlang compiler can provide a number of optimizations on binary creation
+  and matching. To see optimization output, set the `bin_opt_info` compiler
+  option:

-  For floats, `size * unit` must result in 32 or 64, corresponding
-  to binary32 and binary64, respectively.
+      ERL_COMPILER_OPTIONS=bin_opt_info mix compile
+
+  To learn more about specific optimizations and performance considerations,
+  check out
+  [Erlang's Efficiency Guide on handling binaries](http://www.erlang.org/doc/efficiency_guide/binaryhandling.html).
   """
-  defmacro unquote(:<<>>)(args)
+  defmacro unquote(:<<>>)(args), do: error!([args])

   @doc """
-  Defines a remote call or an alias.
+  Defines a remote call, a call to an anonymous function, or an alias.

   The dot (`.`) in Elixir can be used for remote calls:

@@ -307,8 +324,16 @@ defmodule Kernel.SpecialForms do
       "foo"

   In this example above, we have used `.` to invoke `downcase` in the
-  `String` alias, passing "FOO" as argument. We can also use the dot
-  for creating aliases:
+  `String` module, passing `"FOO"` as argument.
+
+  The dot may be used to invoke anonymous functions too:
+
+      iex> (fn(n) -> n end).(7)
+      7
+
+  in which case there is a function on the left hand side.
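A minimal sketch of why the postfix dot matters in practice (the `math` map and its `:double` key are invented for illustration): when an anonymous function is stored inside a data structure, the first dot accesses the field and the second dot, together with the parentheses, invokes the function:

    math = %{double: fn n -> n * 2 end}

    # `math.double` returns the anonymous function,
    # `.(21)` then invokes it with the argument 21.
    math.double.(21)
    #=> 42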
+ + We can also use the dot for creating aliases: iex> Hello.World Hello.World @@ -326,41 +351,29 @@ defmodule Kernel.SpecialForms do iex> Kernel.Sample Kernel.Sample - iex> Kernel.length([1,2,3]) + iex> Kernel.length([1, 2, 3]) 3 iex> Kernel.+(1, 2) 3 - iex> Kernel."length"([1,2,3]) + iex> Kernel."length"([1, 2, 3]) 3 iex> Kernel.'+'(1, 2) 3 - Note that `Kernel."HELLO"` will be treated as a remote call and not an alias. + Note that `Kernel."FUNCTION_NAME"` will be treated as a remote call and not an alias. This choice was done so every time single- or double-quotes are used, we have - a remote call irregardless of the quote contents. This decision is also reflected + a remote call regardless of the quote contents. This decision is also reflected in the quoted expressions discussed below. - ## Runtime (dynamic) behaviour - - The result returned by `.` is always specified by the right-side: - - iex> x = String - iex> x.downcase("FOO") - "foo" - iex> x.Sample - String.Sample - - In case the right-side is also dynamic, `.`'s behaviour can be reproduced - at runtime via `apply/3` and `Module.concat/2`: - - iex> apply(:erlang, :+, [1,2]) - 3 + When the dot is used to invoke an anonymous function there is only one + operand, but it is still written using a postfix notation: - iex> Module.concat(Kernel, Sample) - Kernel.Sample + iex> negate = fn(n) -> -n end + iex> negate.(7) + -7 ## Quoted expression @@ -368,7 +381,9 @@ defmodule Kernel.SpecialForms do forms. When the right side starts with a lowercase letter (or underscore): - iex> quote do: String.downcase("FOO") + iex> quote do + ...> String.downcase("FOO") + ...> end {{:., [], [{:__aliases__, [alias: false], [:String]}, :downcase]}, [], ["FOO"]} Notice we have an inner tuple, containing the atom `:.` representing @@ -378,22 +393,35 @@ defmodule Kernel.SpecialForms do This tuple follows the general quoted expression structure in Elixir, with the name as first argument, some keyword list as metadata as second, - and the number of arguments as third. In this case, the arguments is the - alias `String` and the atom `:downcase`. The second argument is **always** - an atom: + and the list of arguments as third. In this case, the arguments are the + alias `String` and the atom `:downcase`. The second argument in a remote call + is **always** an atom regardless of the literal used in the call: - iex> quote do: String."downcase"("FOO") + iex> quote do + ...> String."downcase"("FOO") + ...> end {{:., [], [{:__aliases__, [alias: false], [:String]}, :downcase]}, [], ["FOO"]} The tuple containing `:.` is wrapped in another tuple, which actually represents the function call, and has `"FOO"` as argument. + In the case of calls to anonymous functions, the inner tuple with the dot + special form has only one argument, reflecting the fact that the operator is + unary: + + iex> quote do + ...> negate.(0) + ...> end + {{:., [], [{:negate, [], __MODULE__}]}, [], [0]} + When the right side is an alias (i.e. starts with uppercase), we get instead: - iex> quote do: Hello.World + iex> quote do + ...> Hello.World + ...> end {:__aliases__, [alias: false], [:Hello, :World]} - We got into more details about aliases in the `__aliases__` special form + We go into more details about aliases in the `__aliases__/1` special form documentation. 
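When the left side of the dot is a variable rather than an alias, the quoted expression still carries the inner `:.` tuple, only with the quoted variable as its first argument. A small sketch (the `mod` variable is illustrative, and its context appears as `Elixir` because the quote here happens outside of a module):

    quote do
      mod.downcase("FOO")
    end
    #=> {{:., [], [{:mod, [], Elixir}, :downcase]}, [], ["FOO"]}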
## Unquoting @@ -401,27 +429,31 @@ defmodule Kernel.SpecialForms do We can also use unquote to generate a remote call in a quoted expression: iex> x = :downcase - iex> quote do: String.unquote(x)("FOO") + iex> quote do + ...> String.unquote(x)("FOO") + ...> end {{:., [], [{:__aliases__, [alias: false], [:String]}, :downcase]}, [], ["FOO"]} - Similar to `Kernel."HELLO"`, `unquote(x)` will always generate a remote call, + Similar to `Kernel."FUNCTION_NAME"`, `unquote(x)` will always generate a remote call, independent of the value of `x`. To generate an alias via the quoted expression, one needs to rely on `Module.concat/2`: iex> x = Sample - iex> quote do: Module.concat(String, unquote(x)) + iex> quote do + ...> Module.concat(String, unquote(x)) + ...> end {{:., [], [{:__aliases__, [alias: false], [:Module]}, :concat]}, [], [{:__aliases__, [alias: false], [:String]}, Sample]} """ - defmacro unquote(:.)(left, right) + defmacro unquote(:.)(left, right), do: error!([left, right]) @doc """ - `alias` is used to setup aliases, often useful with modules names. + `alias/2` is used to setup aliases, often useful with modules names. ## Examples - `alias` can be used to setup an alias for any module: + `alias/2` can be used to setup an alias for any module: defmodule Math do alias MyKeyword, as: Keyword @@ -446,9 +478,19 @@ defmodule Kernel.SpecialForms do alias Foo.Bar.Baz, as: Baz + We can also alias multiple modules in one line: + + alias Foo.{Bar, Baz, Biz} + + Is the same as: + + alias Foo.Bar + alias Foo.Baz + alias Foo.Biz + ## Lexical scope - `import`, `require` and `alias` are called directives and all + `import/2`, `require/2` and `alias/2` are called directives and all have lexical scope. This means you can set up aliases inside specific functions and it won't affect the overall scope. @@ -462,20 +504,19 @@ defmodule Kernel.SpecialForms do was not explicitly defined. Both warning behaviours could be changed by explicitly - setting the `:warn` option to true or false. + setting the `:warn` option to `true` or `false`. """ - defmacro alias(module, opts) + defmacro alias(module, opts), do: error!([module, opts]) @doc """ - Requires a given module to be compiled and loaded. + Requires a module in order to use its macros. ## Examples - Notice that usually modules should not be required before usage, - the only exception is if you want to use the macros from a module. - In such cases, you need to explicitly require them. + Public functions in modules are globally available, but in order to use + macros, you need to opt-in by requiring the module they are defined in. - Let's suppose you created your own `if` implementation in the module + Let's suppose you created your own `if/2` implementation in the module `MyMacros`. If you want to invoke it, you need to first explicitly require the `MyMacros`: @@ -488,16 +529,16 @@ defmodule Kernel.SpecialForms do ## Alias shortcut - `require` also accepts `as:` as an option so it automatically sets - up an alias. Please check `alias` for more information. + `require/2` also accepts `as:` as an option so it automatically sets + up an alias. Please check `alias/2` for more information. """ - defmacro require(module, opts) + defmacro require(module, opts), do: error!([module, opts]) @doc """ - Imports function and macros from other modules. + Imports functions and macros from other modules. 
- `import` allows one to easily access functions or macros from + `import/2` allows one to easily access functions or macros from others modules without using the qualified name. ## Examples @@ -508,7 +549,7 @@ defmodule Kernel.SpecialForms do iex> import List iex> flatten([1, [2], 3]) - [1,2,3] + [1, 2, 3] ## Selector @@ -531,26 +572,34 @@ defmodule Kernel.SpecialForms do import List, only: [flatten: 1] import String, except: [split: 2] - Notice that calling `except` for a previously declared `import` + Notice that calling `except` for a previously declared `import/2` simply filters the previously imported elements. For example: - import List, only: [flatten: 1, keyfind: 3] + import List, only: [flatten: 1, keyfind: 4] import List, except: [flatten: 1] - After the two import calls above, only `List.keyfind/3` will be + After the two import calls above, only `List.keyfind/4` will be imported. + ## Underscore functions + + By default functions starting with `_` are not imported. If you really want + to import a function starting with `_` you must explicitly include it in the + `:only` selector. + + import File.Stream, only: [__build__: 3] + ## Lexical scope - It is important to notice that `import` is lexical. This means you + It is important to notice that `import/2` is lexical. This means you can import specific macros inside specific functions: defmodule Math do def some_function do - # 1) Disable `if/2` from Kernel + # 1) Disable "if/2" from Kernel import Kernel, except: [if: 2] - # 2) Require the new `if` macro from MyMacros + # 2) Require the new "if/2" macro from MyMacros import MyMacros # 3) Use the new macro @@ -574,7 +623,7 @@ defmodule Kernel.SpecialForms do was not explicitly defined. Both warning behaviours could be changed by explicitly - setting the `:warn` option to true or false. + setting the `:warn` option to `true` or `false`. ## Ambiguous function/macro names @@ -583,7 +632,7 @@ defmodule Kernel.SpecialForms do if an ambiguous call to `foo/1` is actually made; that is, the errors are emitted lazily, not eagerly. """ - defmacro import(module, opts) + defmacro import(module, opts), do: error!([module, opts]) @doc """ Returns the current environment information as a `Macro.Env` struct. @@ -591,23 +640,23 @@ defmodule Kernel.SpecialForms do In the environment you can access the current filename, line numbers, set up aliases, the current function and others. """ - defmacro __ENV__ + defmacro __ENV__, do: error!([]) @doc """ Returns the current module name as an atom or `nil` otherwise. - Although the module can be accessed in the `__ENV__`, this macro + Although the module can be accessed in the `__ENV__/0`, this macro is a convenient shortcut. """ - defmacro __MODULE__ + defmacro __MODULE__, do: error!([]) @doc """ - Returns the current directory as a binary. + Returns the absolute path of the directory of the current file as a binary. Although the directory can be accessed as `Path.dirname(__ENV__.file)`, this macro is a convenient shortcut. """ - defmacro __DIR__ + defmacro __DIR__, do: error!([]) @doc """ Returns the current calling environment as a `Macro.Env` struct. @@ -615,29 +664,30 @@ defmodule Kernel.SpecialForms do In the environment you can access the filename, line numbers, set up aliases, the function and others. """ - defmacro __CALLER__ + defmacro __CALLER__, do: error!([]) @doc """ - Accesses an already bound variable in match clauses. + Accesses an already bound variable in match clauses. Also known as the pin operator. 
## Examples Elixir allows variables to be rebound via static single assignment: iex> x = 1 - iex> x = 2 + iex> x = x + 1 iex> x 2 However, in some situations, it is useful to match against an existing - value, instead of rebinding. This can be done with the `^` special form: + value, instead of rebinding. This can be done with the `^` special form, + colloquially known as the pin operator: iex> x = 1 iex> ^x = List.first([1]) iex> ^x = List.first([2]) ** (MatchError) no match of right hand side value: 2 - Note that `^` always refers to the value of x prior to the match. The + Note that `^x` always refers to the value of `x` prior to the match. The following example will match: iex> x = 0 @@ -646,15 +696,43 @@ defmodule Kernel.SpecialForms do 1 """ - defmacro ^(var) + defmacro ^(var), do: error!([var]) + + @doc """ + Matches the value on the right against the pattern on the left. + """ + defmacro left = right, do: error!([left, right]) + + @doc """ + Used by types and bitstrings to specify types. + + This operator is used in two distinct occasions in Elixir. + It is used in typespecs to specify the type of a variable, + function or of a type itself: + + @type number :: integer | float + @spec add(number, number) :: number + + It may also be used in bit strings to specify the type + of a given bit segment: + + <> = bits + + Read the documentation on the `Typespec` page and + `<<>>/1` for more information on typespecs and + bitstrings respectively. + """ + defmacro left :: right, do: error!([left, right]) @doc ~S""" Gets the representation of any expression. ## Examples - quote do: sum(1, 2, 3) - #=> {:sum, [], [1, 2, 3]} + iex> quote do + ...> sum(1, 2, 3) + ...> end + {:sum, [], [1, 2, 3]} ## Explanation @@ -678,17 +756,21 @@ defmodule Kernel.SpecialForms do ## Options - * `:unquote` - when false, disables unquoting. Useful when you have a quote + * `:unquote` - when `false`, disables unquoting. Useful when you have a quote inside another quote and want to control what quote is able to unquote. * `:location` - when set to `:keep`, keeps the current line and file from quote. Read the Stacktrace information section below for more information. + * `:generated` - marks the given chunk as generated so it does not emit warnings. + Currently it only works on special forms (for example, you can annotate a `case` + but not an `if`). + * `:context` - sets the resolution context. * `:bind_quoted` - passes a binding to the macro. Whenever a binding is - given, `unquote` is automatically disabled. + given, `unquote/1` is automatically disabled. ## Quote literals @@ -704,7 +786,7 @@ defmodule Kernel.SpecialForms do ## Quote and macros - `quote` is commonly used with macros for code generation. As an exercise, + `quote/2` is commonly used with macros for code generation. As an exercise, let's define a macro that multiplies a number by itself (squared). Note there is no reason to define such as a macro (and it would actually be seen as a bad practice), but it is simple enough that it allows us to focus @@ -740,7 +822,7 @@ defmodule Kernel.SpecialForms do Returning 5 Returning 5 - 25 + Got 25 Notice how "Returning 5" was printed twice, instead of just once. This is because a macro receives an expression and not a value (which is what we @@ -772,7 +854,7 @@ defmodule Kernel.SpecialForms do once. 
In fact, this pattern is so common that most of the times you will want - to use the `bind_quoted` option with `quote`: + to use the `bind_quoted` option with `quote/2`: defmodule Math do defmacro squared(x) do @@ -800,7 +882,7 @@ defmodule Kernel.SpecialForms do import Math squared(5) - x #=> ** (RuntimeError) undefined function or variable: x + x #=> ** (CompileError) undefined variable x or undefined function x/0 We can see that `x` did not leak to the user context. This happens because Elixir macros are hygienic, a topic we will discuss at length @@ -812,7 +894,9 @@ defmodule Kernel.SpecialForms do defmodule Hygiene do defmacro no_interference do - quote do: a = 1 + quote do + a = 1 + end end end @@ -824,13 +908,15 @@ defmodule Kernel.SpecialForms do In the example above, `a` returns 10 even if the macro is apparently setting it to 1 because variables defined - in the macro does not affect the context the macro is executed in. + in the macro do not affect the context the macro is executed in. If you want to set or get a variable in the caller's context, you can do it with the help of the `var!` macro: defmodule NoHygiene do defmacro interference do - quote do: var!(a) = 1 + quote do + var!(a) = 1 + end end end @@ -859,7 +945,7 @@ defmodule Kernel.SpecialForms do Hygiene.write Hygiene.read - #=> ** (RuntimeError) undefined function or variable: a + #=> ** (RuntimeError) undefined variable a or undefined function a/0 For such, you can explicitly pass the current module scope as argument: @@ -888,34 +974,38 @@ defmodule Kernel.SpecialForms do Consider the following example: defmodule Hygiene do - alias HashDict, as: D + alias Map, as: M defmacro no_interference do - quote do: D.new + quote do + M.new + end end end require Hygiene - Hygiene.no_interference #=> #HashDict<[]> + Hygiene.no_interference #=> %{} - Notice that, even though the alias `D` is not available + Notice that, even though the alias `M` is not available in the context the macro is expanded, the code above works - because `D` still expands to `HashDict`. + because `M` still expands to `Map`. Similarly, even if we defined an alias with the same name before invoking a macro, it won't affect the macro's result: defmodule Hygiene do - alias HashDict, as: D + alias Map, as: M defmacro no_interference do - quote do: D.new + quote do + M.new + end end end require Hygiene - alias SomethingElse, as: D - Hygiene.no_interference #=> #HashDict<[]> + alias SomethingElse, as: M + Hygiene.no_interference #=> %{} In some cases, you want to access an alias or a module defined in the caller. For such, you can use the `alias!` macro: @@ -923,13 +1013,17 @@ defmodule Kernel.SpecialForms do defmodule Hygiene do # This will expand to Elixir.Nested.hello defmacro no_interference do - quote do: Nested.hello + quote do + Nested.hello + end end # This will expand to Nested.hello for # whatever is Nested in the caller defmacro interference do - quote do: alias!(Nested).hello + quote do + alias!(Nested).hello + end end end @@ -952,54 +1046,54 @@ defmodule Kernel.SpecialForms do following code: defmodule Hygiene do - defmacrop get_size do + defmacrop get_length do quote do - size("hello") + length([1, 2, 3]) end end - def return_size do - import Kernel, except: [size: 1] - get_size + def return_length do + import Kernel, except: [length: 1] + get_length end end - Hygiene.return_size #=> 5 + Hygiene.return_length #=> 3 - Notice how `return_size` returns 5 even though the `size/1` - function is not imported. 
In fact, even if `return_size` imported - a function from another module, it wouldn't affect the function - result: + Notice how `Hygiene.return_length/0` returns `3` even though the `Kernel.length/1` + function is not imported. In fact, even if `return_length/0` + imported a function with the same name and arity from another + module, it wouldn't affect the function result: - def return_size do - import Dict, only: [size: 1] - get_size + def return_length do + import String, only: [length: 1] + get_length end - Calling this new `return_size` will still return 5 as result. + Calling this new `return_length/0` will still return `3` as result. Elixir is smart enough to delay the resolution to the latest - moment possible. So, if you call `size("hello")` inside quote, - but no `size/1` function is available, it is then expanded in + possible moment. So, if you call `length([1, 2, 3])` inside quote, + but no `length/1` function is available, it is then expanded in the caller: defmodule Lazy do - defmacrop get_size do - import Kernel, except: [size: 1] + defmacrop get_length do + import Kernel, except: [length: 1] quote do - size([a: 1, b: 2]) + length("hello") end end - def return_size do - import Kernel, except: [size: 1] - import Dict, only: [size: 1] - get_size + def return_length do + import Kernel, except: [length: 1] + import String, only: [length: 1] + get_length end end - Lazy.return_size #=> 2 + Lazy.return_length #=> 5 ## Stacktrace information @@ -1023,6 +1117,11 @@ defmodule Kernel.SpecialForms do defadd end + require Sample + Sample.add(:one, :two) + #=> ** (ArithmeticError) bad argument in arithmetic expression + #=> adder.ex:5: Sample.add/2 + When using `location: :keep` and invalid arguments are given to `Sample.add/2`, the stacktrace information will point to the file and line inside the quote. Without `location: :keep`, the error is @@ -1080,7 +1179,7 @@ defmodule Kernel.SpecialForms do If you try to run our new macro, you will notice it won't even compile, complaining that the variables `k` and `v` - does not exist. This is because of the ambiguity: `unquote(k)` + do not exist. This is because of the ambiguity: `unquote(k)` can either be an unquote fragment, as previously, or a regular unquote as in `unquote(kv)`. @@ -1102,19 +1201,21 @@ defmodule Kernel.SpecialForms do In fact, the `:bind_quoted` option is recommended every time one desires to inject a value into the quote. """ - defmacro quote(opts, block) + defmacro quote(opts, block), do: error!([opts, block]) @doc """ Unquotes the given expression from inside a macro. ## Examples - Imagine the situation you have a variable `name` and + Imagine the situation you have a variable `value` and you want to inject it inside some quote. The first attempt would be: value = 13 - quote do: sum(1, value, 3) + quote do + sum(1, value, 3) + end Which would then return: @@ -1122,25 +1223,29 @@ defmodule Kernel.SpecialForms do Which is not the expected result. For this, we use unquote: - value = 13 - quote do: sum(1, unquote(value), 3) - #=> {:sum, [], [1, 13, 3]} + iex> value = 13 + iex> quote do + ...> sum(1, unquote(value), 3) + ...> end + {:sum, [], [1, 13, 3]} """ - defmacro unquote(:unquote)(expr) + defmacro unquote(:unquote)(expr), do: error!([expr]) @doc """ Unquotes the given list expanding its arguments. Similar - to unquote. + to `unquote/1`. 
## Examples - values = [2, 3, 4] - quote do: sum(1, unquote_splicing(values), 5) - #=> {:sum, [], [1, 2, 3, 4, 5]} + iex> values = [2, 3, 4] + iex> quote do + ...> sum(1, unquote_splicing(values), 5) + ...> end + {:sum, [], [1, 2, 3, 4, 5]} """ - defmacro unquote(:unquote_splicing)(expr) + defmacro unquote(:unquote_splicing)(expr), do: error!([expr]) @doc ~S""" Comprehensions allow you to quickly build a data structure from @@ -1159,7 +1264,7 @@ defmodule Kernel.SpecialForms do [2, 4, 6, 8] # A comprehension with two generators - iex> for x <- [1, 2], y <- [2, 3], do: x*y + iex> for x <- [1, 2], y <- [2, 3], do: x * y [2, 3, 4, 6] Filters can also be given: @@ -1169,19 +1274,20 @@ defmodule Kernel.SpecialForms do [2, 4, 6] Note generators can also be used to filter as it removes any value - that doesn't match the left side of `<-`: + that doesn't match the pattern on the left side of `<-`: - iex> for {:user, name} <- [user: "jose", admin: "john", user: "eric"] do + iex> users = [user: "john", admin: "meg", guest: "barbara"] + iex> for {type, name} when type != :guest <- users do ...> String.upcase(name) ...> end - ["JOSE", "ERIC"] + ["JOHN", "MEG"] Bitstring generators are also supported and are very useful when you need to organize bitstring streams: iex> pixels = <<213, 45, 132, 64, 76, 32, 76, 0, 0, 234, 32, 15>> - iex> for <>, do: {r, g, b} - [{213,45,132},{64,76,32},{76,0,0},{234,32,15}] + iex> for <>, do: {r, g, b} + [{213, 45, 132}, {64, 76, 32}, {76, 0, 0}, {234, 32, 15}] Variable assignments inside the comprehension, be it in generators, filters or inside the block, are not reflected outside of the @@ -1208,7 +1314,65 @@ defmodule Kernel.SpecialForms do end """ - defmacro for(args) + defmacro for(args), do: error!([args]) + + @doc """ + Used to combine matching clauses. + + Let's start with an example: + + iex> opts = %{width: 10, height: 15} + iex> with {:ok, width} <- Map.fetch(opts, :width), + ...> {:ok, height} <- Map.fetch(opts, :height), + ...> do: {:ok, width * height} + {:ok, 150} + + If all clauses match, the `do` block is executed, returning its result. + Otherwise the chain is aborted and the non-matched value is returned: + + iex> opts = %{width: 10} + iex> with {:ok, width} <- Map.fetch(opts, :width), + ...> {:ok, height} <- Map.fetch(opts, :height), + ...> do: {:ok, width * height} + :error + + Guards can be used in patterns as well: + + iex> users = %{"melany" => "guest", "bob" => :admin} + iex> with {:ok, role} when not is_binary(role) <- Map.fetch(users, "bob"), + ...> do: {:ok, to_string(role)} + {:ok, "admin"} + + As in `for/1`, variables bound inside `with/1` won't leak; + "bare expressions" may also be inserted between the clauses: + + iex> width = nil + iex> opts = %{width: 10, height: 15} + iex> with {:ok, width} <- Map.fetch(opts, :width), + ...> double_width = width * 2, + ...> {:ok, height} <- Map.fetch(opts, :height), + ...> do: {:ok, double_width * height} + {:ok, 300} + iex> width + nil + + An `else` option can be given to modify what is being returned from + `with` in the case of a failed match: + + iex> opts = %{width: 10} + iex> with {:ok, width} <- Map.fetch(opts, :width), + ...> {:ok, height} <- Map.fetch(opts, :height) do + ...> {:ok, width * height} + ...> else + ...> :error -> + ...> {:error, :wrong_data} + ...> end + {:error, :wrong_data} + + If there is no matching `else` condition, then a `WithClauseError` exception is raised. + + """ + defmacro with(args), do: error!([args]) @doc """ Defines an anonymous function. 
@@ -1220,7 +1384,7 @@ defmodule Kernel.SpecialForms do 3 """ - defmacro unquote(:fn)(clauses) + defmacro unquote(:fn)(clauses), do: error!([clauses]) @doc """ Internal special form for block expressions. @@ -1229,11 +1393,15 @@ defmodule Kernel.SpecialForms do of expressions in Elixir. This special form is private and should not be invoked directly: - iex> quote do: (1; 2; 3) + iex> quote do + ...> 1 + ...> 2 + ...> 3 + ...> end {:__block__, [], [1, 2, 3]} """ - defmacro __block__(args) + defmacro __block__(args), do: error!([args]) @doc """ Captures or creates an anonymous function. @@ -1287,35 +1455,39 @@ defmodule Kernel.SpecialForms do iex> fun.(1, 2) {1, 2} - iex> fun = &[&1|&2] + iex> fun = &[&1 | &2] iex> fun.(1, 2) - [1|2] + [1 | 2] The only restrictions when creating anonymous functions is that at least one placeholder must be present, i.e. it must contain at least - `&1`: + `&1`, and that block expressions are not supported: - # No placeholder fails to compile - &var + # No placeholder, fails to compile. + &(:foo) - # Block expressions are also not supported - &(foo(&1, &2); &3 + &4) + # Block expression, fails to compile. + &(&1; &2) """ - defmacro unquote(:&)(expr) + defmacro unquote(:&)(expr), do: error!([expr]) @doc """ Internal special form to hold aliases information. It is usually compiled to an atom: - iex> quote do: Foo.Bar + iex> quote do + ...> Foo.Bar + ...> end {:__aliases__, [alias: false], [:Foo, :Bar]} Elixir represents `Foo.Bar` as `__aliases__` so calls can be unambiguously identified by the operator `:.`. For example: - iex> quote do: Foo.bar + iex> quote do + ...> Foo.bar + ...> end {{:., [], [{:__aliases__, [alias: false], [:Foo]}, :bar]}, [], []} Whenever an expression iterator sees a `:.` as the tuple key, @@ -1324,32 +1496,24 @@ defmodule Kernel.SpecialForms do On the other hand, aliases holds some properties: - 1. The head element of aliases can be any term. + 1. The head element of aliases can be any term that must expand to + an atom at compilation time. 2. The tail elements of aliases are guaranteed to always be atoms. - 3. When the head element of aliases is the atom `:Elixir`, no expansion happen. - - 4. When the head element of aliases is not an atom, it is expanded at runtime: - - quote do: some_var.Foo - {:__aliases__, [], [{:some_var, [], Elixir}, :Foo]} - - Since `some_var` is not available at compilation time, the compiler - expands such expression to: - - Module.concat [some_var, Foo] + 3. When the head element of aliases is the atom `:Elixir`, no expansion happens. """ - defmacro __aliases__(args) + defmacro __aliases__(args), do: error!([args]) @doc """ - Calls the overriden function when overriding it with `defoverridable`. - See `Kernel.defoverridable` for more information and documentation. + Calls the overridden function when overriding it with `Kernel.defoverridable/1`. + + See `Kernel.defoverridable/1` for more information and documentation. """ - defmacro super(args) + defmacro super(args), do: error!([args]) - @doc """ + @doc ~S""" Matches the given expression against the given clauses. ## Examples @@ -1363,7 +1527,21 @@ defmodule Kernel.SpecialForms do In the example above, we match `thing` against each clause "head" and execute the clause "body" corresponding to the first clause - that matches. If no clause matches, an error is raised. + that matches. + + If no clause matches, an error is raised. + For this reason, it may be necessary to add a final catch-all clause (like `_`) + which will always match. 
+ + x = 10 + + case x do + 0 -> + "This clause won't match" + _ -> + "This clause would match any value (x = #{x})" + end + #=> "This clause would match any value (x = 10)" ## Variables handling @@ -1392,15 +1570,35 @@ defmodule Kernel.SpecialForms do In the example above, value is going to be `7` or `13` depending on the value of `lucky?`. In case `value` has no previous value before case, clauses that do not explicitly bind a value have the variable - bound to nil. + bound to `nil`. + + If you want to pattern match against an existing variable, + you need to use the `^/1` operator: + + x = 1 + + case 10 do + ^x -> "Won't match" + _ -> "Will match" + end + #=> "Will match" + """ - defmacro case(condition, clauses) + defmacro case(condition, clauses), do: error!([condition, clauses]) @doc """ Evaluates the expression corresponding to the first clause that - evaluates to truth value. + evaluates to a truthy value. - Raises an error if all conditions evaluate to to nil or false. + cond do + hd([1, 2, 3]) -> + "1 is considered as true" + end + #=> "1 is considered as true" + + Raises an error if all conditions evaluate to `nil` or `false`. + For this reason, it may be necessary to add a final always-truthy condition + (anything non-`false` and non-`nil`), which will always match. ## Examples @@ -1412,12 +1610,13 @@ defmodule Kernel.SpecialForms do true -> "This will" end + #=> "This will" """ - defmacro cond(clauses) + defmacro cond(clauses), do: error!([clauses]) @doc ~S""" - Evaluate the given expressions and handle any error, exit + Evaluates the given expressions and handles any error, exit, or throw that may have happened. ## Examples @@ -1429,28 +1628,29 @@ defmodule Kernel.SpecialForms do IO.puts "Invalid argument given" catch value -> - IO.puts "caught #{value}" + IO.puts "Caught #{inspect(value)}" else value -> - IO.puts "Success! The result was #{value}" + IO.puts "Success! The result was #{inspect(value)}" after IO.puts "This is printed regardless if it failed or succeed" end - The rescue clause is used to handle exceptions, while the catch - clause can be used to catch thrown values. The else clause can - be used to control flow based on the result of the expression. - Catch, rescue and else clauses work based on pattern matching. + The `rescue` clause is used to handle exceptions, while the `catch` + clause can be used to catch thrown values and exits. + The `else` clause can be used to control flow based on the result of + the expression. `catch`, `rescue`, and `else` clauses work based on + pattern matching (similar to the `case` special form). - Note that calls inside `try` are not tail recursive since the VM + Note that calls inside `try/1` are not tail recursive since the VM needs to keep the stacktrace in case an exception happens. - ## Rescue clauses + ## `rescue` clauses - Besides relying on pattern matching, rescue clauses provides some - conveniences around exceptions that allows one to rescue an - exception by its name. All the following formats are valid rescue - expressions: + Besides relying on pattern matching, `rescue` clauses provide some + conveniences around exceptions that allow one to rescue an + exception by its name. 
All the following formats are valid patterns + in `rescue` clauses: try do UndefinedModule.undefined_function @@ -1480,7 +1680,7 @@ defmodule Kernel.SpecialForms do ## Erlang errors - Erlang errors are transformed into Elixir ones during rescue: + Erlang errors are transformed into Elixir ones when rescuing: try do :erlang.error(:badarg) @@ -1498,8 +1698,8 @@ defmodule Kernel.SpecialForms do ErlangError -> :ok end - In fact, ErlangError can be used to rescue any error that is - not an Elixir error proper. For example, it can be used to rescue + In fact, `ErlangError` can be used to rescue any error that is + not a proper Elixir error. For example, it can be used to rescue the earlier `:badarg` error too, prior to transformation: try do @@ -1510,27 +1710,64 @@ defmodule Kernel.SpecialForms do ## Catching throws and exits - The catch clause can be used to catch throws values and exits. + The `catch` clause can be used to catch thrown values and exits. try do - exit(1) + exit(:shutdown) catch - :exit, 1 -> IO.puts "Exited with 1" + :exit, :shutdown -> + IO.puts "Exited with shutdown reason" end try do throw(:sample) catch :throw, :sample -> - IO.puts "sample thrown" + IO.puts ":sample was thrown" end - catch values also support `:error`, as in Erlang, although it is - commonly avoided in favor of raise/rescue control mechanisms. + The `catch` clause also supports `:error` alongside `:exit` and `:throw`, as + in Erlang, although it is commonly avoided in favor of `raise`/`rescue` control + mechanisms. One reason for this is that when catching `:error`, the error is + not automatically transformed into an Elixir error: - ## Else clauses + try do + :erlang.error(:badarg) + catch + :error, :badarg -> + :ok + end + + Note that it is possible to match both on the caught value as well as the *kind* + of such value: - Else clauses allow the result of the expression to be pattern + try do + exit(:shutdown) + catch + kind, value when kind in [:exit, :throw] -> + IO.puts "Exited with or thrown value #{inspect(value)}" + end + + ## `after` clauses + + An `after` clause allows you to define cleanup logic that will be invoked both + when the tried block of code succeeds and also when an error is raised. Note + that the process will exit as usually when receiving an exit signal that causes + it to exit abruptly and so the `after` clause is not guaranteed to be executed. + Luckily, most resources in Elixir (such as open files, ETS tables, ports, sockets, + etc.) are linked to or monitor the owning process and will automatically clean + themselves up if that process exits. + + File.write!("tmp/story.txt", "Hello, World") + try do + do_something_with("tmp/story.txt") + after + File.rm("tmp/story.txt") + end + + ## `else` clauses + + `else` clauses allow the result of the tried expression to be pattern matched on: x = 2 @@ -1546,8 +1783,8 @@ defmodule Kernel.SpecialForms do :large end - If an else clause is not present the result of the expression will - be return, if no exceptions are raised: + If an `else` clause is not present and no exceptions are raised, + the result of the expression will be returned: x = 1 ^x = @@ -1558,9 +1795,9 @@ defmodule Kernel.SpecialForms do :infinity end - However when an else clause is present but the result of the expression - does not match any of the patterns an exception will be raised. 
This - exception will not be caught by a catch or rescue in the same try: + However, when an `else` clause is present but the result of the expression + does not match any of the patterns then an exception will be raised. This + exception will not be caught by a `catch` or `rescue` in the same `try`: x = 1 try do @@ -1580,8 +1817,8 @@ defmodule Kernel.SpecialForms do :error_b end - Similarly an exception inside an else clause is not caught or rescued - inside the same try: + Similarly, an exception inside an `else` clause is not caught or rescued + inside the same `try`: try do try do @@ -1601,9 +1838,24 @@ defmodule Kernel.SpecialForms do end This means the VM no longer needs to keep the stacktrace once inside - an else clause and so tail recursion is possible when using a `try` - with a tail call as the final call inside an else clause. The same - is true for rescue and catch clauses. + an `else` clause and so tail recursion is possible when using a `try` + with a tail call as the final call inside an `else` clause. The same + is true for `rescue` and `catch` clauses. + + Only the result of the tried expression falls down to the `else` clause. + If the `try` ends up in the `rescue` or `catch` clauses, their result + will not fall down to `else`: + + try do + throw(:catch_this) + catch + :throw, :catch_this -> + :it_was_caught + else + # :it_was_caught will not fall down to this "else" clause. + other -> + {:else, other} + end ## Variable handling @@ -1619,7 +1871,7 @@ defmodule Kernel.SpecialForms do _, _ -> :failed end - x #=> unbound variable `x` + x #=> unbound variable "x" In the example above, `x` cannot be accessed since it was defined inside the `try` clause. A common practice to address this issue @@ -1635,7 +1887,7 @@ defmodule Kernel.SpecialForms do end """ - defmacro try(args) + defmacro try(args), do: error!([args]) @doc """ Checks if there is a message matching the given clauses @@ -1655,8 +1907,8 @@ defmodule Kernel.SpecialForms do IO.puts :stderr, "Unexpected message received" end - An optional after clause can be given in case the message was not - received after the specified period of time: + An optional `after` clause can be given in case the message was not + received after the given timeout period, specified in milliseconds: receive do {:selector, i, value} when is_integer(i) -> @@ -1671,18 +1923,23 @@ defmodule Kernel.SpecialForms do end The `after` clause can be specified even if there are no match clauses. - There are two special cases for the timeout value given to `after` + The timeout value given to `after` can be any expression evaluating to + one of the allowed values: * `:infinity` - the process should wait indefinitely for a matching message, this is the same as not using a timeout - * 0 - if there is no matching message in the mailbox, the timeout + * `0` - if there is no matching message in the mailbox, the timeout will occur immediately + * positive integer smaller than `4_294_967_295` (`0xFFFFFFFF` + in hex notation) - it should be possible to represent the timeout + value as an unsigned 32-bit integer. + ## Variables handling - The `receive` special form handles variables exactly as the `case` + The `receive/1` special form handles variables exactly as the `case/2` special macro. For more information, check the docs for `case/2`. 
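One property worth spelling out with a small sketch (not one of the examples above): `receive/1` is selective, so messages that match no clause stay in the mailbox and can be consumed by a later `receive/1`:

    send(self(), :world)
    send(self(), :hello)

    # Matches :hello even though :world arrived first...
    receive do
      :hello -> :got_hello
    end
    #=> :got_hello

    # ...while :world is still queued for the next receive.
    receive do
      other -> other
    end
    #=> :world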
""" - defmacro receive(args) + defmacro receive(args), do: error!([args]) end diff --git a/lib/elixir/lib/kernel/typespec.ex b/lib/elixir/lib/kernel/typespec.ex index f75870c323d..aa0a14dce5d 100644 --- a/lib/elixir/lib/kernel/typespec.ex +++ b/lib/elixir/lib/kernel/typespec.ex @@ -1,191 +1,5 @@ defmodule Kernel.Typespec do - @moduledoc """ - Provides macros and functions for working with typespecs. - - Elixir comes with a notation for declaring types and specifications. Elixir is - dynamically typed, as such typespecs are never used by the compiler to - optimize or modify code. Still, using typespecs is useful as documentation and - tools such as [Dialyzer](http://www.erlang.org/doc/man/dialyzer.html) can - analyze the code with typespecs to find bugs. - - The attributes `@type`, `@opaque`, `@typep`, `@spec` and `@callback` available - in modules are handled by the equivalent macros defined by this module. See - sub-sections "Defining a type" and "Defining a specification" below. - - ## Types and their syntax - - The type syntax provided by Elixir is fairly similar to the one in - [Erlang](http://www.erlang.org/doc/reference_manual/typespec.html). - - Most of the built-in types provided in Erlang (for example, `pid()`) are - expressed the same way: `pid()` or simply `pid`. Parametrized types are also - supported (`list(integer)`) and so are remote types (`Enum.t`). - - Integers and atom literals are allowed as types (ex. `1`, `:atom` or - `false`). All other types are built of unions of predefined types. Certain - shorthands are allowed, such as `[...]`, `<<>>` and `{...}`. - - ### Predefined types - - Type :: any # the top type, the set of all terms - | none # the bottom type, contains no terms - | pid - | port - | reference - | Atom - | Bitstring - | float - | Fun - | Integer - | List - | Tuple - | Union - | UserDefined # Described in section "Defining a type" - - Atom :: atom - | ElixirAtom # `:foo`, `:bar`, ... - - Bitstring :: <<>> - | << _ :: M >> # M is a positive integer - | << _ :: _ * N >> # N is a positive integer - | << _ :: M, _ :: _ * N >> - - Fun :: (... -> any) # any function - | (... -> Type) # any arity, returning Type - | (() -> Type)) - | (TList -> Type) - - Integer :: integer - | ElixirInteger # ..., -1, 0, 1, ... 42 ... - | ElixirInteger..ElixirInteger # an integer range - - List :: list(Type) # proper list ([]-terminated) - | improper_list(Type1, Type2) # Type1=contents, Type2=termination - | maybe_improper_list(Type1, Type2) # Type1 and Type2 as above - | nonempty_list(Type) # proper non-empty list - | [] # empty list - | [Type] # shorthand for list(Type) - | [Type, ...] # shorthand for nonempty_list(Type) - - Tuple :: tuple # a tuple of any size - | {} # empty tuple - | {TList} - - TList :: Type - | Type, TList - - Union :: Type1 | Type2 - - ### Bit strings - - Bit string with a base size of 3: - - << _ :: 3 >> - - Bit string with a unit size of 8: - - << _ :: _ * 8 >> - - ### Anonymous functions - - Any anonymous function: - - ((...) -> any) - (... 
-> any) - - Anonymous function with arity of zero: - - (() -> type) - - Anonymous function with some arity: - - ((type, type) -> type) - (type, type -> type) - - ## Built-in types - - Built-in type | Defined as - :-------------------- | :--------- - `term` | `any` - `binary` | `<< _ :: _ * 8 >>` - `bitstring` | `<< _ :: _ * 1 >>` - `boolean` | `false` | `true` - `byte` | `0..255` - `char` | `0..0xffff` - `number` | `integer` | `float` - `list` | `[any]` - `maybe_improper_list` | `maybe_improper_list(any, any)` - `nonempty_list` | `nonempty_list(any)` - `iodata` | `iolist` | `binary` - `iolist` | `maybe_improper_list(byte` | `binary` | `iolist, binary` | `[])` - `module` | `atom` - `mfa` | `{atom, atom, arity}` - `arity` | `0..255` - `node` | `atom` - `timeout` | `:infinity` | `non_neg_integer` - `no_return` | `none` - `fun` | `(... -> any)` - - Some built-in types cannot be expressed with valid syntax according to the - language defined above. - - Built-in type | Can be interpreted as - :---------------- | :-------------------- - `non_neg_integer` | `0..` - `pos_integer` | `1..` - `neg_integer` | `..-1` - - Types defined in other modules are referred to as "remote types", they are - referenced as `Module.type_name` (ex. `Enum.t` or `String.t`). - - ## Defining a type - - @type type_name :: type - @typep type_name :: type - @opaque type_name :: type - - A type defined with `@typep` is private. An opaque type, defined with - `@opaque` is a type where the internal structure of the type will not be - visible, but the type is still public. - - Types can be parametrised by defining variables as parameters, these variables - can then be used to define the type. - - @type dict(key, value) :: [{key, value}] - - ## Defining a specification - - @spec function_name(type1, type2) :: return_type - @callback function_name(type1, type2) :: return_type - - Callbacks are used to define the callbacks functions of behaviours (see - `Behaviour`). - - Guards can be used to restrict type variables given as arguments to the - function. - - @spec function(arg) :: [arg] when arg: atom - - Type variables with no restriction can also be defined. - - @spec function(arg) :: [arg] when arg: var - - Specifications can be overloaded just like ordinary functions. - - @spec function(integer) :: atom - @spec function(atom) :: integer - - ## Notes - - Elixir discourages the use of type `string` as it might be confused with - binaries which are referred to as "strings" in Elixir (as opposed to character - lists). In order to use the type that is called `string` in Erlang, one has to - use the `char_list` type which is a synonym for `string`. If you use `string`, - you'll get a warning from the compiler. - - If you want to refer to the "string" type (the one operated on by functions in - the `String` module), use `String.t` type instead. - """ + @moduledoc false @doc """ Defines a type. 
@@ -197,8 +11,11 @@ defmodule Kernel.Typespec do """ defmacro deftype(type) do + pos = :elixir_locals.cache_env(__CALLER__) + %{line: line, file: file, module: module} = __CALLER__ quote do - Kernel.Typespec.deftype(:type, unquote(Macro.escape(type, unquote: true)), __ENV__) + Kernel.Typespec.deftype(:type, unquote(Macro.escape(type, unquote: true)), + unquote(line), unquote(file), unquote(module), unquote(pos)) end end @@ -212,8 +29,11 @@ defmodule Kernel.Typespec do """ defmacro defopaque(type) do + pos = :elixir_locals.cache_env(__CALLER__) + %{line: line, file: file, module: module} = __CALLER__ quote do - Kernel.Typespec.deftype(:opaque, unquote(Macro.escape(type, unquote: true)), __ENV__) + Kernel.Typespec.deftype(:opaque, unquote(Macro.escape(type, unquote: true)), + unquote(line), unquote(file), unquote(module), unquote(pos)) end end @@ -227,8 +47,11 @@ defmodule Kernel.Typespec do """ defmacro deftypep(type) do + pos = :elixir_locals.cache_env(__CALLER__) + %{line: line, file: file, module: module} = __CALLER__ quote do - Kernel.Typespec.deftype(:typep, unquote(Macro.escape(type, unquote: true)), __ENV__) + Kernel.Typespec.deftype(:typep, unquote(Macro.escape(type, unquote: true)), + unquote(line), unquote(file), unquote(module), unquote(pos)) end end @@ -242,8 +65,11 @@ defmodule Kernel.Typespec do """ defmacro defspec(spec) do + pos = :elixir_locals.cache_env(__CALLER__) + %{line: line, file: file, module: module} = __CALLER__ quote do - Kernel.Typespec.defspec(:spec, unquote(Macro.escape(spec, unquote: true)), __ENV__) + Kernel.Typespec.defspec(:spec, unquote(Macro.escape(spec, unquote: true)), + unquote(line), unquote(file), unquote(module), unquote(pos)) end end @@ -257,23 +83,30 @@ defmodule Kernel.Typespec do """ defmacro defcallback(spec) do + pos = :elixir_locals.cache_env(__CALLER__) + %{line: line, file: file, module: module} = __CALLER__ quote do - Kernel.Typespec.defspec(:callback, unquote(Macro.escape(spec, unquote: true)), __ENV__) + Kernel.Typespec.defspec(:callback, unquote(Macro.escape(spec, unquote: true)), + unquote(line), unquote(file), unquote(module), unquote(pos)) end end @doc """ - Defines a `type`, `typep` or `opaque` by receiving a typespec expression. - """ - def define_type(kind, expr, doc \\ nil, env) do - Module.store_typespec(env.module, kind, {kind, expr, doc, env}) - end + Defines a macro callback. + This macro is responsible for handling the attribute `@macrocallback`. + + ## Examples + + @macrocallback add(number, number) :: Macro.t - @doc """ - Defines a `spec` by receiving a typespec expression. """ - def define_spec(kind, expr, env) do - Module.store_typespec(env.module, kind, {kind, expr, env}) + defmacro defmacrocallback(spec) do + pos = :elixir_locals.cache_env(__CALLER__) + %{line: line, file: file, module: module} = __CALLER__ + quote do + Kernel.Typespec.defspec(:macrocallback, unquote(Macro.escape(spec, unquote: true)), + unquote(line), unquote(file), unquote(module), unquote(pos)) + end end @doc """ @@ -281,11 +114,12 @@ defmodule Kernel.Typespec do (private, opaque or not). This function is only available for modules being compiled. 
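A minimal sketch of how this can be used (the `MyData` module and its `t` type are made up; the calls must run inside the module body, while the module is still being compiled):

    defmodule MyData do
      @type t :: integer

      # Both checks run at compilation time, inside the module being compiled.
      IO.inspect Kernel.Typespec.defines_type?(__MODULE__, :t, 0)
      #=> true
      IO.inspect Kernel.Typespec.defines_type?(__MODULE__, :other, 0)
      #=> false
    end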
""" - def defines_type?(module, name, arity) do - finder = fn {_kind, expr, _doc, _caller} -> + @spec defines_type?(module, atom, arity) :: boolean + def defines_type?(module, name, arity) + when is_atom(module) and is_atom(name) and arity in 0..255 do + finder = fn {_kind, expr, _caller} -> type_to_signature(expr) == {name, arity} end - :lists.any(finder, Module.get_attribute(module, :type)) or :lists.any(finder, Module.get_attribute(module, :opaque)) end @@ -294,7 +128,9 @@ defmodule Kernel.Typespec do Returns `true` if the current module defines a given spec. This function is only available for modules being compiled. """ - def defines_spec?(module, name, arity) do + @spec defines_spec?(module, atom, arity) :: boolean + def defines_spec?(module, name, arity) + when is_atom(module) and is_atom(name) and arity in 0..255 do finder = fn {_kind, expr, _caller} -> spec_to_signature(expr) == {name, arity} end @@ -305,7 +141,9 @@ defmodule Kernel.Typespec do Returns `true` if the current module defines a callback. This function is only available for modules being compiled. """ - def defines_callback?(module, name, arity) do + @spec defines_callback?(module, atom, arity) :: boolean + def defines_callback?(module, name, arity) + when is_atom(module) and is_atom(name) and arity in 0..255 do finder = fn {_kind, expr, _caller} -> spec_to_signature(expr) == {name, arity} end @@ -315,7 +153,10 @@ defmodule Kernel.Typespec do @doc """ Converts a spec clause back to Elixir AST. """ - def spec_to_ast(name, {:type, line, :fun, [{:type, _, :product, args}, result]}) do + @spec spec_to_ast(atom, tuple) :: {atom, Keyword.t, [Macro.t]} + def spec_to_ast(name, spec) + def spec_to_ast(name, {:type, line, :fun, [{:type, _, :product, args}, result]}) + when is_atom(name) do meta = [line: line] body = {name, meta, Enum.map(args, &typespec_to_ast/1)} @@ -333,11 +174,12 @@ defmodule Kernel.Typespec do end end - def spec_to_ast(name, {:type, line, :fun, []}) do + def spec_to_ast(name, {:type, line, :fun, []}) when is_atom(name) do {:::, [line: line], [{name, [line: line], []}, quote(do: term)]} end - def spec_to_ast(name, {:type, line, :bounded_fun, [{:type, _, :fun, [{:type, _, :product, args}, result]}, constraints]}) do + def spec_to_ast(name, {:type, line, :bounded_fun, [{:type, _, :fun, [{:type, _, :product, args}, result]}, constraints]}) + when is_atom(name) do guards = for {:type, _, :constraint, [{:atom, _, :is_subtype}, [{:var, _, var}, type]]} <- constraints do {var, typespec_to_ast(type)} @@ -362,45 +204,36 @@ defmodule Kernel.Typespec do @doc """ Converts a type clause back to Elixir AST. """ + def type_to_ast(type) def type_to_ast({{:record, record}, fields, args}) when is_atom(record) do fields = for field <- fields, do: typespec_to_ast(field) args = for arg <- args, do: typespec_to_ast(arg) - type = {:{}, [], [record|fields]} + type = {:{}, [], [record | fields]} quote do: unquote(record)(unquote_splicing(args)) :: unquote(type) end - def type_to_ast({name, type, args}) do + def type_to_ast({name, type, args}) when is_atom(name) do args = for arg <- args, do: typespec_to_ast(arg) quote do: unquote(name)(unquote_splicing(args)) :: unquote(typespec_to_ast(type)) end - @doc """ - Returns all type docs available from the module's beam code. - - The result is returned as a list of tuples where the first element is the pair of type - name and arity and the second element is the documentation. - - The module must have a corresponding beam file which can be - located by the runtime system. 
- """ - @spec beam_typedocs(module | binary) :: [tuple] | nil + @doc false + # TODO: Remove on v2.0 def beam_typedocs(module) when is_atom(module) or is_binary(module) do - case abstract_code(module) do - {:ok, abstract_code} -> - type_docs = for {:attribute, _, :typedoc, tup} <- abstract_code, do: tup - :lists.flatten(type_docs) - _ -> - nil + IO.write :stderr, "Kernel.Typespec.beam_typedocs/1 is deprecated, please use Code.get_docs/2 instead\n" <> + Exception.format_stacktrace + if docs = Code.get_docs(module, :type_docs) do + for {tuple, _, _, doc} <- docs, do: {tuple, doc} end end @doc """ - Returns all types available from the module's beam code. + Returns all types available from the module's BEAM code. The result is returned as a list of tuples where the first element is the type (`:typep`, `:type` and `:opaque`). - The module must have a corresponding beam file which can be + The module must have a corresponding BEAM file which can be located by the runtime system. """ @spec beam_types(module | binary) :: [tuple] | nil @@ -423,12 +256,12 @@ defmodule Kernel.Typespec do end @doc """ - Returns all specs available from the module's beam code. + Returns all specs available from the module's BEAM code. The result is returned as a list of tuples where the first element is spec name and arity and the second is the spec. - The module must have a corresponding beam file which can be + The module must have a corresponding BEAM file which can be located by the runtime system. """ @spec beam_specs(module | binary) :: [tuple] | nil @@ -437,12 +270,12 @@ defmodule Kernel.Typespec do end @doc """ - Returns all callbacks available from the module's beam code. + Returns all callbacks available from the module's BEAM code. The result is returned as a list of tuples where the first element is spec name and arity and the second is the spec. - The module must have a corresponding beam file + The module must have a corresponding BEAM file which can be located by the runtime system. 
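For example, assuming a module such as `GenServer`, whose BEAM file ships with Elixir and contains abstract code, each entry pairs a callback name and arity with its spec in Erlang's abstract format (the output below is abbreviated):

    Kernel.Typespec.beam_callbacks(GenServer)
    #=> [{{:code_change, 3}, [...]}, {{:handle_call, 3}, [...]}, ...]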
""" @spec beam_callbacks(module | binary) :: [tuple] | nil @@ -488,31 +321,71 @@ defmodule Kernel.Typespec do do: type_to_signature(other) @doc false - def type_to_signature({:::, _, [{name, _, nil}, _]}), + def type_to_signature({:::, _, [{name, _, context}, _]}) when is_atom(name) and is_atom(context), do: {name, 0} - def type_to_signature({:::, _, [{name, _, args}, _]}), + def type_to_signature({:::, _, [{name, _, args}, _]}) when is_atom(name), do: {name, length(args)} + def type_to_signature(_), + do: :error ## Macro callbacks @doc false - def defspec(kind, expr, caller) do - Module.store_typespec(caller.module, kind, {kind, expr, caller}) + def defspec(kind, expr, line, file, module, pos) when kind in [:callback, :macrocallback] do + case spec_to_signature(expr) do + {name, arity} -> + store_callbackdoc(line, file, module, kind, name, arity) + :error -> + :error + end + Module.store_typespec(module, kind, {kind, expr, pos}) + end + + @doc false + def defspec(kind, expr, _line, _file, module, pos) do + Module.store_typespec(module, kind, {kind, expr, pos}) + end + + defp store_callbackdoc(line, _file, module, kind, name, arity) do + table = :elixir_module.data_table(module) + {line, doc} = get_doc_info(table, :doc, line) + :ets.insert(table, {{:callbackdoc, {name, arity}}, line, kind, doc}) + end + + defp get_doc_info(table, attr, line) do + case :ets.take(table, attr) do + [{^attr, {line, doc}, _, _}] -> {line, doc} + [] -> {line, nil} + end end @doc false - def deftype(kind, expr, caller) do - module = caller.module - doc = Module.get_attribute(module, :typedoc) + def deftype(kind, expr, line, file, module, pos) do + case type_to_signature(expr) do + {name, arity} -> store_typedoc(line, file, module, kind, name, arity) + :error -> :error + end + Module.store_typespec(module, kind, {kind, expr, pos}) + end + + defp store_typedoc(line, file, module, kind, name, arity) do + table = :elixir_module.data_table(module) + {line, doc} = get_doc_info(table, :typedoc, line) - Module.delete_attribute(module, :typedoc) - Module.store_typespec(module, kind, {kind, expr, doc, caller}) + if kind == :typep && doc do + :elixir_errors.warn(line, file, "type #{name}/#{arity} is private, " <> + "@typedoc's are always discarded for private types") + end + + :ets.insert(table, {{:typedoc, {name, arity}}, line, kind, doc}) end ## Translation from Elixir AST to typespec AST @doc false - def translate_type(kind, {:::, _, [{name, _, args}, definition]}, doc, caller) when is_atom(name) and name != ::: do + def translate_type(kind, {:::, _, [{name, _, args}, definition]}, pos) when is_atom(name) and name != ::: do + caller = :elixir_locals.get_cached_env(pos) + args = if is_atom(args) do [] @@ -520,12 +393,11 @@ defmodule Kernel.Typespec do for(arg <- args, do: variable(arg)) end - vars = for {:var, _, var} <- args, do: var - spec = typespec(definition, vars, caller) - - vars = for {:var, _, _} = var <- args, do: var - type = {name, spec, vars} - arity = length(vars) + vars = for {:var, _, var} <- args, do: var + spec = typespec(definition, vars, caller) + vars = for {:var, _, _} = var <- args, do: var + type = {name, spec, vars} + arity = length(vars) {kind, export} = case kind do @@ -534,53 +406,96 @@ defmodule Kernel.Typespec do :opaque -> {:opaque, true} end - if not export and doc do - :elixir_errors.warn(caller.line, caller.file, "type #{name}/#{arity} is private, " <> - "@typedoc's are always discarded for private types\n") + if builtin_type?(name, arity) do + compile_error caller, "type #{name}/#{arity} 
is a builtin type and it cannot be redefined" end - {{kind, {name, arity}, type}, caller.line, export, doc} + {{kind, {name, arity}, type}, caller.line, export} end - def translate_type(_kind, other, _doc, caller) do + def translate_type(_kind, other, pos) do + caller = :elixir_locals.get_cached_env(pos) type_spec = Macro.to_string(other) compile_error caller, "invalid type specification: #{type_spec}" end + defp builtin_type?(:as_boolean, 1), do: true + defp builtin_type?(:struct, 0), do: true + defp builtin_type?(:charlist, 0), do: true + # TODO: Remove char_list type by 2.0 + defp builtin_type?(:char_list, 0), do: true + defp builtin_type?(:nonempty_charlist, 0), do: true + defp builtin_type?(:keyword, 0), do: true + defp builtin_type?(:keyword, 1), do: true + defp builtin_type?(name, arity), do: :erl_internal.is_type(name, arity) + @doc false - def translate_spec(kind, {:when, _meta, [spec, guard]}, caller) do + def translate_spec(kind, {:when, _meta, [spec, guard]}, pos) do + caller = :elixir_locals.get_cached_env(pos) translate_spec(kind, spec, guard, caller) end - def translate_spec(kind, spec, caller) do + def translate_spec(kind, spec, pos) do + caller = :elixir_locals.get_cached_env(pos) translate_spec(kind, spec, [], caller) end - defp translate_spec(kind, {:::, meta, [{name, _, args}, return]}, guard, caller) when is_atom(name) and name != ::: do - if is_atom(args), do: args = [] + defp translate_spec(kind, {:::, meta, [{name, _, args}, return]}, guard, caller) + when is_atom(name) and name != ::: do + translate_spec(kind, meta, name, args, return, guard, caller) + end + + defp translate_spec(_kind, {name, _meta, _args} = spec, _guard, caller) when is_atom(name) and name != ::: do + spec = Macro.to_string(spec) + compile_error caller, "type specification missing return type: #{spec}" + end + + defp translate_spec(_kind, spec, _guard, caller) do + spec = Macro.to_string(spec) + compile_error caller, "invalid type specification: #{spec}" + end + + defp translate_spec(kind, meta, name, args, return, guard, caller) when is_atom(args), + do: translate_spec(kind, meta, name, [], return, guard, caller) + defp translate_spec(kind, meta, name, args, return, guard, caller) do + ensure_no_defaults!(args) unless Keyword.keyword?(guard) do - guard = Macro.to_string(guard) - compile_error caller, "expected keywords as guard in function type specification, got: #{guard}" + compile_error caller, "expected keywords as guard in type specification, " <> + "got: #{Macro.to_string(guard)}" end vars = Keyword.keys(guard) - constraints = guard_to_constraints(guard, vars, meta, caller) - spec = {:type, line(meta), :fun, fn_args(meta, args, return, vars, caller)} - if constraints != [] do - spec = {:type, line(meta), :bounded_fun, [spec, constraints]} - end + + spec = + case guard_to_constraints(guard, vars, meta, caller) do + [] -> spec + constraints -> {:type, line(meta), :bounded_fun, [spec, constraints]} + end arity = length(args) {{kind, {name, arity}, spec}, caller.line} end - defp translate_spec(_kind, spec, _guard, caller) do - spec = Macro.to_string(spec) - compile_error caller, "invalid function type specification: #{spec}" + defp ensure_no_defaults!(args) do + :lists.foreach fn + {:::, _, [left, right]} -> + ensure_not_default(left) + ensure_not_default(right) + left + other -> + ensure_not_default(other) + other + end, args end + defp ensure_not_default({:\\, _, [_, _]}) do + raise ArgumentError, "default arguments \\\\ not supported in type spec" + end + + defp ensure_not_default(_), do: 
:ok + defp guard_to_constraints(guard, vars, meta, caller) do line = line(meta) @@ -590,7 +505,7 @@ defmodule Kernel.Typespec do {name, type}, acc -> constraint = [{:atom, line, :is_subtype}, [{:var, line, name}, typespec(type, vars, caller)]] type = {:type, line, :constraint, constraint} - [type|acc] + [type | acc] end, [], guard) |> :lists.reverse end @@ -624,6 +539,10 @@ defmodule Kernel.Typespec do [] end + defp typespec_to_ast({:user_type, line, name, args}) do + typespec_to_ast({:type, line, name, args}) + end + defp typespec_to_ast({:type, line, :tuple, :any}) do {:tuple, [line: line], []} end @@ -640,13 +559,36 @@ defmodule Kernel.Typespec do end end - defp typespec_to_ast({:type, _line, :list, args}) do - for arg <- args, do: typespec_to_ast(arg) + defp typespec_to_ast({:type, line, :list, []}) do + {:list, [line: line], []} + end + + defp typespec_to_ast({:type, _line, :list, [arg]}) do + [typespec_to_ast(arg)] + end + + defp typespec_to_ast({:type, line, :nonempty_list, []}) do + [{:..., [line: line], nil}] + end + + defp typespec_to_ast({:type, line, :nonempty_list, [arg]}) do + [typespec_to_ast(arg), {:..., [line: line], nil}] + end + + defp typespec_to_ast({:type, line, :map, :any}) do + {:map, [line: line], []} end defp typespec_to_ast({:type, line, :map, fields}) do - fields = Enum.map fields, fn {:type, _, :map_field_assoc, k, v} -> - {typespec_to_ast(k), typespec_to_ast(v)} + fields = Enum.map fields, fn + {:type, _, :map_field_assoc, :any} -> + {{:optional, [], [{:any, [], []}]}, {:any, [], []}} + {:type, _, :map_field_exact, [{:atom, _, k}, v]} -> + {k, typespec_to_ast(v)} + {:type, _, :map_field_exact, [k, v]} -> + {{:required, [], [typespec_to_ast(k)]}, typespec_to_ast(v)} + {:type, _, :map_field_assoc, [k, v]} -> + {{:optional, [], [typespec_to_ast(k)]}, typespec_to_ast(v)} end {struct, fields} = Keyword.pop(fields, :__struct__) @@ -661,13 +603,13 @@ defmodule Kernel.Typespec do defp typespec_to_ast({:type, line, :binary, [arg1, arg2]}) do [arg1, arg2] = for arg <- [arg1, arg2], do: typespec_to_ast(arg) - cond do - arg2 == 0 -> + case {typespec_to_ast(arg1), typespec_to_ast(arg2)} do + {arg1, 0} -> quote line: line, do: <<_ :: unquote(arg1)>> - arg1 == 0 -> + {0, arg2} -> quote line: line, do: <<_ :: _ * unquote(arg2)>> - true -> - quote line: line, do: <<_ :: unquote(arg1) * unquote(arg2)>> + {arg1, arg2} -> + quote line: line, do: <<_ :: unquote(arg1), _ :: _ * unquote(arg2)>> end end @@ -686,11 +628,15 @@ defmodule Kernel.Typespec do end defp typespec_to_ast({:type, line, :fun, []}) do - typespec_to_ast({:type, line, :fun, [{:type, line, :any}, {:type, line, :any, []} ]}) + typespec_to_ast({:type, line, :fun, [{:type, line, :any}, {:type, line, :any, []}]}) end defp typespec_to_ast({:type, line, :range, [left, right]}) do - {:"..", [line: line], [typespec_to_ast(left), typespec_to_ast(right)]} + {:.., [line: line], [typespec_to_ast(left), typespec_to_ast(right)]} + end + + defp typespec_to_ast({:type, _line, nil, []}) do + [] end defp typespec_to_ast({:type, line, name, args}) do @@ -707,14 +653,28 @@ defmodule Kernel.Typespec do end # Special shortcut(s) - defp typespec_to_ast({:remote_type, line, [{:atom, _, :elixir}, {:atom, _, :char_list}, []]}) do - typespec_to_ast({:type, line, :char_list, []}) + # TODO: Remove char_list type by 2.0 + defp typespec_to_ast({:remote_type, line, [{:atom, _, :elixir}, {:atom, _, type}, []]}) + when type in [:charlist, :char_list] do + typespec_to_ast({:type, line, :charlist, []}) + end + + defp typespec_to_ast({:remote_type, 
line, [{:atom, _, :elixir}, {:atom, _, :nonempty_charlist}, []]}) do + typespec_to_ast({:type, line, :nonempty_charlist, []}) + end + + defp typespec_to_ast({:remote_type, line, [{:atom, _, :elixir}, {:atom, _, :struct}, []]}) do + typespec_to_ast({:type, line, :struct, []}) end defp typespec_to_ast({:remote_type, line, [{:atom, _, :elixir}, {:atom, _, :as_boolean}, [arg]]}) do typespec_to_ast({:type, line, :as_boolean, [arg]}) end + defp typespec_to_ast({:remote_type, line, [{:atom, _, :elixir}, {:atom, _, :keyword}, args]}) do + typespec_to_ast({:type, line, :keyword, args}) + end + defp typespec_to_ast({:remote_type, line, [mod, name, args]}) do args = for arg <- args, do: typespec_to_ast(arg) dot = {:., [line: line], [typespec_to_ast(mod), typespec_to_ast(name)]} @@ -747,9 +707,9 @@ defmodule Kernel.Typespec do defp erl_to_ex_var(var) do case Atom.to_string(var) do - <<"_", c :: [binary, size(1)], rest :: binary>> -> + <<"_", c::binary-1, rest::binary>> -> String.to_atom("_#{String.downcase(c)}#{rest}") - <> -> + <> -> String.to_atom("#{String.downcase(c)}#{rest}") end end @@ -775,29 +735,117 @@ defmodule Kernel.Typespec do {:type, line(meta), :binary, [{:integer, line(meta), 0}, {:integer, line(meta), 0}]} end - defp typespec({:<<>>, meta, [{:::, _, [{:_, meta1, atom}, {:*, _, [{:_, meta2, atom}, unit]}]}]}, _, _) when is_atom(atom) do - {:type, line(meta), :binary, [{:integer, line(meta1), 0}, {:integer, line(meta2), unit}]} + defp typespec({:<<>>, meta, [{:::, unit_meta, [{:_, _, ctx1}, {:*, _, [{:_, _, ctx2}, unit]}]}]}, _, _) + when is_atom(ctx1) and is_atom(ctx2) and is_integer(unit) do + {:type, line(meta), :binary, [{:integer, line(meta), 0}, {:integer, line(unit_meta), unit}]} end - defp typespec({:<<>>, meta, [{:::, meta1, [{:_, meta2, atom}, base]}]}, _, _) when is_atom(atom) do - {:type, line(meta), :binary, [{:integer, line(meta1), base}, {:integer, line(meta2), 0}]} + defp typespec({:<<>>, meta, [{:::, size_meta, [{:_, _, ctx}, size]}]}, _, _) + when is_atom(ctx) and is_integer(size) do + {:type, line(meta), :binary, [{:integer, line(size_meta), size}, {:integer, line(meta), 0}]} + end + + defp typespec({:<<>>, meta, [{:::, size_meta, [{:_, _, ctx1}, size]}, {:::, unit_meta, [{:_, _, ctx2}, {:*, _, [{:_, _, ctx3}, unit]}]}]}, _, _) + when is_atom(ctx1) and is_atom(ctx2) and is_atom(ctx3) and is_integer(size) and is_integer(unit) do + {:type, line(meta), :binary, [{:integer, line(size_meta), size}, {:integer, line(unit_meta), unit}]} end ## Handle maps and structs - defp typespec({:%{}, meta, fields}, vars, caller) do - fields = :lists.map(fn {k, v} -> - {:type, line(meta), :map_field_assoc, typespec(k, vars, caller), typespec(v, vars, caller)} - end, fields) + defp typespec({:map, meta, args}, _vars, _caller) when args == [] or is_atom(args) do + {:type, line(meta), :map, :any} + end + + defp typespec({:%{}, meta, fields} = map, vars, caller) do + fields = + :lists.map(fn + {k, v} when is_atom(k) -> + {:type, line(meta), :map_field_exact, [typespec(k, vars, caller), typespec(v, vars, caller)]} + {{:required, meta2, [k]}, v} -> + {:type, line(meta2), :map_field_exact, [typespec(k, vars, caller), typespec(v, vars, caller)]} + {{:optional, meta2, [k]}, v} -> + {:type, line(meta2), :map_field_assoc, [typespec(k, vars, caller), typespec(v, vars, caller)]} + {k, v} -> + # TODO: Emit warnings on v1.6 (when we drop OTP 18 support) + # :elixir_errors.warn(caller.line, caller.file, + # "invalid map specification. 
%{foo => bar} is deprecated in favor of " <> + # "%{required(foo) => bar} and %{optional(foo) => bar}. required/1 is an " <> + # "OTP 19 only feature, if you are targeting OTP 18 use optional/1.") + {:type, line(meta), :map_field_assoc, [typespec(k, vars, caller), typespec(v, vars, caller)]} + {:|, _, [_, _]} -> + compile_error(caller, + "invalid map specification. When using the | operator in the map key, " <> + "make sure to wrap the key type in parentheses: #{Macro.to_string(map)}") + _ -> + compile_error(caller, "invalid map specification: #{Macro.to_string(map)}") + end, fields) + {:type, line(meta), :map, fields} end defp typespec({:%, _, [name, {:%{}, meta, fields}]}, vars, caller) do - typespec({:%{}, meta, [{:__struct__, name}|fields]}, vars, caller) + # We cannot set a function name to avoid tracking + # as a compile time dependency, because for structs it actually is one. + module = Macro.expand(name, caller) + + struct = + if module == caller.module do + Module.get_attribute(module, :struct) || + compile_error(caller, "struct is not defined for #{Macro.to_string(name)}") + else + module.__struct__ + end + + struct = struct |> Map.from_struct |> Map.to_list + + unless Keyword.keyword?(fields) do + compile_error(caller, "expected key-value pairs in struct #{Macro.to_string(name)}") + end + + types = + :lists.map(fn {field, _} -> + {field, Keyword.get(fields, field, quote(do: term()))} + end, struct) + + :lists.foreach(fn {field, _} -> + unless Keyword.has_key?(struct, field) do + compile_error(caller, "undefined field #{field} on struct #{Macro.to_string(name)}") + end + end, fields) + + typespec({:%{}, meta, [__struct__: module] ++ types}, vars, caller) + end + + # Handle records + defp typespec({:record, meta, [atom]}, vars, caller) do + typespec({:record, meta, [atom, []]}, vars, caller) + end + + defp typespec({:record, meta, [atom, fields]}, vars, caller) do + # We cannot set a function name to avoid tracking + # as a compile time dependency because for records it actually is one. + case Macro.expand({atom, [], [{atom, [], []}]}, caller) do + keyword when is_list(keyword) -> + types = + :lists.map(fn {field, _} -> + Keyword.get(fields, field, quote(do: term())) + end, keyword) + + :lists.foreach(fn {field, _} -> + unless Keyword.has_key?(keyword, field) do + compile_error(caller, "undefined field #{field} on record #{inspect atom}") + end + end, fields) + + typespec({:{}, meta, [atom | types]}, vars, caller) + _ -> + compile_error(caller, "unknown record #{inspect atom}") + end end # Handle ranges defp typespec({:.., meta, args}, vars, caller) do - typespec({:range, meta, args}, vars, caller) + args = for arg <- args, do: typespec(arg, vars, caller) + {:type, line(meta), :range, args} end # Handle special forms @@ -806,7 +854,9 @@ defmodule Kernel.Typespec do end defp typespec({:__aliases__, _, _} = alias, vars, caller) do - atom = Macro.expand alias, caller + # We set a function name to avoid tracking + # aliases in typespecs as compile time dependencies. 
+ atom = Macro.expand(alias, %{caller | function: {:typespec, 0}}) typespec(atom, vars, caller) end @@ -818,7 +868,7 @@ defmodule Kernel.Typespec do # Handle type operator defp typespec({:::, meta, [var, expr]}, vars, caller) do - left = typespec(var, [elem(var, 0)|vars], caller) + left = typespec(var, [elem(var, 0) | vars], caller) right = typespec(expr, vars, caller) {:ann_type, line(meta), [left, right]} end @@ -828,9 +878,24 @@ defmodule Kernel.Typespec do {:op, line(meta), op, {:integer, line(meta), integer}} end + # Handle remote calls in the form of @module_attribute.type. + # These are not handled by the general remote type clause as calling + # Macro.expand/2 on the remote does not expand module attributes (but expands + # things like __MODULE__). + defp typespec({{:., meta, [{:@, _, [{attr, _, _}]}, name]}, _, args} = orig, vars, caller) do + remote = Module.get_attribute(caller.module, attr) + unless is_atom(remote) and remote != nil do + message = "invalid remote in typespec: #{Macro.to_string(orig)} (@#{attr} is #{inspect remote})" + compile_error(caller, message) + end + remote_type({typespec(remote, vars, caller), meta, typespec(name, vars, caller), args}, vars, caller) + end + # Handle remote calls defp typespec({{:., meta, [remote, name]}, _, args} = orig, vars, caller) do - remote = Macro.expand remote, caller + # We set a function name to avoid tracking + # aliases in typespecs as compile time dependencies. + remote = Macro.expand(remote, %{caller | function: {:typespec, 0}}) unless is_atom(remote) do compile_error(caller, "invalid remote in typespec: #{Macro.to_string(orig)}") end @@ -867,23 +932,59 @@ defmodule Kernel.Typespec do # Handle local calls defp typespec({:string, meta, arguments}, vars, caller) do - :elixir_errors.warn caller.line, caller.file, "string() type use is discouraged. For character lists, use " <> - "char_list() type, for strings, String.t()\n#{Exception.format_stacktrace(Macro.Env.stacktrace(caller))}" + :elixir_errors.warn caller.line, caller.file, + "string() type use is discouraged. " <> + "For character lists, use charlist() type, for strings, String.t()\n" <> + Exception.format_stacktrace(Macro.Env.stacktrace(caller)) + arguments = for arg <- arguments, do: typespec(arg, vars, caller) {:type, line(meta), :string, arguments} end - defp typespec({:char_list, _meta, []}, vars, caller) do - typespec((quote do: :elixir.char_list()), vars, caller) + defp typespec({:nonempty_string, meta, arguments}, vars, caller) do + :elixir_errors.warn caller.line, caller.file, + "nonempty_string() type use is discouraged. 
" <> + "For non-empty character lists, use nonempty_charlist() type, for strings, String.t()\n" <> + Exception.format_stacktrace(Macro.Env.stacktrace(caller)) + + arguments = for arg <- arguments, do: typespec(arg, vars, caller) + {:type, line(meta), :nonempty_string, arguments} + end + + # TODO: Remove char_list type by 2.0 + defp typespec({type, _meta, []}, vars, caller) when type in [:charlist, :char_list] do + if type == :char_list do + :elixir_errors.warn caller.line, caller.file, "the char_list() type is deprecated, use charlist()" + end + typespec((quote do: :elixir.charlist()), vars, caller) + end + + defp typespec({:nonempty_charlist, _meta, []}, vars, caller) do + typespec((quote do: :elixir.nonempty_charlist()), vars, caller) + end + + defp typespec({:struct, _meta, []}, vars, caller) do + typespec((quote do: :elixir.struct()), vars, caller) end defp typespec({:as_boolean, _meta, [arg]}, vars, caller) do typespec((quote do: :elixir.as_boolean(unquote(arg))), vars, caller) end + defp typespec({:keyword, _meta, args}, vars, caller) when length(args) <= 1 do + typespec((quote do: :elixir.keyword(unquote_splicing(args))), vars, caller) + end + + defp typespec({:fun, meta, args}, vars, caller) do + args = for arg <- args, do: typespec(arg, vars, caller) + {:type, line(meta), :fun, args} + end + defp typespec({name, meta, arguments}, vars, caller) do arguments = for arg <- arguments, do: typespec(arg, vars, caller) - {:type, line(meta), name, arguments} + arity = length(arguments) + type = if :erl_internal.is_type(name, arity), do: :type, else: :user_type + {type, line(meta), name, arguments} end # Handle literals @@ -899,22 +1000,30 @@ defmodule Kernel.Typespec do typespec({nil, [], []}, vars, caller) end - defp typespec([spec], vars, caller) do - typespec({:list, [], [spec]}, vars, caller) + defp typespec([{:..., _, atom}], vars, caller) when is_atom(atom) do + typespec({:nonempty_list, [], []}, vars, caller) end - defp typespec([spec, {:"...", _, quoted}], vars, caller) when is_atom(quoted) do + defp typespec([spec, {:..., _, atom}], vars, caller) when is_atom(atom) do typespec({:nonempty_list, [], [spec]}, vars, caller) end - defp typespec(list, vars, caller) do - [h|t] = :lists.reverse(list) + defp typespec([spec], vars, caller) do + typespec({:list, [], [spec]}, vars, caller) + end + + defp typespec(list, vars, caller) when is_list(list) do + [h | t] = :lists.reverse(list) union = :lists.foldl(fn(x, acc) -> {:|, [], [validate_kw(x, list, caller), acc]} end, validate_kw(h, list, caller), t) typespec({:list, [], [union]}, vars, caller) end + defp typespec(other, _vars, caller) do + compile_error(caller, "unexpected expression in typespec: #{Macro.to_string other}") + end + ## Helpers defp compile_error(caller, desc) do @@ -923,10 +1032,10 @@ defmodule Kernel.Typespec do defp remote_type({remote, meta, name, arguments}, vars, caller) do arguments = for arg <- arguments, do: typespec(arg, vars, caller) - {:remote_type, line(meta), [ remote, name, arguments ]} + {:remote_type, line(meta), [remote, name, arguments]} end - defp collect_union({:|, _, [a, b]}), do: [a|collect_union(b)] + defp collect_union({:|, _, [a, b]}), do: [a | collect_union(b)] defp collect_union(v), do: [v] defp validate_kw({key, _} = t, _, _caller) when is_atom(key), do: t @@ -941,7 +1050,7 @@ defmodule Kernel.Typespec do end end - defp fn_args(meta, [{:"...", _, _}], _vars, _caller) do + defp fn_args(meta, [{:..., _, _}], _vars, _caller) do {:type, line(meta), :any} end @@ -954,8 +1063,8 @@ defmodule 
Kernel.Typespec do {:var, line(meta), name} end - defp unpack_typespec_kw([{:type, _, :tuple, [{:atom, _, atom}, type]}|t], acc) do - unpack_typespec_kw(t, [{atom, typespec_to_ast(type)}|acc]) + defp unpack_typespec_kw([{:type, _, :tuple, [{:atom, _, atom}, type]} | t], acc) do + unpack_typespec_kw(t, [{atom, typespec_to_ast(type)} | acc]) end defp unpack_typespec_kw([], acc) do diff --git a/lib/elixir/lib/kernel/utils.ex b/lib/elixir/lib/kernel/utils.ex new file mode 100644 index 00000000000..f0e707ed264 --- /dev/null +++ b/lib/elixir/lib/kernel/utils.ex @@ -0,0 +1,125 @@ +import Kernel, except: [destructure: 2, defdelegate: 2, defstruct: 2] + +defmodule Kernel.Utils do + @moduledoc false + + @doc """ + Callback for destructure. + """ + def destructure(list, count) when is_list(list) and is_integer(count) and count >= 0, + do: destructure_list(list, count) + def destructure(nil, count) when is_integer(count) and count >= 0, + do: destructure_nil(count) + + defp destructure_list(_, 0), do: [] + defp destructure_list([], count), do: destructure_nil(count) + defp destructure_list([h | t], count), do: [h | destructure_list(t, count - 1)] + + defp destructure_nil(0), do: [] + defp destructure_nil(count), do: [nil | destructure_nil(count - 1)] + + @doc """ + Callback for defdelegate. + """ + def defdelegate(fun, opts) when is_list(opts) do + # TODO: Remove by 2.0 + append_first? = Keyword.get(opts, :append_first, false) + + {name, args} = + case Macro.decompose_call(fun) do + {_, _} = pair -> pair + _ -> raise ArgumentError, "invalid syntax in defdelegate #{Macro.to_string(fun)}" + end + + as = Keyword.get(opts, :as, name) + as_args = build_as_args(args, append_first?) + + {name, args, as, as_args} + end + + defp build_as_args(args, append_first?) do + as_args = :lists.map(&build_as_arg/1, args) + + case append_first? do + true -> tl(as_args) ++ [hd(as_args)] + false -> as_args + end + end + + defp build_as_arg({:\\, _, [arg, _default_arg]}), do: validate_arg(arg) + defp build_as_arg(arg), do: validate_arg(arg) + + defp validate_arg({name, _, mod} = arg) when is_atom(name) and is_atom(mod) do + arg + end + + defp validate_arg(ast) do + raise ArgumentError, "defdelegate/2 only accepts function parameters, got: #{Macro.to_string(ast)}" + end + + @doc """ + Callback for defstruct. + """ + def defstruct(module, fields) do + case fields do + fs when is_list(fs) -> + :ok + other -> + raise ArgumentError, "struct fields definition must be list, got: #{inspect other}" + end + + fields = :lists.map(fn + {key, val} when is_atom(key) -> + try do + Macro.escape(val) + rescue + e in [ArgumentError] -> + raise ArgumentError, "invalid value for struct field #{key}, " <> Exception.message(e) + else + _ -> {key, val} + end + key when is_atom(key) -> + {key, nil} + other -> + raise ArgumentError, "struct field names must be atoms, got: #{inspect other}" + end, fields) + + enforce_keys = List.wrap(Module.get_attribute(module, :enforce_keys)) + + :lists.foreach(fn + key when is_atom(key) -> :ok + key -> raise ArgumentError, "keys given to @enforce_keys must be atoms, got: #{inspect key}" + end, enforce_keys) + + {:maps.put(:__struct__, module, :maps.from_list(fields)), + enforce_keys, + Module.get_attribute(module, :derive)} + end + + @doc """ + Announcing callback for defstruct. + """ + def announce_struct(module) do + case :erlang.get(:elixir_compiler_pid) do + :undefined -> :ok + pid -> send(pid, {:struct_available, module}) + end + end + + @doc """ + Callback for raise. 
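To make the role of these compiler callbacks concrete, here is a short sketch of what `Kernel.Utils.destructure/2` computes; the results follow directly from the clauses above, and the final call shows the public `destructure/2` macro that relies on it:

    # Pads with nil when the list is shorter than the requested count...
    Kernel.Utils.destructure([1, 2, 3], 5)
    #=> [1, 2, 3, nil, nil]

    # ...and trims when it is longer.
    Kernel.Utils.destructure([1, 2, 3], 2)
    #=> [1, 2]

    # nil behaves like an empty list.
    Kernel.Utils.destructure(nil, 3)
    #=> [nil, nil, nil]

    # This is what lets the macro always bind every variable on the left:
    destructure([a, b, c], [1])
    # a == 1, b == nil, c == nil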
+ """ + def raise(msg) when is_binary(msg) do + RuntimeError.exception(msg) + end + def raise(atom) when is_atom(atom) do + atom.exception([]) + end + def raise(%{__struct__: struct, __exception__: true} = exception) when is_atom(struct) do + exception + end + def raise(other) do + ArgumentError.exception("raise/1 expects a module name, string or exception as " <> + "the first argument, got: #{inspect other}") + end +end diff --git a/lib/elixir/lib/keyword.ex b/lib/elixir/lib/keyword.ex index a991c6a4ccb..bc007f563e3 100644 --- a/lib/elixir/lib/keyword.ex +++ b/lib/elixir/lib/keyword.ex @@ -1,31 +1,63 @@ defmodule Keyword do @moduledoc """ - A keyword is a list of tuples where the first element - of the tuple is an atom and the second element can be - any value. + A set of functions for working with keywords. + + A keyword is a list of two-element tuples where the first + element of the tuple is an atom and the second element + can be any value. + + For example, the following is a keyword list: + + [{:exit_on_close, true}, {:active, :once}, {:packet_size, 1024}] + + Elixir provides a special and more concise syntax for keyword lists + that looks like this: + + [exit_on_close: true, active: :once, packet_size: 1024] + + This is also the syntax that Elixir uses to inspect keyword lists: + + iex> [{:active, :once}] + [active: :once] + + The two syntaxes are completely equivalent. Note that when keyword + lists are passed as the last argument to a function, if the short-hand + syntax is used then the square brackets around the keyword list can + be omitted as well. For example, the following: + + String.split("1-0", "-", trim: true, parts: 2) + + is equivalent to: + + String.split("1-0", "-", [trim: true, parts: 2]) A keyword may have duplicated keys so it is not strictly - a dictionary. However most of the functions in this module - behave exactly as a dictionary and mimic the API defined - by the `Dict` behaviour. + a key-value store. However most of the functions in this module + behave exactly as a dictionary so they work similarly to + the functions you would find in the `Map` module. - For example, `Keyword.get` will get the first entry matching + For example, `Keyword.get/3` will get the first entry matching the given key, regardless if duplicated entries exist. - Similarly, `Keyword.put` and `Keyword.delete` ensure all + Similarly, `Keyword.put/3` and `Keyword.delete/3` ensure all duplicated entries for a given key are removed when invoked. + Note that operations that require keys to be found in the keyword + list (like `Keyword.get/3`) need to traverse the list in order + to find keys, so these operations may be slower than their map + counterparts. A handful of functions exist to handle duplicated keys, in - particular, `from_enum` allows creating a new keywords without - removing duplicated keys, `get_values` returns all values for - a given key and `delete_first` deletes just one of the existing + particular, `Enum.into/2` allows creating new keywords without + removing duplicated keys, `get_values/2` returns all values for + a given key and `delete_first/2` deletes just one of the existing entries. - Since a keyword list is simply a list, all the operations defined - in `Enum` and `List` can also be applied. + The functions in `Keyword` do not guarantee any property when + it comes to ordering. However, since a keyword list is simply a + list, all the operations defined in `Enum` and `List` can be + applied too, especially when ordering is required. 
""" @compile :inline_list_funcs - @behaviour Dict @type key :: atom @type value :: any @@ -34,86 +66,114 @@ defmodule Keyword do @type t(value) :: [{key, value}] @doc """ - Checks if the given argument is a keywords list or not. + Returns `true` if `term` is a keyword list; otherwise returns `false`. + + ## Examples + + iex> Keyword.keyword?([]) + true + iex> Keyword.keyword?([a: 1]) + true + iex> Keyword.keyword?([{Foo, 1}]) + true + iex> Keyword.keyword?([{}]) + false + iex> Keyword.keyword?([:key]) + false + iex> Keyword.keyword?(%{}) + false + """ @spec keyword?(term) :: boolean - def keyword?([{key, _value} | rest]) when is_atom(key) do - keyword?(rest) - end + def keyword?(term) + def keyword?([{key, _value} | rest]) when is_atom(key), do: keyword?(rest) def keyword?([]), do: true def keyword?(_other), do: false @doc """ Returns an empty keyword list, i.e. an empty list. + + ## Examples + + iex> Keyword.new() + [] + """ - @spec new :: t - def new do - [] - end + @spec new :: [] + def new, do: [] @doc """ Creates a keyword from an enumerable. Duplicated entries are removed, the latest one prevails. - I.e. differently from `Enum.into(enumerable, [])`, - `Keyword.new(enumerable)` guarantees the keys are unique. + Unlike `Enum.into(enumerable, [])`, `Keyword.new(enumerable)` + guarantees the keys are unique. ## Examples iex> Keyword.new([{:b, 1}, {:a, 2}]) - [a: 2, b: 1] + [b: 1, a: 2] + + iex> Keyword.new([{:a, 1}, {:a, 2}, {:a, 3}]) + [a: 3] """ @spec new(Enum.t) :: t def new(pairs) do - Enum.reduce pairs, [], fn {k, v}, keywords -> - put(keywords, k, v) - end + new(pairs, fn pair -> pair end) end @doc """ Creates a keyword from an enumerable via the transformation function. Duplicated entries are removed, the latest one prevails. - I.e. differently from `Enum.into(enumerable, [], fun)`, + Unlike `Enum.into(enumerable, [], fun)`, `Keyword.new(enumerable, fun)` guarantees the keys are unique. ## Examples - iex> Keyword.new([:a, :b], fn (x) -> {x, x} end) |> Enum.sort + iex> Keyword.new([:a, :b], fn(x) -> {x, x} end) [a: :a, b: :b] """ - @spec new(Enum.t, ({key, value} -> {key, value})) :: t + @spec new(Enum.t, (term -> {key, value})) :: t def new(pairs, transform) do - Enum.reduce pairs, [], fn i, keywords -> - {k, v} = transform.(i) - put(keywords, k, v) + fun = fn el, acc -> + {k, v} = transform.(el) + put_new(acc, k, v) end + :lists.foldl(fun, [], Enum.reverse(pairs)) end @doc """ Gets the value for a specific `key`. - If `key` does not exist, return default value (`nil` if no default value). + If `key` does not exist, return the default value + (`nil` if no default value). If duplicated entries exist, the first one is returned. Use `get_values/2` to retrieve all entries. ## Examples + iex> Keyword.get([], :a) + nil iex> Keyword.get([a: 1], :a) 1 - iex> Keyword.get([a: 1], :b) nil - iex> Keyword.get([a: 1], :b, 3) 3 + With duplicated keys: + + iex> Keyword.get([a: 1, a: 2], :a, 3) + 1 + iex> Keyword.get([a: 1, a: 2], :b, 3) + 3 + """ - @spec get(t, key) :: value @spec get(t, key, value) :: value def get(keywords, key, default \\ nil) when is_list(keywords) and is_atom(key) do case :lists.keyfind(key, 1, keywords) do @@ -122,20 +182,168 @@ defmodule Keyword do end end + @doc """ + Gets the value for a specific `key`. + + If `key` does not exist, lazily evaluates `fun` and returns its result. + + This is useful if the default value is very expensive to calculate or + generally difficult to setup and teardown again. + + If duplicated entries exist, the first one is returned. 
+ Use `get_values/2` to retrieve all entries. + + ## Examples + + iex> keyword = [a: 1] + iex> fun = fn -> + ...> # some expensive operation here + ...> 13 + ...> end + iex> Keyword.get_lazy(keyword, :a, fun) + 1 + iex> Keyword.get_lazy(keyword, :b, fun) + 13 + + """ + @spec get_lazy(t, key, (() -> value)) :: value + def get_lazy(keywords, key, fun) + when is_list(keywords) and is_atom(key) and is_function(fun, 0) do + case :lists.keyfind(key, 1, keywords) do + {^key, value} -> value + false -> fun.() + end + end + + @doc """ + Gets the value from `key` and updates it, all in one pass. + + This `fun` argument receives the value of `key` (or `nil` if `key` + is not present) and must return a two-element tuple: the "get" value + (the retrieved value, which can be operated on before being returned) + and the new value to be stored under `key`. The `fun` may also + return `:pop`, implying the current value shall be removed from the + keyword list and returned. + + The returned value is a tuple with the "get" value returned by + `fun` and a new keyword list with the updated value under `key`. + + ## Examples + + iex> Keyword.get_and_update([a: 1], :a, fn current_value -> + ...> {current_value, "new value!"} + ...> end) + {1, [a: "new value!"]} + + iex> Keyword.get_and_update([a: 1], :b, fn current_value -> + ...> {current_value, "new value!"} + ...> end) + {nil, [b: "new value!", a: 1]} + + iex> Keyword.get_and_update([a: 1], :a, fn _ -> :pop end) + {1, []} + + iex> Keyword.get_and_update([a: 1], :b, fn _ -> :pop end) + {nil, [a: 1]} + + """ + @spec get_and_update(t, key, (value -> {get, value} | :pop)) :: {get, t} when get: term + def get_and_update(keywords, key, fun) + when is_list(keywords) and is_atom(key), + do: get_and_update(keywords, [], key, fun) + + defp get_and_update([{key, current} | t], acc, key, fun) do + case fun.(current) do + {get, value} -> + {get, :lists.reverse(acc, [{key, value} | t])} + :pop -> + {current, :lists.reverse(acc, t)} + other -> + raise "the given function must return a two-element tuple or :pop, got: #{inspect(other)}" + end + end + + defp get_and_update([{_, _} = h | t], acc, key, fun), + do: get_and_update(t, [h | acc], key, fun) + + defp get_and_update([], acc, key, fun) do + case fun.(nil) do + {get, update} -> + {get, [{key, update} | :lists.reverse(acc)]} + :pop -> + {nil, :lists.reverse(acc)} + other -> + raise "the given function must return a two-element tuple or :pop, got: #{inspect(other)}" + end + end + + @doc """ + Gets the value from `key` and updates it. Raises if there is no `key`. + + This `fun` argument receives the value of `key` and must return a + two-element tuple: the "get" value (the retrieved value, which can be + operated on before being returned) and the new value to be stored under + `key`. + + The returned value is a tuple with the "get" value returned by `fun` and a new + keyword list with the updated value under `key`. 
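A sketch of `get_and_update/3` performing a read-modify-write in a single pass; the counter is only an illustrative use:

    iex> Keyword.get_and_update([count: 1], :count, fn current ->
    ...>   {current, current + 1}
    ...> end)
    {1, [count: 2]}

    iex> Keyword.get_and_update([count: 1], :count, fn _ -> :pop end)
    {1, []}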
+ + ## Examples + + iex> Keyword.get_and_update!([a: 1], :a, fn current_value -> + ...> {current_value, "new value!"} + ...> end) + {1, [a: "new value!"]} + + iex> Keyword.get_and_update!([a: 1], :b, fn current_value -> + ...> {current_value, "new value!"} + ...> end) + ** (KeyError) key :b not found in: [a: 1] + + iex> Keyword.get_and_update!([a: 1], :a, fn _ -> + ...> :pop + ...> end) + {1, []} + + """ + @spec get_and_update!(t, key, (value -> {get, value})) :: {get, t} | no_return when get: term + def get_and_update!(keywords, key, fun) do + get_and_update!(keywords, key, fun, []) + end + + defp get_and_update!([{key, value} | keywords], key, fun, acc) do + case fun.(value) do + {get, value} -> + {get, :lists.reverse(acc, [{key, value} | delete(keywords, key)])} + :pop -> + {value, :lists.reverse(acc, keywords)} + other -> + raise "the given function must return a two-element tuple or :pop, got: #{inspect(other)}" + end + end + + defp get_and_update!([{_, _} = e | keywords], key, fun, acc) do + get_and_update!(keywords, key, fun, [e | acc]) + end + + defp get_and_update!([], key, _fun, acc) when is_atom(key) do + raise(KeyError, key: key, term: acc) + end + @doc """ Fetches the value for a specific `key` and returns it in a tuple. + If the `key` does not exist, returns `:error`. ## Examples iex> Keyword.fetch([a: 1], :a) {:ok, 1} - iex> Keyword.fetch([a: 1], :b) :error """ - @spec fetch(t, key) :: {:ok, value} + @spec fetch(t, key) :: {:ok, value} | :error def fetch(keywords, key) when is_list(keywords) and is_atom(key) do case :lists.keyfind(key, 1, keywords) do {^key, value} -> {:ok, value} @@ -144,14 +352,14 @@ defmodule Keyword do end @doc """ - Fetches the value for specific `key`. If `key` does not exist, - a `KeyError` is raised. + Fetches the value for specific `key`. + + If `key` does not exist, a `KeyError` is raised. ## Examples iex> Keyword.fetch!([a: 1], :a) 1 - iex> Keyword.fetch!([a: 1], :b) ** (KeyError) key :b not found in: [a: 1] @@ -169,31 +377,34 @@ defmodule Keyword do ## Examples + iex> Keyword.get_values([], :a) + [] + iex> Keyword.get_values([a: 1], :a) + [1] iex> Keyword.get_values([a: 1, a: 2], :a) - [1,2] + [1, 2] """ @spec get_values(t, key) :: [value] def get_values(keywords, key) when is_list(keywords) and is_atom(key) do fun = fn - {k, v} when k === key -> {true, v} + {^key, val} -> {true, val} {_, _} -> false end - :lists.filtermap(fun, keywords) end @doc """ - Returns all keys from the keyword list. Duplicated - keys appear duplicated in the final list of keys. + Returns all keys from the keyword list. + + Duplicated keys appear duplicated in the final list of keys. ## Examples iex> Keyword.keys([a: 1, b: 2]) - [:a,:b] - + [:a, :b] iex> Keyword.keys([a: 1, b: 2, a: 3]) - [:a,:b,:a] + [:a, :b, :a] """ @spec keys(t) :: [key] @@ -204,10 +415,14 @@ defmodule Keyword do @doc """ Returns all values from the keyword list. + Values from duplicated keys will be kept in the final list of values. + ## Examples iex> Keyword.values([a: 1, b: 2]) - [1,2] + [1, 2] + iex> Keyword.values([a: 1, b: 2, a: 3]) + [1, 2, 3] """ @spec values(t) :: [value] @@ -217,18 +432,19 @@ defmodule Keyword do @doc """ Deletes the entries in the keyword list for a `key` with `value`. + If no `key` with `value` exists, returns the keyword list unchanged. 
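The difference between `get/3` and `fetch/2` matters when `nil` is a legitimate value; a small sketch with made-up option names:

    iex> opts = [timeout: nil]
    iex> Keyword.get(opts, :timeout, 5000)  # key present, so the default is ignored
    nil
    iex> Keyword.fetch(opts, :timeout)
    {:ok, nil}
    iex> Keyword.fetch(opts, :retries)      # key truly missing
    :error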
## Examples iex> Keyword.delete([a: 1, b: 2], :a, 1) [b: 2] - iex> Keyword.delete([a: 1, b: 2, a: 3], :a, 3) [a: 1, b: 2] - - iex> Keyword.delete([b: 2], :a, 5) - [b: 2] + iex> Keyword.delete([a: 1], :a, 5) + [a: 1] + iex> Keyword.delete([a: 1], :b, 5) + [a: 1] """ @spec delete(t, key, value) :: t @@ -238,18 +454,17 @@ defmodule Keyword do @doc """ Deletes the entries in the keyword list for a specific `key`. + If the `key` does not exist, returns the keyword list unchanged. - Use `delete_first` to delete just the first entry in case of + Use `delete_first/2` to delete just the first entry in case of duplicated keys. ## Examples iex> Keyword.delete([a: 1, b: 2], :a) [b: 2] - iex> Keyword.delete([a: 1, b: 2, a: 3], :a) [b: 2] - iex> Keyword.delete([b: 2], :a) [b: 2] @@ -261,13 +476,13 @@ defmodule Keyword do @doc """ Deletes the first entry in the keyword list for a specific `key`. + If the `key` does not exist, returns the keyword list unchanged. ## Examples iex> Keyword.delete_first([a: 1, b: 2, a: 3], :a) [b: 2, a: 3] - iex> Keyword.delete_first([b: 2], :a) [b: 2] @@ -285,16 +500,46 @@ defmodule Keyword do ## Examples + iex> Keyword.put([a: 1], :b, 2) + [b: 2, a: 1] iex> Keyword.put([a: 1, b: 2], :a, 3) [a: 3, b: 2] - iex> Keyword.put([a: 1, b: 2, a: 4], :a, 3) [a: 3, b: 2] """ @spec put(t, key, value) :: t def put(keywords, key, value) when is_list(keywords) and is_atom(key) do - [{key, value}|delete(keywords, key)] + [{key, value} | delete(keywords, key)] + end + + @doc """ + Evaluates `fun` and puts the result under `key` + in keyword list unless `key` is already present. + + This is useful if the value is very expensive to calculate or + generally difficult to setup and teardown again. + + ## Examples + + iex> keyword = [a: 1] + iex> fun = fn -> + ...> # some expensive operation here + ...> 3 + ...> end + iex> Keyword.put_new_lazy(keyword, :a, fun) + [a: 1] + iex> Keyword.put_new_lazy(keyword, :b, fun) + [b: 3, a: 1] + + """ + @spec put_new_lazy(t, key, (() -> value)) :: t + def put_new_lazy(keywords, key, fun) + when is_list(keywords) and is_atom(key) and is_function(fun, 0) do + case :lists.keyfind(key, 1, keywords) do + {^key, _} -> keywords + false -> [{key, fun.()} | keywords] + end end @doc """ @@ -305,7 +550,6 @@ defmodule Keyword do iex> Keyword.put_new([a: 1], :b, 2) [b: 2, a: 1] - iex> Keyword.put_new([a: 1, b: 2], :a, 3) [a: 1, b: 2] @@ -314,18 +558,67 @@ defmodule Keyword do def put_new(keywords, key, value) when is_list(keywords) and is_atom(key) do case :lists.keyfind(key, 1, keywords) do {^key, _} -> keywords - false -> [{key, value}|keywords] + false -> [{key, value} | keywords] end end @doc """ - Checks if two keywords are equal. I.e. they contain + Alters the value stored under `key` to `value`, but only + if the entry `key` already exists in the keyword list. + + In the case a value is stored multiple times in the keyword list, + later occurrences are removed. + + ## Examples + + iex> Keyword.replace([a: 1], :b, 2) + [a: 1] + iex> Keyword.replace([a: 1, b: 2, a: 4], :a, 3) + [a: 3, b: 2] + + """ + @spec replace(t, key, value) :: t + def replace(keywords, key, value) when is_list(keywords) and is_atom(key) do + case :lists.keyfind(key, 1, keywords) do + {^key, _} -> [{key, value} | delete(keywords, key)] + false -> keywords + end + end + + @doc """ + Similar to `replace/3`, but will raise a `KeyError` + if the entry `key` does not exist. 
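The write variants differ only in when they actually write; a quick sketch:

    iex> opts = [a: 1]
    iex> Keyword.put(opts, :a, 2)      # always writes
    [a: 2]
    iex> Keyword.put_new(opts, :a, 2)  # writes only missing keys
    [a: 1]
    iex> Keyword.replace(opts, :b, 2)  # writes only existing keys
    [a: 1]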
+ + ## Examples + + iex> Keyword.replace!([a: 1, b: 2, a: 4], :a, 3) + [a: 3, b: 2] + iex> Keyword.replace!([a: 1], :b, 2) + ** (KeyError) key :b not found in: [a: 1] + + """ + @spec replace!(t, key, value) :: t + def replace!(keywords, key, value) when is_list(keywords) and is_atom(key) do + case :lists.keyfind(key, 1, keywords) do + {^key, _} -> [{key, value} | delete(keywords, key)] + false -> raise KeyError, key: key, term: keywords + end + end + + @doc """ + Checks if two keywords are equal. + + Two keywords are considered to be equal if they contain the same keys and those keys contain the same values. ## Examples iex> Keyword.equal?([a: 1, b: 2], [b: 2, a: 1]) true + iex> Keyword.equal?([a: 1, b: 2], [b: 1, a: 2]) + false + iex> Keyword.equal?([a: 1, b: 2, a: 3], [b: 2, a: 3, a: 1]) + true """ @spec equal?(t, t) :: boolean @@ -334,44 +627,100 @@ defmodule Keyword do end @doc """ - Merges two keyword lists into one. If they have duplicated - entries, the one given as second argument wins. + Merges two keyword lists into one. + + All keys, including duplicated keys, given in `keywords2` will be added + to `keywords1`, overriding any existing one. + + There are no guarantees about the order of keys in the returned keyword. ## Examples - iex> Keyword.merge([a: 1, b: 2], [a: 3, d: 4]) |> Enum.sort - [a: 3, b: 2, d: 4] + iex> Keyword.merge([a: 1, b: 2], [a: 3, d: 4]) + [b: 2, a: 3, d: 4] + + iex> Keyword.merge([a: 1, b: 2], [a: 3, d: 4, a: 5]) + [b: 2, a: 3, d: 4, a: 5] + + iex> Keyword.merge([a: 1], [2, 3]) + ** (ArgumentError) expected a keyword list as the second argument, got: [2, 3] """ @spec merge(t, t) :: t - def merge(d1, d2) when is_list(d1) and is_list(d2) do - fun = fn {k, _v} -> not has_key?(d2, k) end - d2 ++ :lists.filter(fun, d1) + def merge(keywords1, keywords2) when is_list(keywords1) and is_list(keywords2) do + if keyword?(keywords2) do + fun = fn + {key, _value} when is_atom(key) -> + not has_key?(keywords2, key) + _ -> + raise ArgumentError, message: "expected a keyword list as the first argument, got: #{inspect keywords1}" + end + :lists.filter(fun, keywords1) ++ keywords2 + else + raise ArgumentError, message: "expected a keyword list as the second argument, got: #{inspect keywords2}" + end end @doc """ - Merges two keyword lists into one. If they have duplicated - entries, the given function is invoked to solve conflicts. + Merges two keyword lists into one. + + All keys, including duplicated keys, given in `keywords2` will be added + to `keywords1`. The given function will be invoked to solve conflicts. + + If `keywords2` has duplicate keys, the given function will be invoked + for each matching pair in `keywords1`. + + There are no guarantees about the order of keys in the returned keyword. 
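A sketch of the two merge flavours, using made-up option names: with `merge/2` the second list wins, while `merge/3` resolves conflicts through the given function:

    iex> Keyword.merge([timeout: 5000, retries: 3], [timeout: 1000])
    [retries: 3, timeout: 1000]

    iex> Keyword.merge([a: 1], [a: 2], fn :a, v1, v2 -> max(v1, v2) end)
    [a: 2]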
## Examples - iex> Keyword.merge([a: 1, b: 2], [a: 3, d: 4], fn (_k, v1, v2) -> + iex> Keyword.merge([a: 1, b: 2], [a: 3, d: 4], fn _k, v1, v2 -> + ...> v1 + v2 + ...> end) + [b: 2, a: 4, d: 4] + + iex> Keyword.merge([a: 1, b: 2], [a: 3, d: 4, a: 5], fn :a, v1, v2 -> ...> v1 + v2 ...> end) - [a: 4, b: 2, d: 4] + [b: 2, a: 4, d: 4, a: 5] + + iex> Keyword.merge([a: 1, b: 2, a: 3], [a: 3, d: 4, a: 5], fn :a, v1, v2 -> + ...> v1 + v2 + ...> end) + [b: 2, a: 4, d: 4, a: 8] + + iex> Keyword.merge([a: 1, b: 2], [:a, :b], fn :a, v1, v2 -> + ...> v1 + v2 + ...> end) + ** (ArgumentError) expected a keyword list as the second argument, got: [:a, :b] """ @spec merge(t, t, (key, value, value -> value)) :: t - def merge(d1, d2, fun) when is_list(d1) and is_list(d2) do - do_merge(d2, d1, fun) + def merge(keywords1, keywords2, fun) when is_list(keywords1) and is_list(keywords2) and is_function(fun, 3) do + if keyword?(keywords1) do + do_merge(keywords2, [], keywords1, keywords1, fun, keywords2) + else + raise ArgumentError, message: "expected a keyword list as the first argument, got: #{inspect keywords1}" + end end - defp do_merge([{k, v2}|t], acc, fun) do - do_merge t, update(acc, k, v2, fn(v1) -> fun.(k, v1, v2) end), fun + defp do_merge([{key, value2} | tail], acc, rest, original, fun, keywords2) when is_atom(key) do + case :lists.keyfind(key, 1, original) do + {^key, value1} -> + do_merge(tail, [{key, fun.(key, value1, value2)} | acc], + delete(rest, key), :lists.keydelete(key, 1, original), fun, keywords2) + + false -> + do_merge(tail, [{key, value2} | acc], rest, original, fun, keywords2) + end + end + + defp do_merge([], acc, rest, _original, _fun, _keywords2) do + rest ++ :lists.reverse(acc) end - defp do_merge([], acc, _fun) do - acc + defp do_merge(_other, _acc, _rest, _original, _fun, keywords2) do + raise ArgumentError, message: "expected a keyword list as the second argument, got: #{inspect keywords2}" end @doc """ @@ -381,7 +730,6 @@ defmodule Keyword do iex> Keyword.has_key?([a: 1], :a) true - iex> Keyword.has_key?([a: 1], :b) false @@ -392,16 +740,19 @@ defmodule Keyword do end @doc """ - Updates the `key` with the given function. If the `key` does - not exist, raises `KeyError`. + Updates the `key` with the given function. + + If the `key` does not exist, raises `KeyError`. - If there are duplicated entries, they are all removed and only the first one + If there are duplicated keys, they are all removed and only the first one is updated. ## Examples iex> Keyword.update!([a: 1], :a, &(&1 * 2)) [a: 2] + iex> Keyword.update!([a: 1, a: 2], :a, &(&1 * 2)) + [a: 2] iex> Keyword.update!([a: 1], :b, &(&1 * 2)) ** (KeyError) key :b not found in: [a: 1] @@ -412,12 +763,12 @@ defmodule Keyword do update!(keywords, key, fun, keywords) end - defp update!([{key, value}|keywords], key, fun, _dict) do - [{key, fun.(value)}|delete(keywords, key)] + defp update!([{key, value} | keywords], key, fun, _dict) do + [{key, fun.(value)} | delete(keywords, key)] end - defp update!([{_, _} = e|keywords], key, fun, dict) do - [e|update!(keywords, key, fun, dict)] + defp update!([{_, _} = e | keywords], key, fun, dict) do + [e | update!(keywords, key, fun, dict)] end defp update!([], key, _fun, dict) when is_atom(key) do @@ -425,28 +776,32 @@ defmodule Keyword do end @doc """ - Updates the `key` with the given function. If the `key` does - not exist, inserts the given `initial` value. + Updates the `key` in `keywords` with the given function. 
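`update!/3` and `update/4` differ only in how a missing key is handled; a short sketch:

    iex> Keyword.update([a: 1], :a, 10, &(&1 * 2))
    [a: 2]
    iex> Keyword.update([b: 1], :a, 10, &(&1 * 2))  # missing key: the initial value is inserted
    [b: 1, a: 10]
    iex> Keyword.update!([b: 1], :a, &(&1 * 2))     # missing key: raises
    ** (KeyError) key :a not found in: [b: 1]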
- If there are duplicated entries, they are all removed and only the first one + If the `key` does not exist, inserts the given `initial` value. + + If there are duplicated keys, they are all removed and only the first one is updated. ## Examples iex> Keyword.update([a: 1], :a, 13, &(&1 * 2)) [a: 2] - + iex> Keyword.update([a: 1, a: 2], :a, 13, &(&1 * 2)) + [a: 2] iex> Keyword.update([a: 1], :b, 11, &(&1 * 2)) [a: 1, b: 11] """ @spec update(t, key, value, (value -> value)) :: t - def update([{key, value}|keywords], key, _initial, fun) do - [{key, fun.(value)}|delete(keywords, key)] + def update(keywords, key, initial, fun) + + def update([{key, value} | keywords], key, _initial, fun) do + [{key, fun.(value)} | delete(keywords, key)] end - def update([{_, _} = e|keywords], key, initial, fun) do - [e|update(keywords, key, initial, fun)] + def update([{_, _} = e | keywords], key, initial, fun) do + [e | update(keywords, key, initial, fun)] end def update([], key, initial, _fun) when is_atom(key) do @@ -455,29 +810,28 @@ defmodule Keyword do @doc """ Takes all entries corresponding to the given keys and extracts them into a - separate keyword list. Returns a tuple with the new list and the old list - with removed keys. + separate keyword list. + + Returns a tuple with the new list and the old list with removed keys. - Keys for which there are no entires in the keyword list are ignored. + Keys for which there are no entries in the keyword list are ignored. Entries with duplicated keys end up in the same keyword list. ## Examples - iex> d = [a: 1, b: 2, c: 3, d: 4] - iex> Keyword.split(d, [:a, :c, :e]) - {[a: 1, c: 3], [b: 2, d: 4]} - - iex> d = [a: 1, b: 2, c: 3, d: 4, a: 5] - iex> Keyword.split(d, [:a, :c, :e]) - {[a: 1, c: 3, a: 5], [b: 2, d: 4]} + iex> Keyword.split([a: 1, b: 2, c: 3], [:a, :c, :e]) + {[a: 1, c: 3], [b: 2]} + iex> Keyword.split([a: 1, b: 2, c: 3, a: 4], [:a, :c, :e]) + {[a: 1, c: 3, a: 4], [b: 2]} """ + @spec split(t, [key]) :: {t, t} def split(keywords, keys) when is_list(keywords) do fun = fn {k, v}, {take, drop} -> case k in keys do - true -> {[{k, v}|take], drop} - false -> {take, [{k, v}|drop]} + true -> {[{k, v} | take], drop} + false -> {take, [{k, v} | drop]} end end @@ -494,106 +848,139 @@ defmodule Keyword do ## Examples - iex> d = [a: 1, b: 2, c: 3, d: 4] - iex> Keyword.take(d, [:a, :c, :e]) + iex> Keyword.take([a: 1, b: 2, c: 3], [:a, :c, :e]) [a: 1, c: 3] - - iex> d = [a: 1, b: 2, c: 3, d: 4, a: 5] - iex> Keyword.take(d, [:a, :c, :e]) + iex> Keyword.take([a: 1, b: 2, c: 3, a: 5], [:a, :c, :e]) [a: 1, c: 3, a: 5] """ + @spec take(t, [key]) :: t def take(keywords, keys) when is_list(keywords) do :lists.filter(fn {k, _} -> k in keys end, keywords) end @doc """ - Drops the given keys from the dict. + Drops the given keys from the keyword list. Duplicated keys are preserved in the new keyword list. ## Examples - iex> d = [a: 1, b: 2, c: 3, d: 4] - iex> Keyword.drop(d, [:b, :d]) + iex> Keyword.drop([a: 1, b: 2, c: 3], [:b, :d]) [a: 1, c: 3] - - iex> d = [a: 1, b: 2, b: 3, c: 3, d: 4, a: 5] - iex> Keyword.drop(d, [:b, :d]) + iex> Keyword.drop([a: 1, b: 2, b: 3, c: 3, a: 5], [:b, :d]) [a: 1, c: 3, a: 5] """ + @spec drop(t, [key]) :: t def drop(keywords, keys) when is_list(keywords) do - :lists.filter(fn {k, _} -> not k in keys end, keywords) + :lists.filter(fn {key, _} -> key not in keys end, keywords) end @doc """ - Returns the first value associated with `key` in the keyword - list as well as the keyword list without `key`. 
+ Returns and removes all values associated with `key` in the keyword list. - All duplicated entries are removed. See `pop_first/3` for + All duplicated keys are removed. See `pop_first/3` for removing only the first entry. ## Examples - iex> Keyword.pop [a: 1], :a - {1,[]} + iex> Keyword.pop([a: 1], :a) + {1, []} + iex> Keyword.pop([a: 1], :b) + {nil, [a: 1]} + iex> Keyword.pop([a: 1], :b, 3) + {3, [a: 1]} + iex> Keyword.pop([a: 1, a: 2], :a) + {1, []} - iex> Keyword.pop [a: 1], :b - {nil,[a: 1]} + """ + @spec pop(t, key, value) :: {value, t} + def pop(keywords, key, default \\ nil) when is_list(keywords) do + case fetch(keywords, key) do + {:ok, value} -> + {value, delete(keywords, key)} + :error -> + {default, keywords} + end + end - iex> Keyword.pop [a: 1], :b, 3 - {3,[a: 1]} + @doc """ + Lazily returns and removes all values associated with `key` in the keyword list. - iex> Keyword.pop [a: 1], :b, 3 - {3,[a: 1]} + This is useful if the default value is very expensive to calculate or + generally difficult to setup and teardown again. - iex> Keyword.pop [a: 1, a: 2], :a - {1,[]} + All duplicated keys are removed. See `pop_first/3` for + removing only the first entry. + + ## Examples + + iex> keyword = [a: 1] + iex> fun = fn -> + ...> # some expensive operation here + ...> 13 + ...> end + iex> Keyword.pop_lazy(keyword, :a, fun) + {1, []} + iex> Keyword.pop_lazy(keyword, :b, fun) + {13, [a: 1]} """ - def pop(keywords, key, default \\ nil) when is_list(keywords) do - {get(keywords, key, default), delete(keywords, key)} + @spec pop_lazy(t, key, (() -> value)) :: {value, t} + def pop_lazy(keywords, key, fun) + when is_list(keywords) and is_function(fun, 0) do + case fetch(keywords, key) do + {:ok, value} -> + {value, delete(keywords, key)} + :error -> + {fun.(), keywords} + end end @doc """ - Returns the first value associated with `key` in the keyword - list as well as the keyword list without that particular ocurrence - of `key`. + Returns and removes the first value associated with `key` in the keyword list. - Duplicated entries are not removed. + Duplicated keys are not removed. ## Examples - iex> Keyword.pop_first [a: 1], :a - {1,[]} + iex> Keyword.pop_first([a: 1], :a) + {1, []} + iex> Keyword.pop_first([a: 1], :b) + {nil, [a: 1]} + iex> Keyword.pop_first([a: 1], :b, 3) + {3, [a: 1]} + iex> Keyword.pop_first([a: 1, a: 2], :a) + {1, [a: 2]} - iex> Keyword.pop_first [a: 1], :b - {nil,[a: 1]} + """ + @spec pop_first(t, key, value) :: {value, t} + def pop_first(keywords, key, default \\ nil) when is_list(keywords) do + case :lists.keytake(key, 1, keywords) do + {:value, {^key, value}, rest} -> {value, rest} + false -> {default, keywords} + end + end - iex> Keyword.pop_first [a: 1], :b, 3 - {3,[a: 1]} + @doc """ + Returns the keyword list itself. 
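`pop/3` and `pop_first/3` treat duplicated keys differently; a quick sketch:

    iex> opts = [a: 1, a: 2, b: 3]
    iex> Keyword.pop(opts, :a)        # removes every :a entry
    {1, [b: 3]}
    iex> Keyword.pop_first(opts, :a)  # removes only the first :a entry
    {1, [a: 2, b: 3]}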
- iex> Keyword.pop_first [a: 1], :b, 3 - {3,[a: 1]} + ## Examples - iex> Keyword.pop_first [a: 1, a: 2], :a - {1,[a: 2]} + iex> Keyword.to_list([a: 1]) + [a: 1] """ - def pop_first(keywords, key, default \\ nil) when is_list(keywords) do - {get(keywords, key, default), delete_first(keywords, key)} + @spec to_list(t) :: t + def to_list(keyword) when is_list(keyword) do + keyword end - # Dict callbacks - @doc false + # TODO: Remove on 2.0 + # (hard-deprecated in elixir_dispatch) def size(keyword) do length(keyword) end - - @doc false - def to_list(keyword) do - keyword - end end diff --git a/lib/elixir/lib/list.ex b/lib/elixir/lib/list.ex index f4990bdc337..92f544da63b 100644 --- a/lib/elixir/lib/list.ex +++ b/lib/elixir/lib/list.ex @@ -1,41 +1,125 @@ defmodule List do @moduledoc """ - Implements functions that only make sense for lists - and cannot be part of the Enum protocol. In general, - favor using the Enum API instead of List. + Functions that work on (linked) lists. - Some functions in this module expect an index. Index - access for list is linear. Negative indexes are also - supported but they imply the list will be iterated twice, - one to calculate the proper index and another to the - operation. + Lists in Elixir are specified between square brackets: - A decision was taken to delegate most functions to - Erlang's standard library but follow Elixir's convention - of receiving the target (in this case, a list) as the - first argument. + iex> [1, "two", 3, :four] + [1, "two", 3, :four] + + Two lists can be concatenated and subtracted using the + `Kernel.++/2` and `Kernel.--/2` operators: + + iex> [1, 2, 3] ++ [4, 5, 6] + [1, 2, 3, 4, 5, 6] + iex> [1, true, 2, false, 3, true] -- [true, false] + [1, 2, 3, true] + + Lists in Elixir are effectively linked lists, which means + they are internally represented in pairs containing the + head and the tail of a list: + + iex> [head | tail] = [1, 2, 3] + iex> head + 1 + iex> tail + [2, 3] + + Similarly, we could write the list `[1, 2, 3]` using only + such pairs (called cons cells): + + iex> [1 | [2 | [3 | []]]] + [1, 2, 3] + + Some lists, called improper lists, do not have an empty list as + the second element in the last cons cell: + + iex> [1 | [2 | [3 | 4]]] + [1, 2, 3 | 4] + + Although improper lists are generally avoided, they are used in some + special circumstances like iodata and chardata entities (see the `IO` module). + + Due to their cons cell based representation, prepending an element + to a list is always fast (constant time), while appending becomes + slower as the list grows in size (linear time): + + iex> list = [1, 2, 3] + iex> [0 | list] # fast + [0, 1, 2, 3] + iex> list ++ [4] # slow + [1, 2, 3, 4] + + The `Kernel` module contains many functions to manipulate lists + and that are allowed in guards. For example, `Kernel.hd/1` to + retrieve the head, `Kernel.tl/1` to fetch the tail and + `Kernel.length/1` for calculating the length. Keep in mind that, + similar to appending to a list, calculating the length needs to + traverse the whole list. + + ## Charlists + + If a list is made of non-negative integers, it can also be called + a charlist. Elixir uses single quotes to define charlists: + + iex> 'héllo' + [104, 233, 108, 108, 111] + + In particular, charlists may be printed back in single + quotes if they contain only ASCII-printable codepoints: + + iex> 'abc' + 'abc' + + The rationale behind this behaviour is to better support + Erlang libraries which may return text as charlists + instead of Elixir strings. 
One example of such functions + is `Application.loaded_applications/0`: + + Application.loaded_applications + #=> [{:stdlib, 'ERTS CXC 138 10', '2.6'}, + {:compiler, 'ERTS CXC 138 10', '6.0.1'}, + {:elixir, 'elixir', '1.0.0'}, + {:kernel, 'ERTS CXC 138 10', '4.1'}, + {:logger, 'logger', '1.0.0'}] + + ## List and Enum modules + + This module aims to provide operations that are specific + to lists, like conversion between data types, updates, + deletions and key lookups (for lists of tuples). For traversing + lists in general, developers should use the functions in the + `Enum` module that work across a variety of data types. + + In both `Enum` and `List` modules, any kind of index access + on a list is linear. Negative indexes are also supported but + they imply the list will be iterated twice, one to calculate + the proper index and another to perform the operation. """ @compile :inline_list_funcs @doc """ - Deletes the given item from the list. Returns a list without - the item. If the item occurs more than once in the list, just + Deletes the given `item` from the `list`. Returns a new list without + the item. + + If the `item` occurs more than once in the `list`, just the first occurrence is removed. ## Examples - iex> List.delete([1, 2, 3], 1) - [2,3] + iex> List.delete([:a, :b, :c], :a) + [:b, :c] - iex> List.delete([1, 2, 2, 3], 2) - [1, 2, 3] + iex> List.delete([:a, :b, :b, :c], :b) + [:a, :b, :c] """ @spec delete(list, any) :: list - def delete(list, item) do - :lists.delete(item, list) - end + def delete(list, item) + def delete([item | list], item), do: list + def delete([other | list], item), do: [other | delete(list, item)] + def delete([], _item), do: [] @doc """ Duplicates the given element `n` times in a list. @@ -43,10 +127,10 @@ defmodule List do ## Examples iex> List.duplicate("hello", 3) - ["hello","hello","hello"] + ["hello", "hello", "hello"] iex> List.duplicate([1, 2], 2) - [[1,2],[1,2]] + [[1, 2], [1, 2]] """ @spec duplicate(elem, non_neg_integer) :: [elem] when elem: var @@ -60,7 +144,7 @@ defmodule List do ## Examples iex> List.flatten([1, [[2], 3]]) - [1,2,3] + [1, 2, 3] """ @spec flatten(deep_list) :: list when deep_list: [any | deep_list] @@ -76,7 +160,7 @@ defmodule List do ## Examples iex> List.flatten([1, [[2], 3]], [4, 5]) - [1,2,3,4,5] + [1, 2, 3, 4, 5] """ @spec flatten(deep_list, [elem]) :: [elem] when elem: var, deep_list: [elem | deep_list] @@ -85,15 +169,15 @@ defmodule List do end @doc """ - Folds (reduces) the given list to the left with + Folds (reduces) the given list from the left with a function. Requires an accumulator. ## Examples - iex> List.foldl([5, 5], 10, fn (x, acc) -> x + acc end) + iex> List.foldl([5, 5], 10, fn(x, acc) -> x + acc end) 20 - iex> List.foldl([1, 2, 3, 4], 0, fn (x, acc) -> x - acc end) + iex> List.foldl([1, 2, 3, 4], 0, fn(x, acc) -> x - acc end) 2 """ @@ -103,12 +187,12 @@ defmodule List do end @doc """ - Folds (reduces) the given list to the right with + Folds (reduces) the given list from the right with a function. Requires an accumulator. ## Examples - iex> List.foldr([1, 2, 3, 4], 0, fn (x, acc) -> x - acc end) + iex> List.foldr([1, 2, 3, 4], 0, fn(x, acc) -> x - acc end) -2 """ @@ -133,8 +217,8 @@ defmodule List do """ @spec first([elem]) :: nil | elem when elem: var - def first([]), do: nil - def first([h|_]), do: h + def first([]), do: nil + def first([head | _]), do: head @doc """ Returns the last element in `list` or `nil` if `list` is empty. 
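A short sketch that makes the left/right folding direction above visible by using the list constructor as the reducing function:

    iex> List.foldl([1, 2, 3], [], fn x, acc -> [x | acc] end)
    [3, 2, 1]
    iex> List.foldr([1, 2, 3], [], fn x, acc -> [x | acc] end)
    [1, 2, 3]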
@@ -152,14 +236,14 @@ defmodule List do """ @spec last([elem]) :: nil | elem when elem: var - def last([]), do: nil - def last([h]), do: h - def last([_|t]), do: last(t) + def last([]), do: nil + def last([head]), do: head + def last([_ | tail]), do: last(tail) @doc """ Receives a list of tuples and returns the first tuple where the item at `position` in the tuple matches the - given `item`. + given `key`. ## Examples @@ -181,7 +265,7 @@ defmodule List do @doc """ Receives a list of tuples and returns `true` if there is a tuple where the item at `position` in the tuple matches - the given `item`. + the given `key`. ## Examples @@ -195,7 +279,7 @@ defmodule List do false """ - @spec keymember?([tuple], any, non_neg_integer) :: any + @spec keymember?([tuple], any, non_neg_integer) :: boolean def keymember?(list, key, position) do :lists.keymember(key, position + 1, list) end @@ -234,9 +318,10 @@ defmodule List do end @doc """ - Receives a list of tuples and replaces the item - identified by `key` at `position`. If the item - does not exist, it is added to the end of the list. + Receives a `list` of tuples and replaces the item + identified by `key` at `position`. + + If the item does not exist, it is added to the end of the `list`. ## Examples @@ -247,15 +332,15 @@ defmodule List do [a: 1, b: 2, c: 3] """ - @spec keystore([tuple], any, non_neg_integer, tuple) :: [tuple] + @spec keystore([tuple], any, non_neg_integer, tuple) :: [tuple, ...] def keystore(list, key, position, new_tuple) do :lists.keystore(key, position + 1, list, new_tuple) end @doc """ - Receives a list of tuples and deletes the first tuple + Receives a `list` of tuples and deletes the first tuple where the item at `position` matches the - given `item`. Returns the new list. + given `key`. Returns the new list. ## Examples @@ -274,8 +359,36 @@ defmodule List do :lists.keydelete(key, position + 1, list) end + @doc """ + Receives a `list` of tuples and returns the first tuple + where the element at `position` in the tuple matches the + given `key`, as well as the `list` without found tuple. + + If such a tuple is not found, `nil` will be returned. + + ## Examples + + iex> List.keytake([a: 1, b: 2], :a, 0) + {{:a, 1}, [b: 2]} + + iex> List.keytake([a: 1, b: 2], 2, 1) + {{:b, 2}, [a: 1]} + + iex> List.keytake([a: 1, b: 2], :c, 0) + nil + + """ + @spec keytake([tuple], any, non_neg_integer) :: {tuple, [tuple]} | nil + def keytake(list, key, position) do + case :lists.keytake(key, position + 1, list) do + {:value, item, list} -> {item, list} + false -> nil + end + end + @doc """ Wraps the argument in a list. + If the argument is already a list, returns the list. If the argument is `nil`, returns an empty list. @@ -285,7 +398,7 @@ defmodule List do ["hello"] iex> List.wrap([1, 2, 3]) - [1,2,3] + [1, 2, 3] iex> List.wrap(nil) [] @@ -307,6 +420,8 @@ defmodule List do @doc """ Zips corresponding elements from each list in `list_of_lists`. + The zipping finishes as soon as any list terminates. + ## Examples iex> List.zip([[1, 2], [3, 4], [5, 6]]) @@ -322,28 +437,11 @@ defmodule List do do_zip(list_of_lists, []) end - @doc """ - Unzips the given list of lists or tuples into separate lists and returns a - list of lists. 
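An illustrative workflow for the key-based functions above, including the newly added `keytake/3` (the `inventory` data is made up for the example):

    iex> inventory = [apples: 3, oranges: 6]
    iex> List.keyfind(inventory, :apples, 0)
    {:apples, 3}
    iex> List.keystore(inventory, :pears, 0, {:pears, 1})
    [apples: 3, oranges: 6, pears: 1]
    iex> List.keytake(inventory, :oranges, 0)
    {{:oranges, 6}, [apples: 3]}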
- - ## Examples - - iex> List.unzip([{1, 2}, {3, 4}]) - [[1, 3], [2, 4]] - - iex> List.unzip([{1, :a, "apple"}, {2, :b, "banana"}, {3, :c}]) - [[1, 2, 3], [:a, :b, :c]] - - """ - @spec unzip([tuple]) :: [list] - def unzip(list) when is_list(list) do - :lists.map &Tuple.to_list/1, zip(list) - end - @doc """ Returns a list with `value` inserted at the specified `index`. + Note that `index` is capped at the list length. Negative indices - indicate an offset from the end of the list. + indicate an offset from the end of the `list`. ## Examples @@ -361,7 +459,7 @@ defmodule List do """ @spec insert_at(list, integer, any) :: list - def insert_at(list, index, value) do + def insert_at(list, index, value) when is_integer(index) do if index < 0 do do_insert_at(list, length(list) + index + 1, value) else @@ -371,7 +469,8 @@ defmodule List do @doc """ Returns a list with a replaced value at the specified `index`. - Negative indices indicate an offset from the end of the list. + + Negative indices indicate an offset from the end of the `list`. If `index` is out of bounds, the original `list` is returned. ## Examples @@ -390,7 +489,7 @@ defmodule List do """ @spec replace_at(list, integer, any) :: list - def replace_at(list, index, value) do + def replace_at(list, index, value) when is_integer(index) do if index < 0 do do_replace_at(list, length(list) + index, value) else @@ -400,7 +499,8 @@ defmodule List do @doc """ Returns a list with an updated value at the specified `index`. - Negative indices indicate an offset from the end of the list. + + Negative indices indicate an offset from the end of the `list`. If `index` is out of bounds, the original `list` is returned. ## Examples @@ -419,7 +519,7 @@ defmodule List do """ @spec update_at([elem], integer, (elem -> any)) :: list when elem: var - def update_at(list, index, fun) do + def update_at(list, index, fun) when is_function(fun, 1) and is_integer(index) do if index < 0 do do_update_at(list, length(list) + index, fun) else @@ -429,7 +529,8 @@ defmodule List do @doc """ Produces a new list by removing the value at the specified `index`. - Negative indices indicate an offset from the end of the list. + + Negative indices indicate an offset from the end of the `list`. If `index` is out of bounds, the original `list` is returned. ## Examples @@ -437,7 +538,7 @@ defmodule List do iex> List.delete_at([1, 2, 3], 0) [2, 3] - iex List.delete_at([1, 2, 3], 10) + iex> List.delete_at([1, 2, 3], 10) [1, 2, 3] iex> List.delete_at([1, 2, 3], -1) @@ -445,18 +546,71 @@ defmodule List do """ @spec delete_at(list, integer) :: list - def delete_at(list, index) do + def delete_at(list, index) when is_integer(index) do + elem(pop_at(list, index), 1) + end + + @doc """ + Returns and removes the value at the specified `index` in the `list`. + + Negative indices indicate an offset from the end of the `list`. + If `index` is out of bounds, the original `list` is returned. + + ## Examples + + iex> List.pop_at([1, 2, 3], 0) + {1, [2, 3]} + iex> List.pop_at([1, 2, 3], 5) + {nil, [1, 2, 3]} + iex> List.pop_at([1, 2, 3], 5, 10) + {10, [1, 2, 3]} + iex> List.pop_at([1, 2, 3], -1) + {3, [1, 2]} + + """ + @spec pop_at(list, integer, any) :: {any, list} + def pop_at(list, index, default \\ nil) when is_integer(index) do if index < 0 do - do_delete_at(list, length(list) + index) + do_pop_at(list, length(list) + index, default, []) else - do_delete_at(list, index) + do_pop_at(list, index, default, []) end end @doc """ - Converts a char list to an atom. 
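A compact sketch contrasting how the index-based functions above treat a negative index (the values follow from the doctests):

    iex> List.insert_at([1, 2, 3], -1, :new)
    [1, 2, 3, :new]
    iex> List.replace_at([1, 2, 3], -1, :new)
    [1, 2, :new]
    iex> List.pop_at([1, 2, 3], -1)
    {3, [1, 2]}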
+ Returns `true` if `list` starts with the given `prefix` list; otherwise returns `false`. + + If `prefix` is an empty list, it returns `true`. + + ### Examples + + iex> List.starts_with?([1, 2, 3], [1, 2]) + true + + iex> List.starts_with?([1, 2], [1, 2, 3]) + false + + iex> List.starts_with?([:alpha], []) + true - Currently Elixir does not support conversions from char lists + iex> List.starts_with?([], [:alpha]) + false + + """ + @spec starts_with?(list, list) :: boolean + @spec starts_with?(list, []) :: true + @spec starts_with?([], nonempty_list) :: false + def starts_with?([head | tail], [head | prefix_tail]), + do: starts_with?(tail, prefix_tail); + def starts_with?(list, []) when is_list(list), + do: true + def starts_with?(list, [_ | _]) when is_list(list), + do: false + + @doc """ + Converts a charlist to an atom. + + Currently Elixir does not support conversions from charlists which contains Unicode codepoints greater than 0xFF. Inlined by the compiler. @@ -467,26 +621,37 @@ defmodule List do :elixir """ - @spec to_atom(char_list) :: atom - def to_atom(char_list) do - :erlang.list_to_atom(char_list) + @spec to_atom(charlist) :: atom + def to_atom(charlist) do + :erlang.list_to_atom(charlist) end @doc """ - Converts a char list to an existing atom. + Converts a charlist to an existing atom. Raises an `ArgumentError` + if the atom does not exist. - Currently Elixir does not support conversions from char lists + Currently Elixir does not support conversions from charlists which contains Unicode codepoints greater than 0xFF. Inlined by the compiler. + + ## Examples + + iex> _ = :my_atom + iex> List.to_existing_atom('my_atom') + :my_atom + + iex> List.to_existing_atom('this_atom_will_never_exist') + ** (ArgumentError) argument error + """ - @spec to_existing_atom(char_list) :: atom - def to_existing_atom(char_list) do - :erlang.list_to_existing_atom(char_list) + @spec to_existing_atom(charlist) :: atom + def to_existing_atom(charlist) do + :erlang.list_to_existing_atom(charlist) end @doc """ - Returns the float whose text representation is `char_list`. + Returns the float whose text representation is `charlist`. Inlined by the compiler. @@ -496,13 +661,13 @@ defmodule List do 2.2017764 """ - @spec to_float(char_list) :: float - def to_float(char_list) do - :erlang.list_to_float(char_list) + @spec to_float(charlist) :: float + def to_float(charlist) do + :erlang.list_to_float(charlist) end @doc """ - Returns an integer whose text representation is `char_list`. + Returns an integer whose text representation is `charlist`. Inlined by the compiler. @@ -512,13 +677,13 @@ defmodule List do 123 """ - @spec to_integer(char_list) :: integer - def to_integer(char_list) do - :erlang.list_to_integer(char_list) + @spec to_integer(charlist) :: integer + def to_integer(charlist) do + :erlang.list_to_integer(charlist) end @doc """ - Returns an integer whose text representation is `char_list` in base `base`. + Returns an integer whose text representation is `charlist` in base `base`. Inlined by the compiler. @@ -528,9 +693,9 @@ defmodule List do 1023 """ - @spec to_integer(char_list, non_neg_integer) :: integer - def to_integer(char_list, base) do - :erlang.list_to_integer(char_list, base) + @spec to_integer(charlist, 2..36) :: integer + def to_integer(charlist, base) do + :erlang.list_to_integer(charlist, base) end @doc """ @@ -553,9 +718,9 @@ defmodule List do Converts a list of integers representing codepoints, lists or strings into a string. 
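One more hedged example tying the charlist conversions above together by round-tripping through base 16 (`Integer.to_charlist/2` is the pre-existing inverse, not part of this patch):

    iex> List.to_integer('FF', 16)
    255
    iex> Integer.to_charlist(255, 16)
    'FF'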
- Notice that this function expect a list of integer representing + Notice that this function expects a list of integers representing UTF-8 codepoints. If you have a list of bytes, you must instead use - [the `:binary` module](http://erlang.org/doc/man/binary.html). + the [`:binary` module](http://www.erlang.org/doc/man/binary.html). ## Examples @@ -566,9 +731,26 @@ defmodule List do "abc" """ - @spec to_string(:unicode.char_list) :: String.t + @spec to_string(:unicode.charlist) :: String.t def to_string(list) when is_list(list) do - case :unicode.characters_to_binary(list) do + try do + :unicode.characters_to_binary(list) + rescue + ArgumentError -> + raise ArgumentError, """ + cannot convert the given list to a string. + + To be converted to a string, a list must contain only: + + * strings + * integers representing Unicode codepoints + * or a list containing one of these three elements + + Please check the given list or call inspect/1 to get the list representation, got: + + #{inspect list} + """ + else result when is_binary(result) -> result @@ -580,6 +762,109 @@ defmodule List do end end + @doc """ + Returns a keyword list that represents an *edit script*. + + The algorithm is outlined in the + "An O(ND) Difference Algorithm and Its Variations" paper by E. Myers. + + An *edit script* is a keyword list. Each key describes the "editing action" to + take in order to bring `list1` closer to being equal to `list2`; a key can be + `:eq`, `:ins`, or `:del`. Each value is a sublist of either `list1` or `list2` + that should be inserted (if the corresponding key `:ins`), deleted (if the + corresponding key is `:del`), or left alone (if the corresponding key is + `:eq`) in `list1` in order to be closer to `list2`. + + ## Examples + + iex> List.myers_difference([1, 4, 2, 3], [1, 2, 3, 4]) + [eq: [1], del: [4], eq: [2, 3], ins: [4]] + + """ + @spec myers_difference(list, list) :: [{:eq | :ins | :del, list}] | nil + def myers_difference(list1, list2) when is_list(list1) and is_list(list2) do + path = {0, 0, list1, list2, []} + find_script(0, length(list1) + length(list2), [path]) + end + + defp find_script(envelope, max, _paths) when envelope > max do + nil + end + + defp find_script(envelope, max, paths) do + case each_diagonal(-envelope, envelope, paths, []) do + {:done, edits} -> compact_reverse(edits, []) + {:next, paths} -> find_script(envelope + 1, max, paths) + end + end + + defp compact_reverse([], acc), do: acc + + defp compact_reverse([{kind, elem} | rest], [{kind, result} | acc]) do + compact_reverse(rest, [{kind, [elem | result]} | acc]) + end + + defp compact_reverse([{kind, elem} | rest], acc) do + compact_reverse(rest, [{kind, [elem]} | acc]) + end + + defp each_diagonal(diag, limit, _paths, next_paths) when diag > limit do + {:next, Enum.reverse(next_paths)} + end + + defp each_diagonal(diag, limit, paths, next_paths) do + {path, rest} = proceed_path(diag, limit, paths) + with {:cont, path} <- follow_snake(path) do + each_diagonal(diag + 2, limit, rest, [path | next_paths]) + end + end + + defp proceed_path(0, 0, [path]), do: {path, []} + + defp proceed_path(diag, limit, [path | _] = paths) when diag == -limit do + {move_down(path), paths} + end + + defp proceed_path(diag, limit, [path]) when diag == limit do + {move_right(path), []} + end + + defp proceed_path(_diag, _limit, [path1, path2 | rest]) do + if elem(path1, 1) > elem(path2, 1) do + {move_right(path1), [path2 | rest]} + else + {move_down(path2), [path2 | rest]} + end + end + + defp move_right({x, y, list1, [elem | rest], 
edits}) do + {x + 1, y, list1, rest, [{:ins, elem} | edits]} + end + + defp move_right({x, y, list1, [], edits}) do + {x + 1, y, list1, [], edits} + end + + defp move_down({x, y, [elem | rest], list2, edits}) do + {x, y + 1, rest, list2, [{:del, elem} | edits]} + end + + defp move_down({x, y, [], list2, edits}) do + {x, y + 1, [], list2, edits} + end + + defp follow_snake({x, y, [elem | rest1], [elem | rest2], edits}) do + follow_snake({x + 1, y + 1, rest1, rest2, [{:eq, elem} | edits]}) + end + + defp follow_snake({_x, _y, [], [], edits}) do + {:done, edits} + end + + defp follow_snake(path) do + {:cont, path} + end + ## Helpers # replace_at @@ -592,73 +877,72 @@ defmodule List do list end - defp do_replace_at([_old|rest], 0, value) do - [ value | rest ] + defp do_replace_at([_old | rest], 0, value) do + [value | rest] end - defp do_replace_at([h|t], index, value) do - [ h | do_replace_at(t, index - 1, value) ] + defp do_replace_at([head | tail], index, value) do + [head | do_replace_at(tail, index - 1, value)] end # insert_at defp do_insert_at([], _index, value) do - [ value ] + [value] end defp do_insert_at(list, index, value) when index <= 0 do - [ value | list ] + [value | list] end - defp do_insert_at([h|t], index, value) do - [ h | do_insert_at(t, index - 1, value) ] + defp do_insert_at([head | tail], index, value) do + [head | do_insert_at(tail, index - 1, value)] end # update_at - defp do_update_at([value|list], 0, fun) do - [ fun.(value) | list ] + defp do_update_at([value | list], 0, fun) do + [fun.(value) | list] end defp do_update_at(list, index, _fun) when index < 0 do list end - defp do_update_at([h|t], index, fun) do - [ h | do_update_at(t, index - 1, fun) ] + defp do_update_at([head | tail], index, fun) do + [head | do_update_at(tail, index - 1, fun)] end defp do_update_at([], _index, _fun) do [] end - # delete_at + # pop_at - defp do_delete_at([], _index) do - [] + defp do_pop_at([], _index, default, acc) do + {default, :lists.reverse(acc)} end - defp do_delete_at([_|t], 0) do - t + defp do_pop_at(list, index, default, []) when index < 0 do + {default, list} end - defp do_delete_at(list, index) when index < 0 do - list + defp do_pop_at([head | tail], 0, _default, acc) do + {head, :lists.reverse(acc, tail)} end - defp do_delete_at([h|t], index) do - [h | do_delete_at(t, index-1)] + defp do_pop_at([head | tail], index, default, acc) do + do_pop_at(tail, index - 1, default, [head | acc]) end # zip defp do_zip(list, acc) do converter = fn x, acc -> do_zip_each(to_list(x), acc) end - {mlist, heads} = :lists.mapfoldl converter, [], list - - case heads do - nil -> :lists.reverse acc - _ -> do_zip mlist, [:erlang.list_to_tuple(:lists.reverse(heads))|acc] + case :lists.mapfoldl(converter, [], list) do + {_, nil} -> :lists.reverse(acc) + {mlist, heads} -> + do_zip(mlist, [to_tuple(:lists.reverse(heads)) | acc]) end end @@ -666,8 +950,8 @@ defmodule List do {nil, nil} end - defp do_zip_each([h|t], acc) do - {t, [h|acc]} + defp do_zip_each([head | tail], acc) do + {tail, [head | acc]} end defp do_zip_each([], _) do diff --git a/lib/elixir/lib/list/chars.ex b/lib/elixir/lib/list/chars.ex index cfd42f42ddb..74659837246 100644 --- a/lib/elixir/lib/list/chars.ex +++ b/lib/elixir/lib/list/chars.ex @@ -1,56 +1,62 @@ defprotocol List.Chars do @moduledoc ~S""" - The List.Chars protocol is responsible for - converting a structure to a list (only if applicable). + The `List.Chars` protocol is responsible for + converting a structure to a charlist (only if applicable). 
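As an illustration of the Myers edit-script machinery implemented above, a sketch of the kind of output `List.myers_difference/2` produces (expected result, shown for orientation only):

    List.myers_difference([1, 2, 3], [1, 3, 4])
    #=> [eq: [1], del: [2], eq: [3], ins: [4]]

Reading the script left to right (keep the `:eq` chunks, drop the `:del` chunks, splice in the `:ins` chunks) rebuilds the second list from the first.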
+ The only function required to be implemented is - `to_char_list` which does the conversion. + `to_charlist/1` which does the conversion. - The `to_char_list` function automatically imported - by Kernel invokes this protocol. + The `to_charlist/1` function automatically imported + by `Kernel` invokes this protocol. """ - def to_char_list(thing) + @doc """ + Converts `term` to a charlist. + """ + @spec to_charlist(t) :: charlist + def to_charlist(term) + + @doc false + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + Kernel.def to_char_list(term) do + __MODULE__.to_charlist(term) + end end defimpl List.Chars, for: Atom do - def to_char_list(atom), do: Atom.to_char_list(atom) + def to_charlist(atom), do: Atom.to_charlist(atom) end defimpl List.Chars, for: BitString do @doc """ - Returns the given binary converted to a char list. + Returns the given binary `term` converted to a charlist. """ - def to_char_list(thing) when is_binary(thing) do - String.to_char_list(thing) + def to_charlist(term) when is_binary(term) do + String.to_charlist(term) end - def to_char_list(thing) do + def to_charlist(term) do raise Protocol.UndefinedError, protocol: @protocol, - value: thing, - description: "cannot convert a bitstring to a char list" + value: term, + description: "cannot convert a bitstring to a charlist" end end defimpl List.Chars, for: List do - def to_char_list(list), do: list + # Note that same inlining is used for the rewrite rule. + def to_charlist(list), do: list end defimpl List.Chars, for: Integer do - def to_char_list(thing) do - Integer.to_char_list(thing) + def to_charlist(term) do + Integer.to_charlist(term) end end defimpl List.Chars, for: Float do - @digits 20 - @limit :math.pow(10, @digits) - - def to_char_list(thing) when thing > @limit do - Float.to_char_list(thing, scientific: @digits) - end - - def to_char_list(thing) do - Float.to_char_list(thing, compact: true, decimals: @digits) + def to_charlist(term) do + :io_lib_format.fwrite_g(term) end end diff --git a/lib/elixir/lib/macro.ex b/lib/elixir/lib/macro.ex index 6e75221c54e..e203f9bf923 100644 --- a/lib/elixir/lib/macro.ex +++ b/lib/elixir/lib/macro.ex @@ -1,31 +1,85 @@ import Kernel, except: [to_string: 1] defmodule Macro do - @moduledoc """ + @moduledoc ~S""" Conveniences for working with macros. + + ## Custom Sigils + + To create a custom sigil, define a function with the name + `sigil_{identifier}` that takes two arguments. The first argument will be + the string, the second will be a charlist containing any modifiers. If the + sigil is lower case (such as `sigil_x`) then the string argument will allow + interpolation. If the sigil is upper case (such as `sigil_X`) then the string + will not be interpolated. + + Valid modifiers include only lower and upper case letters. Other characters + will cause a syntax error. + + The module containing the custom sigil must be imported before the sigil + syntax can be used. 
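Stepping back to the `List.Chars` protocol above, a brief sketch of the renamed `to_charlist/1` dispatching to the `BitString`, `Atom` and `Integer` implementations shown there:

    iex> List.Chars.to_charlist("héllo")
    [104, 233, 108, 108, 111]
    iex> List.Chars.to_charlist(:hello)
    'hello'
    iex> List.Chars.to_charlist(123)
    '123'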
+ + ### Examples + + defmodule MySigils do + defmacro sigil_x(term, [?r]) do + quote do + unquote(term) |> String.reverse() + end + end + defmacro sigil_x(term, _modifiers) do + term + end + defmacro sigil_X(term, [?r]) do + quote do + unquote(term) |> String.reverse() + end + end + defmacro sigil_X(term, _modifiers) do + term + end + end + + import MySigils + + ~x(with #{"inter" <> "polation"}) + #=>"with interpolation" + + ~x(with #{"inter" <> "polation"})r + #=>"noitalopretni htiw" + + ~X(without #{"interpolation"}) + #=>"without \#{"interpolation"}" + + ~X(without #{"interpolation"})r + #=>"}\"noitalopretni\"{# tuohtiw" """ @typedoc "Abstract Syntax Tree (AST)" - @type t :: expr | {t, t} | atom | number | binary | pid | fun | [t] + @type t :: expr | literal - @typedoc "Expr node (remaining ones are literals)" + @typedoc "Represents expressions in the AST" @type expr :: {expr | atom, Keyword.t, atom | [t]} - @binary_ops [:===, :!==, - :==, :!=, :<=, :>=, - :&&, :||, :<>, :++, :--, :\\, :::, :<-, :.., :|>, :=~, - :<, :>, :->, - :+, :-, :*, :/, :=, :|, :., - :and, :or, :xor, :when, :in, - :<<<, :>>>, :|||, :&&&, :^^^, :~~~] + @typedoc "Represents literals in the AST" + @type literal :: atom | number | binary | fun | {t, t} | [t] + + binary_ops = + [:===, :!==, :==, :!=, :<=, :>=, + :&&, :||, :<>, :++, :--, :\\, :::, :<-, :.., :|>, :=~, + :<, :>, :->, + :+, :-, :*, :/, :=, :|, :., + :and, :or, :when, :in, + :~>>, :<<~, :~>, :<~, :<~>, :<|>, + :<<<, :>>>, :|||, :&&&, :^^^, :~~~] @doc false - defmacro binary_ops, do: @binary_ops + defmacro binary_ops, do: unquote(binary_ops) - @unary_ops [:!, :@, :^, :not, :+, :-, :~~~, :&] + unary_ops = [:!, :@, :^, :not, :+, :-, :~~~, :&] @doc false - defmacro unary_ops, do: @unary_ops + defmacro unary_ops, do: unquote(unary_ops) @spec binary_op_props(atom) :: {:left | :right, precedence :: integer} defp binary_op_props(o) do @@ -35,24 +89,110 @@ defmodule Macro do ::: -> {:right, 60} :| -> {:right, 70} := -> {:right, 90} - o when o in [:||, :|||, :or, :xor] -> {:left, 130} + o when o in [:||, :|||, :or] -> {:left, 130} o when o in [:&&, :&&&, :and] -> {:left, 140} o when o in [:==, :!=, :=~, :===, :!==] -> {:left, 150} o when o in [:<, :<=, :>=, :>] -> {:left, 160} - o when o in [:|>, :<<<, :>>>] -> {:left, 170} + o when o in [:|>, :<<<, :>>>, :<~, :~>, + :<<~, :~>>, :<~>, :<|>, :^^^] -> {:left, 170} :in -> {:left, 180} o when o in [:++, :--, :.., :<>] -> {:right, 200} o when o in [:+, :-] -> {:left, 210} o when o in [:*, :/] -> {:left, 220} - :^^^ -> {:left, 250} :. -> {:left, 310} end end + # Classifies the given atom into one of the following categories: + # + # * :alias - a valid Elixir alias, like Foo, Foo.Bar and so on + # + # * :callable - an atom that can be used as a function call after the + # . operator (for example, :<> is callable because Foo.<>(1, 2, 3) is valid + # syntax); this category includes identifiers like :foo + # + # * :not_callable - an atom that cannot be used as a function call after the + # . 
operator (for example, :<<>> is not callable because Foo.<<>> is a + # syntax error); this category includes atoms like :Foo, since they are + # valid identifiers but they need quotes to be used in function calls + # (Foo."Bar") + # + # * :other - any other atom (these are usually escaped when inspected, like + # :"foo and bar") + @doc false + def classify_identifier(atom) when is_atom(atom) do + charlist = Atom.to_charlist(atom) + + cond do + atom in [:"%", :"%{}", :"{}", :"<<>>", :"...", :"..", :"."] -> + :not_callable + atom in unquote(unary_ops) or atom in unquote(binary_ops) -> + :callable + valid_alias?(charlist) -> + :alias + true -> + case :elixir_config.get(:identifier_tokenizer).tokenize(charlist) do + {kind, _acc, [], _, _, special} -> + if kind == :identifier and not :lists.member(?@, special) do + :callable + else + :not_callable + end + _ -> + :other + end + end + end + + defp valid_alias?('Elixir' ++ rest), do: valid_alias_piece?(rest) + defp valid_alias?(_other), do: false + + defp valid_alias_piece?([?., char | rest]) when char >= ?A and char <= ?Z, + do: valid_alias_piece?(trim_leading_while_valid_identifier(rest)) + defp valid_alias_piece?([]), + do: true + defp valid_alias_piece?(_other), + do: false + + defp trim_leading_while_valid_identifier([char | rest]) + when char >= ?a and char <= ?z + when char >= ?A and char <= ?Z + when char >= ?0 and char <= ?9 + when char == ?_ do + trim_leading_while_valid_identifier(rest) + end + + defp trim_leading_while_valid_identifier(other) do + other + end + @doc """ Breaks a pipeline expression into a list. - Raises if the pipeline is ill-formed. + The AST for a pipeline (a sequence of applications of `|>`) is similar to the + AST of a sequence of binary operators or function applications: the top-level + expression is the right-most `:|>` (which is the last one to be executed), and + its left-hand and right-hand sides are its arguments: + + quote do: 100 |> div(5) |> div(2) + #=> {:|>, _, [arg1, arg2]} + + In the example above, the `|>` pipe is the right-most pipe; `arg1` is the AST + for `100 |> div(5)`, and `arg2` is the AST for `div(2)`. + + It's often useful to have the AST for such a pipeline as a list of function + applications. This function does exactly that: + + Macro.unpipe(quote do: 100 |> div(5) |> div(2)) + #=> [{100, 0}, {{:div, [], [5]}, 0}, {{:div, [], [2]}, 0}] + + We get a list that follows the pipeline directly: first the `100`, then the + `div(5)` (more precisely, its AST), then `div(2)`. The `0` as the second + element of the tuples is the position of the previous element in the pipeline + inside the current function application: `{{:div, [], [5]}, 0}` means that the + previous element (`100`) will be inserted as the 0th (first) argument to the + `div/2` function, so that the AST for that function will become `{:div, [], + [100, 5]}` (`div(100, 5)`). """ @spec unpipe(Macro.t) :: [Macro.t] def unpipe(expr) do @@ -64,7 +204,7 @@ defmodule Macro do end defp unpipe(other, acc) do - [{other, 0}|acc] + [{other, 0} | acc] end @doc """ @@ -74,7 +214,32 @@ defmodule Macro do def pipe(expr, call_args, position) def pipe(expr, {:&, _, _} = call_args, _integer) do - raise ArgumentError, "cannot pipe #{to_string expr} into #{to_string call_args}" + raise ArgumentError, bad_pipe(expr, call_args) + end + + def pipe(expr, {tuple_or_map, _, _} = call_args, _integer) when tuple_or_map in [:{}, :%{}] do + raise ArgumentError, bad_pipe(expr, call_args) + end + + # Without this, `Macro |> Env == Macro.Env`. 
+ def pipe(expr, {:__aliases__, _, _} = call_args, _integer) do + raise ArgumentError, bad_pipe(expr, call_args) + end + + def pipe(expr, {call, _, [_, _]} = call_args, _integer) + when call in unquote(binary_ops) do + raise ArgumentError, "cannot pipe #{to_string expr} into #{to_string call_args}, " <> + "the #{to_string call} operator can only take two arguments" + end + + # {:fn, _, _} is what we get when we pipe into an anonymous function without + # calling it, e.g., `:foo |> (fn x -> x end)`. + def pipe(expr, {:fn, _, _}, _integer) do + expr_str = to_string(expr) + raise ArgumentError, + "cannot pipe #{expr_str} into an anonymous function without" <> + " calling the function; use something like (fn ... end).() or" <> + " define the anonymous function as a regular private function" end def pipe(expr, {call, line, atom}, integer) when is_atom(atom) do @@ -86,13 +251,24 @@ defmodule Macro do end def pipe(expr, call_args, _integer) do - raise ArgumentError, "cannot pipe #{to_string expr} into #{to_string call_args}" + raise ArgumentError, bad_pipe(expr, call_args) + end + + defp bad_pipe(expr, call_args) do + "cannot pipe #{to_string expr} into #{to_string call_args}, " <> + "can only pipe into local calls foo(), remote calls Foo.bar() or anonymous functions calls foo.()" end + @doc false + def pipe_warning({call, _, _}) when call in unquote(unary_ops) do + "piping into a unary operator is deprecated. You could use e.g. Kernel.+(5) instead of +5" + end + def pipe_warning(_), do: nil + @doc """ Applies the given function to the node metadata if it contains one. - This is often useful when used with `Macro.prewalk/1` to remove + This is often useful when used with `Macro.prewalk/2` to remove information like lines and hygienic counters from the expression for either storage or comparison. @@ -116,14 +292,29 @@ defmodule Macro do end @doc """ - Genrates a AST node representing the variable given + Generates AST nodes for a given number of required argument variables using + `Macro.var/2`. + + ## Examples + + iex> Macro.generate_arguments(2, __MODULE__) + [{:var1, [], __MODULE__}, {:var2, [], __MODULE__}] + + """ + def generate_arguments(0, _), do: [] + def generate_arguments(amount, context) when is_integer(amount) and amount > 0 and is_atom(context) do + for id <- 1..amount, do: Macro.var(String.to_atom("var" <> Integer.to_string(id)), context) + end + + @doc """ + Generates an AST node representing the variable given by the atoms `var` and `context`. ## Examples In order to build a variable, a context is expected. Most of the times, in order to preserve hygiene, the - context must be `__MODULE__`: + context must be `__MODULE__/0`: iex> Macro.var(:foo, __MODULE__) {:foo, [], __MODULE__} @@ -141,56 +332,70 @@ defmodule Macro do end @doc """ - Performs a depth-first, pre-order traversal of quoted expressions. + Performs a depth-first traversal of quoted expressions + using an accumulator. """ - @spec prewalk(t, (t -> t)) :: t - def prewalk(ast, fun) when is_function(fun, 1) do - elem(prewalk(ast, nil, fn x, nil -> {fun.(x), nil} end), 0) + @spec traverse(t, any, (t, any -> {t, any}), (t, any -> {t, any})) :: {t, any} + def traverse(ast, acc, pre, post) when is_function(pre, 2) and is_function(post, 2) do + {ast, acc} = pre.(ast, acc) + do_traverse(ast, acc, pre, post) end - @doc """ - Performs a depth-first, pre-order traversal of quoted expressions - using an accumulator. 
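To keep the error clauses above in context, a hedged sketch of the successful `Macro.pipe/3` path, rendered with `Macro.to_string/1`:

    iex> Macro.pipe(quote(do: foo()), quote(do: bar(1)), 0) |> Macro.to_string()
    "bar(foo(), 1)"

The piped expression is inserted at position 0 of the call's argument list, which is what `|>` does at compile time.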
- """ - @spec prewalk(t, any, (t, any -> {t, any})) :: {t, any} - def prewalk(ast, acc, fun) when is_function(fun, 2) do - {ast, acc} = fun.(ast, acc) - do_prewalk(ast, acc, fun) + defp do_traverse({form, meta, args}, acc, pre, post) when is_atom(form) do + {args, acc} = do_traverse_args(args, acc, pre, post) + post.({form, meta, args}, acc) end - defp do_prewalk({form, meta, args}, acc, fun) do - unless is_atom(form) do - {form, acc} = fun.(form, acc) - {form, acc} = do_prewalk(form, acc, fun) - end + defp do_traverse({form, meta, args}, acc, pre, post) do + {form, acc} = pre.(form, acc) + {form, acc} = do_traverse(form, acc, pre, post) + {args, acc} = do_traverse_args(args, acc, pre, post) + post.({form, meta, args}, acc) + end - unless is_atom(args) do - {args, acc} = Enum.map_reduce(args, acc, fn x, acc -> - {x, acc} = fun.(x, acc) - do_prewalk(x, acc, fun) - end) - end + defp do_traverse({left, right}, acc, pre, post) do + {left, acc} = pre.(left, acc) + {left, acc} = do_traverse(left, acc, pre, post) + {right, acc} = pre.(right, acc) + {right, acc} = do_traverse(right, acc, pre, post) + post.({left, right}, acc) + end + + defp do_traverse(list, acc, pre, post) when is_list(list) do + {list, acc} = do_traverse_args(list, acc, pre, post) + post.(list, acc) + end - {{form, meta, args}, acc} + defp do_traverse(x, acc, _pre, post) do + post.(x, acc) end - defp do_prewalk({left, right}, acc, fun) do - {left, acc} = fun.(left, acc) - {left, acc} = do_prewalk(left, acc, fun) - {right, acc} = fun.(right, acc) - {right, acc} = do_prewalk(right, acc, fun) - {{left, right}, acc} + defp do_traverse_args(args, acc, _pre, _post) when is_atom(args) do + {args, acc} end - defp do_prewalk(list, acc, fun) when is_list(list) do - Enum.map_reduce(list, acc, fn x, acc -> - {x, acc} = fun.(x, acc) - do_prewalk(x, acc, fun) + defp do_traverse_args(args, acc, pre, post) when is_list(args) do + Enum.map_reduce(args, acc, fn x, acc -> + {x, acc} = pre.(x, acc) + do_traverse(x, acc, pre, post) end) end - defp do_prewalk(x, acc, _fun) do - {x, acc} + @doc """ + Performs a depth-first, pre-order traversal of quoted expressions. + """ + @spec prewalk(t, (t -> t)) :: t + def prewalk(ast, fun) when is_function(fun, 1) do + elem(prewalk(ast, nil, fn x, nil -> {fun.(x), nil} end), 0) + end + + @doc """ + Performs a depth-first, pre-order traversal of quoted expressions + using an accumulator. 
+ """ + @spec prewalk(t, any, (t, any -> {t, any})) :: {t, any} + def prewalk(ast, acc, fun) when is_function(fun, 2) do + traverse(ast, acc, fun, fn x, a -> {x, a} end) end @doc """ @@ -207,34 +412,7 @@ defmodule Macro do """ @spec postwalk(t, any, (t, any -> {t, any})) :: {t, any} def postwalk(ast, acc, fun) when is_function(fun, 2) do - do_postwalk(ast, acc, fun) - end - - defp do_postwalk({form, meta, args}, acc, fun) do - unless is_atom(form) do - {form, acc} = do_postwalk(form, acc, fun) - end - - unless is_atom(args) do - {args, acc} = Enum.map_reduce(args, acc, &do_postwalk(&1, &2, fun)) - end - - fun.({form, meta, args}, acc) - end - - defp do_postwalk({left, right}, acc, fun) do - {left, acc} = do_postwalk(left, acc, fun) - {right, acc} = do_postwalk(right, acc, fun) - fun.({left, right}, acc) - end - - defp do_postwalk(list, acc, fun) when is_list(list) do - {list, acc} = Enum.map_reduce(list, acc, &do_postwalk(&1, &2, fun)) - fun.(list, acc) - end - - defp do_postwalk(x, acc, fun) do - fun.(x, acc) + traverse(ast, acc, fn x, a -> {x, a} end, fun) end @doc """ @@ -245,23 +423,25 @@ defmodule Macro do ## Examples - iex> Macro.decompose_call(quote do: foo) + iex> Macro.decompose_call(quote(do: foo)) {:foo, []} - iex> Macro.decompose_call(quote do: foo()) + iex> Macro.decompose_call(quote(do: foo())) {:foo, []} - iex> Macro.decompose_call(quote do: foo(1, 2, 3)) + iex> Macro.decompose_call(quote(do: foo(1, 2, 3))) {:foo, [1, 2, 3]} - iex> Macro.decompose_call(quote do: Elixir.M.foo(1, 2, 3)) + iex> Macro.decompose_call(quote(do: Elixir.M.foo(1, 2, 3))) {{:__aliases__, [], [:Elixir, :M]}, :foo, [1, 2, 3]} - iex> Macro.decompose_call(quote do: 42) + iex> Macro.decompose_call(quote(do: 42)) :error """ @spec decompose_call(Macro.t) :: {atom, [Macro.t]} | {Macro.t, atom, [Macro.t]} | :error + def decompose_call(ast) + def decompose_call({{:., _, [remote, function]}, _, args}) when is_tuple(remote) or is_atom(remote), do: {remote, function, args} @@ -279,7 +459,7 @@ defmodule Macro do into a syntax tree. One may pass `unquote: true` to `escape/2` - which leaves `unquote` statements unescaped, effectively + which leaves `unquote/1` statements unescaped, effectively unquoting the contents on escape. ## Examples @@ -294,22 +474,72 @@ defmodule Macro do 1 """ - @spec escape(term) :: Macro.t @spec escape(term, Keyword.t) :: Macro.t def escape(expr, opts \\ []) do elem(:elixir_quote.escape(expr, Keyword.get(opts, :unquote, false)), 0) end + @doc """ + Validates the given expressions are valid quoted expressions. + + Checks the `t:Macro.t/0` for the specification of a valid + quoted expression. + + It returns `:ok` if the expression is valid. Otherwise it returns a tuple in the form of + `{:error, remainder}` where `remainder` is the invalid part of the quoted expression. 
+ + ## Examples + + iex> Macro.validate({:two_element, :tuple}) + :ok + iex> Macro.validate({:three, :element, :tuple}) + {:error, {:three, :element, :tuple}} + + iex> Macro.validate([1, 2, 3]) + :ok + iex> Macro.validate([1, 2, 3, {4}]) + {:error, {4}} + + """ + @spec validate(term) :: :ok | {:error, term} + def validate(expr) do + find_invalid(expr) || :ok + end + + defp find_invalid({left, right}), do: + find_invalid(left) || find_invalid(right) + + defp find_invalid({left, meta, right}) when is_list(meta) and (is_atom(right) or is_list(right)), do: + find_invalid(left) || find_invalid(right) + + defp find_invalid(list) when is_list(list), do: + Enum.find_value(list, &find_invalid/1) + + defp find_invalid(pid) when is_pid(pid), do: nil + defp find_invalid(atom) when is_atom(atom), do: nil + defp find_invalid(num) when is_number(num), do: nil + defp find_invalid(bin) when is_binary(bin), do: nil + + defp find_invalid(fun) when is_function(fun) do + unless :erlang.fun_info(fun, :env) == {:env, []} and + :erlang.fun_info(fun, :type) == {:type, :external} do + {:error, fun} + end + end + + defp find_invalid(other), do: {:error, other} + @doc ~S""" - Unescape the given chars. + Unescapes the given chars. This is the unescaping behaviour used by default in Elixir single- and double-quoted strings. Check `unescape_string/2` for information on how to customize the escaping map. - In this setup, Elixir will escape the following: `\a`, `\b`, - `\d`, `\e`, `\f`, `\n`, `\r`, `\s`, `\t` and `\v`. Octals are - also escaped according to the latin1 set they represent. + In this setup, Elixir will escape the following: `\0`, `\a`, `\b`, + `\d`, `\e`, `\f`, `\n`, `\r`, `\s`, `\t` and `\v`. Bytes can be + given as hexadecimals via `\xNN` and Unicode Codepoints as + `\uNNNN` escapes. This function is commonly used on sigil implementations (like `~r`, `~s` and others) which receive a raw, unescaped @@ -329,7 +559,7 @@ defmodule Macro do end @doc ~S""" - Unescape the given chars according to the map given. + Unescapes the given chars according to the map given. Check `unescape_string/1` if you want to use the same map as Elixir single- and double-quoted strings. @@ -340,6 +570,7 @@ defmodule Macro do representing the codepoint of the character it wants to unescape. Here is the default mapping function implemented by Elixir: + def unescape_map(?0), do: ?0 def unescape_map(?a), do: ?\a def unescape_map(?b), do: ?\b def unescape_map(?d), do: ?\d @@ -350,24 +581,20 @@ defmodule Macro do def unescape_map(?s), do: ?\s def unescape_map(?t), do: ?\t def unescape_map(?v), do: ?\v + def unescape_map(?x), do: true + def unescape_map(?u), do: true def unescape_map(e), do: e - If the `unescape_map` function returns `false`. The char is - not escaped and `\` is kept in the char list. - - ## Octals + If the `unescape_map/1` function returns `false`, the char is + not escaped and the backslash is kept in the string. - Octals will by default be escaped unless the map function - returns `false` for `?0`. - - ## Hex - - Hexadecimals will by default be escaped unless the map function - returns `false` for `?x`. + Hexadecimals and Unicode codepoints will be escaped if the map + function returns `true` for `?x`. Unicode codepoints if the map + function returns `true` for `?u`. 
## Examples - Using the `unescape_map` function defined above is easy: + Using the `unescape_map/1` function defined above is easy: Macro.unescape_string "example\\n", &unescape_map(&1) @@ -378,7 +605,7 @@ defmodule Macro do end @doc """ - Unescape the given tokens according to the default map. + Unescapes the given tokens according to the default map. Check `unescape_string/1` and `unescape_string/2` for more information about unescaping. @@ -394,7 +621,7 @@ defmodule Macro do end @doc """ - Unescape the given tokens according to the given map. + Unescapes the given tokens according to the given map. Check `unescape_tokens/1` and `unescape_string/2` for more information. """ @@ -406,13 +633,24 @@ defmodule Macro do @doc """ Converts the given expression to a binary. + The given `fun` is called for every node in the AST with two arguments: the + AST of the node being printed and the string representation of that same + node. The return value of this function is used as the final string + representation for that AST node. + ## Examples - iex> Macro.to_string(quote do: foo.bar(1, 2, 3)) + iex> Macro.to_string(quote(do: foo.bar(1, 2, 3))) "foo.bar(1, 2, 3)" + iex> Macro.to_string(quote(do: 1 + 2), fn + ...> 1, _string -> "one" + ...> 2, _string -> "two" + ...> _ast, string -> string + ...> end) + "one + two" + """ - @spec to_string(Macro.t) :: String.t @spec to_string(Macro.t, (Macro.t, String.t -> String.t)) :: String.t def to_string(tree, fun \\ fn(_ast, string) -> string end) @@ -437,11 +675,20 @@ defmodule Macro do end # Bits containers - def to_string({:<<>>, _, args} = ast, fun) do - fun.(ast, case Enum.map_join(args, ", ", &to_string(&1, fun)) do - "<" <> rest -> "<< <" <> rest <> " >>" - rest -> "<<" <> rest <> ">>" - end) + def to_string({:<<>>, _, parts} = ast, fun) do + if interpolated?(ast) do + fun.(ast, interpolate(ast, fun)) + else + result = Enum.map_join(parts, ", ", fn(part) -> + str = bitpart_to_string(part, fun) + if :binary.first(str) == ?< or :binary.last(str) == ?> do + "(" <> str <> ")" + else + str + end + end) + fun.(ast, "<<" <> result <> ">>") + end end # Tuple containers @@ -477,24 +724,31 @@ defmodule Macro do fun.(ast, "fn\n " <> block <> "\nend") end + # Ranges + def to_string({:.., _, args} = ast, fun) do + range = Enum.map_join(args, "..", &to_string(&1, fun)) + fun.(ast, range) + end + # left -> right - def to_string([{:->, _, _}|_] = ast, fun) do + def to_string([{:->, _, _} | _] = ast, fun) do fun.(ast, "(" <> arrow_to_string(ast, fun, true) <> ")") end # left when right def to_string({:when, _, [left, right]} = ast, fun) do - if right != [] and Keyword.keyword?(right) do - right = kw_list_to_string(right, fun) - else - right = fun.(ast, op_to_string(right, fun, :when, :right)) - end + right = + if right != [] and Keyword.keyword?(right) do + kw_list_to_string(right, fun) + else + fun.(ast, op_to_string(right, fun, :when, :right)) + end fun.(ast, op_to_string(left, fun, :when, :left) <> " when " <> right) end # Binary ops - def to_string({op, _, [left, right]} = ast, fun) when op in unquote(@binary_ops) do + def to_string({op, _, [left, right]} = ast, fun) when op in unquote(binary_ops) do fun.(ast, op_to_string(left, fun, op, :left) <> " #{op} " <> op_to_string(right, fun, op, :right)) end @@ -504,9 +758,29 @@ defmodule Macro do fun.(ast, "(" <> Enum.map_join(left, ", ", &to_string(&1, fun)) <> ") when " <> to_string(right, fun)) end + # Capture + def to_string({:&, _, [{:/, _, [{name, _, ctx}, arity]}]} = ast, fun) + when is_atom(name) and is_atom(ctx) 
and is_integer(arity) do + fun.(ast, "&" <> Atom.to_string(name) <> "/" <> to_string(arity, fun)) + end + + def to_string({:&, _, [{:/, _, [{{:., _, [mod, name]}, _, []}, arity]}]} = ast, fun) + when is_atom(name) and is_integer(arity) do + fun.(ast, "&" <> to_string(mod, fun) <> "." <> Atom.to_string(name) <> "/" <> to_string(arity, fun)) + end + + def to_string({:&, _, [arg]} = ast, fun) when not is_integer(arg) do + fun.(ast, "&(" <> to_string(arg, fun) <> ")") + end + + # left not in right + def to_string({:not, _, [{:in, _, [left, right]}]} = ast, fun) do + fun.(ast, to_string(left, fun) <> " not in " <> to_string(right, fun)) + end + # Unary ops def to_string({unary, _, [{binary, _, [_, _]} = arg]} = ast, fun) - when unary in unquote(@unary_ops) and binary in unquote(@binary_ops) do + when unary in unquote(unary_ops) and binary in unquote(binary_ops) do fun.(ast, Atom.to_string(unary) <> "(" <> to_string(arg, fun) <> ")") end @@ -514,25 +788,34 @@ defmodule Macro do fun.(ast, "not " <> to_string(arg, fun)) end - def to_string({op, _, [arg]} = ast, fun) when op in unquote(@unary_ops) do + def to_string({op, _, [arg]} = ast, fun) when op in unquote(unary_ops) do fun.(ast, Atom.to_string(op) <> to_string(arg, fun)) end # Access + def to_string({{:., _, [Access, :get]}, _, [{op, _, _} = left, right]} = ast, fun) + when op in unquote(binary_ops) do + fun.(ast, "(" <> to_string(left, fun) <> ")" <> to_string([right], fun)) + end + def to_string({{:., _, [Access, :get]}, _, [left, right]} = ast, fun) do fun.(ast, to_string(left, fun) <> to_string([right], fun)) end # All other calls def to_string({target, _, args} = ast, fun) when is_list(args) do - {list, last} = :elixir_utils.split_last(args) - fun.(ast, case kw_blocks?(last) do - true -> call_to_string_with_args(target, list, fun) <> kw_blocks_to_string(last, fun) - false -> call_to_string_with_args(target, args, fun) - end) + if sigil = sigil_call(ast, fun) do + sigil + else + {list, last} = :elixir_utils.split_last(args) + fun.(ast, case kw_blocks?(last) do + true -> call_to_string_with_args(target, list, fun) <> kw_blocks_to_string(last, fun) + false -> call_to_string_with_args(target, args, fun) + end) + end end - # Two-item tuples + # Two-element tuples def to_string({left, right}, fun) do to_string({:{}, [], [left, right]}, fun) end @@ -543,8 +826,9 @@ defmodule Macro do list == [] -> "[]" :io_lib.printable_list(list) -> - "'" <> Inspect.BitString.escape(IO.chardata_to_string(list), ?') <> "'" - Keyword.keyword?(list) -> + {escaped, _} = Inspect.BitString.escape(IO.chardata_to_string(list), ?') + IO.iodata_to_binary [?', escaped, ?'] + Inspect.List.keyword?(list) -> "[" <> kw_list_to_string(list, fun) <> "]" true -> "[" <> Enum.map_join(list, ", ", &to_string(&1, fun)) <> "]" @@ -554,21 +838,101 @@ defmodule Macro do # All other structures def to_string(other, fun), do: fun.(other, inspect(other, [])) + defp bitpart_to_string({:::, _, [left, right]} = ast, fun) do + result = + op_to_string(left, fun, :::, :left) <> + "::" <> + bitmods_to_string(right, fun, :::, :right) + fun.(ast, result) + end + + defp bitpart_to_string(ast, fun) do + to_string(ast, fun) + end + + defp bitmods_to_string({op, _, [left, right]} = ast, fun, _, _) when op in [:*, :-] do + result = + bitmods_to_string(left, fun, op, :left) <> + Atom.to_string(op) <> + bitmods_to_string(right, fun, op, :right) + fun.(ast, result) + end + + defp bitmods_to_string(other, fun, parent_op, side) do + op_to_string(other, fun, parent_op, side) + end + # Block keywords - 
@kw_keywords [:do, :catch, :rescue, :after, :else] + kw_keywords = [:do, :catch, :rescue, :after, :else] - defp kw_blocks?([_|_] = kw) do - Enum.all?(kw, &match?({x, _} when x in unquote(@kw_keywords), &1)) + defp kw_blocks?([{:do, _} | _] = kw) do + Enum.all?(kw, &match?({x, _} when x in unquote(kw_keywords), &1)) end defp kw_blocks?(_), do: false + # Check if we have an interpolated string. + defp interpolated?({:<<>>, _, [_ | _] = parts}) do + Enum.all?(parts, fn + {:::, _, [{{:., _, [Kernel, :to_string]}, _, [_]}, + {:binary, _, _}]} -> true + binary when is_binary(binary) -> true + _ -> false + end) + end + + defp interpolated?(_) do + false + end + + defp interpolate({:<<>>, _, parts}, fun) do + parts = Enum.map_join(parts, "", fn + {:::, _, [{{:., _, [Kernel, :to_string]}, _, [arg]}, {:binary, _, _}]} -> + "\#{" <> to_string(arg, fun) <> "}" + binary when is_binary(binary) -> + binary = inspect(binary, []) + :binary.part(binary, 1, byte_size(binary) - 2) + end) + + <> + end + defp module_to_string(atom, _fun) when is_atom(atom), do: inspect(atom, []) defp module_to_string(other, fun), do: call_to_string(other, fun) - defp call_to_string(atom, _fun) when is_atom(atom), do: Atom.to_string(atom) - defp call_to_string({:., _, [arg]}, fun), do: module_to_string(arg, fun) <> "." - defp call_to_string({:., _, [left, right]}, fun), do: module_to_string(left, fun) <> "." <> call_to_string(right, fun) - defp call_to_string(other, fun), do: to_string(other, fun) + defp sigil_call({func, _, [{:<<>>, _, _} = bin, args]} = ast, fun) when is_atom(func) and is_list(args) do + sigil = + case Atom.to_string(func) do + <<"sigil_", name>> -> + "~" <> <> <> + interpolate(bin, fun) <> + sigil_args(args, fun) + _ -> + nil + end + fun.(ast, sigil) + end + + defp sigil_call(_other, _fun) do + nil + end + + defp sigil_args([], _fun), do: "" + defp sigil_args(args, fun), do: fun.(args, List.to_string(args)) + + defp call_to_string(atom, _fun) when is_atom(atom), + do: Atom.to_string(atom) + defp call_to_string({:., _, [{:&, _, [val]} = arg]}, fun) when not is_integer(val), + do: "(" <> module_to_string(arg, fun) <> ")." + defp call_to_string({:., _, [{:fn, _, _} = arg]}, fun), + do: "(" <> module_to_string(arg, fun) <> ")." + defp call_to_string({:., _, [arg]}, fun), + do: module_to_string(arg, fun) <> "." + defp call_to_string({:., _, [left, right]}, fun) when is_atom(right), + do: module_to_string(left, fun) <> "." <> call_to_string_for_atom(right) + defp call_to_string({:., _, [left, right]}, fun), + do: module_to_string(left, fun) <> "." 
<> call_to_string(right, fun) + defp call_to_string(other, fun), + do: to_string(other, fun) defp call_to_string_with_args(target, args, fun) do target = call_to_string(target, fun) @@ -576,20 +940,27 @@ defmodule Macro do target <> "(" <> args <> ")" end + defp call_to_string_for_atom(atom) do + Inspect.Function.escape_name(atom) + end + defp args_to_string(args, fun) do {list, last} = :elixir_utils.split_last(args) - if last != [] and Keyword.keyword?(last) do - args = Enum.map_join(list, ", ", &to_string(&1, fun)) - if list != [], do: args = args <> ", " - args <> kw_list_to_string(last, fun) + if last != [] and Inspect.List.keyword?(last) do + prefix = + case list do + [] -> "" + _ -> Enum.map_join(list, ", ", &to_string(&1, fun)) <> ", " + end + prefix <> kw_list_to_string(last, fun) else Enum.map_join(args, ", ", &to_string(&1, fun)) end end defp kw_blocks_to_string(kw, fun) do - Enum.reduce(@kw_keywords, " ", fn(x, acc) -> + Enum.reduce(unquote(kw_keywords), " ", fn(x, acc) -> case Keyword.has_key?(kw, x) do true -> acc <> kw_block_to_string(x, Keyword.get(kw, x), fun) false -> acc @@ -602,7 +973,7 @@ defmodule Macro do Atom.to_string(key) <> "\n " <> block <> "\n" end - defp block_to_string([{:->, _, _}|_] = block, fun) do + defp block_to_string([{:->, _, _} | _] = block, fun) do Enum.map_join(block, "\n", fn({:->, _, [left, right]}) -> left = comma_join_or_empty_paren(left, fun, false) left <> "->\n " <> adjust_new_lines block_to_string(right, fun), "\n " @@ -621,7 +992,7 @@ defmodule Macro do defp map_to_string(list, fun) do cond do - Keyword.keyword?(list) -> kw_list_to_string(list, fun) + Inspect.List.keyword?(list) -> kw_list_to_string(list, fun) true -> map_list_to_string(list, fun) end end @@ -642,22 +1013,22 @@ defmodule Macro do end) end - defp parenthise(expr, fun) do + defp wrap_in_parenthesis(expr, fun) do "(" <> to_string(expr, fun) <> ")" end - defp op_to_string({op, _, [_, _]} = expr, fun, parent_op, side) when op in unquote(@binary_ops) do + defp op_to_string({op, _, [_, _]} = expr, fun, parent_op, side) when op in unquote(binary_ops) do {parent_assoc, parent_prec} = binary_op_props(parent_op) {_, prec} = binary_op_props(op) cond do parent_prec < prec -> to_string(expr, fun) - parent_prec > prec -> parenthise(expr, fun) + parent_prec > prec -> wrap_in_parenthesis(expr, fun) true -> # parent_prec == prec, so look at associativity. if parent_assoc == side do to_string(expr, fun) else - parenthise(expr, fun) + wrap_in_parenthesis(expr, fun) end end end @@ -694,7 +1065,7 @@ defmodule Macro do * Macros (local or remote) * Aliases are expanded (if possible) and return atoms - * Pseudo-variables (`__ENV__`, `__MODULE__` and `__DIR__`) + * Compilation environment macros (`__ENV__/0`, `__MODULE__/0` and `__DIR__/0`) * Module attributes reader (`@foo`) If the expression cannot be expanded, it returns the expression @@ -712,7 +1083,7 @@ defmodule Macro do Consider the implementation below: defmacro defmodule_with_length(name, do: block) do - length = length(Atom.to_char_list(name)) + length = length(Atom.to_charlist(name)) quote do defmodule unquote(name) do @@ -735,7 +1106,7 @@ defmodule Macro do That said, we need to expand the aliases node above to an atom, so we can retrieve its length. Expanding the node is - not straight-forward because we also need to expand the + not straightforward because we also need to expand the caller aliases. 
For example: alias MyHelpers, as: My @@ -752,7 +1123,7 @@ defmodule Macro do defmacro defmodule_with_length(name, do: block) do expanded = Macro.expand(name, __CALLER__) - length = length(Atom.to_char_list(expanded)) + length = length(Atom.to_charlist(expanded)) quote do defmodule unquote(name) do @@ -770,15 +1141,15 @@ defmodule Macro do defp do_expand_once({:__aliases__, _, _} = original, env) do case :elixir_aliases.expand(original, env.aliases, env.macro_aliases, env.lexical_tracker) do receiver when is_atom(receiver) -> - :elixir_lexical.record_remote(receiver, env.lexical_tracker) + :elixir_lexical.record_remote(receiver, env.function, env.lexical_tracker) {receiver, true} aliases -> - aliases = for alias <- aliases, do: elem(do_expand_once(alias, env), 0) + aliases = :lists.map(&elem(do_expand_once(&1, env), 0), aliases) case :lists.all(&is_atom/1, aliases) do true -> receiver = :elixir_aliases.concat(aliases) - :elixir_lexical.record_remote(receiver, env.lexical_tracker) + :elixir_lexical.record_remote(receiver, env.function, env.lexical_tracker) {receiver, true} false -> {original, false} @@ -786,15 +1157,7 @@ defmodule Macro do end end - # Expand @ calls - defp do_expand_once({:@, _, [{name, _, args}]} = original, env) when is_atom(args) or args == [] do - case (module = env.module) && Module.open?(module) do - true -> {escape(Module.get_attribute(module, name)), true} - false -> {original, false} - end - end - - # Expand pseudo-variables + # Expand compilation environment macros defp do_expand_once({:__MODULE__, _, atom}, env) when is_atom(atom), do: {env.module, true} defp do_expand_once({:__DIR__, _, atom}, env) when is_atom(atom), @@ -838,11 +1201,11 @@ defmodule Macro do end expand = :elixir_dispatch.expand_import(meta, {atom, length(args)}, args, - env, extra) + env, extra, true) case expand do {:ok, receiver, quoted} -> - next = :elixir_counter.next + next = :erlang.unique_integer() {:elixir_quote.linify_with_context_counter(0, {receiver, next}, quoted), true} {:ok, _receiver, _name, _args} -> {original, false} @@ -863,7 +1226,7 @@ defmodule Macro do case expand do {:ok, receiver, quoted} -> - next = :elixir_counter.next + next = :erlang.unique_integer() {:elixir_quote.linify_with_context_counter(0, {receiver, next}, quoted), true} :error -> {original, false} @@ -879,7 +1242,7 @@ defmodule Macro do be expanded. This function uses `expand_once/2` under the hood. Check - `expand_once/2` for more information and exmaples. + it out for more information and examples. """ def expand(tree, env) do expand_until({tree, true}, env) @@ -892,4 +1255,122 @@ defmodule Macro do defp expand_until({tree, false}, _env) do tree end + + @doc """ + Converts the given atom or binary to underscore format. + + If an atom is given, it is assumed to be an Elixir module, + so it is converted to a binary and then processed. + + This function was designed to underscore language identifiers/tokens, + that's why it belongs to the `Macro` module. Do not use it as a general + mechanism for underscoring strings as it does not support Unicode or + characters that are not valid in Elixir identifiers. 
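As a minimal sketch of the expansion described above, aliases are the simplest case: `Macro.expand_once/2` resolves them to atoms using the caller environment (here the default `__ENV__` from IEx):

    iex> Macro.expand_once(quote(do: String), __ENV__)
    String

`Macro.expand/2` gives the same result here, since an atom cannot be expanded any further.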
+
+  ## Examples
+
+      iex> Macro.underscore "FooBar"
+      "foo_bar"
+
+      iex> Macro.underscore "Foo.Bar"
+      "foo/bar"
+
+      iex> Macro.underscore Foo.Bar
+      "foo/bar"
+
+  In general, `underscore` can be thought of as the reverse of
+  `camelize`; however, in some cases formatting may be lost:
+
+      iex> Macro.underscore "SAPExample"
+      "sap_example"
+
+      iex> Macro.camelize "sap_example"
+      "SapExample"
+
+      iex> Macro.camelize "hello_10"
+      "Hello10"
+
+  """
+  def underscore(atom) when is_atom(atom) do
+    "Elixir." <> rest = Atom.to_string(atom)
+    underscore(rest)
+  end
+  def underscore(<<h, t::binary>>) do
+    <<to_lower_char(h)>> <> do_underscore(t, h)
+  end
+  def underscore("") do
+    ""
+  end
+
+  defp do_underscore(<<h, t, rest::binary>>, _)
+      when (h >= ?A and h <= ?Z) and not (t >= ?A and t <= ?Z) and t != ?. and t != ?_ do
+    <<?_, to_lower_char(h), t>> <> do_underscore(rest, t)
+  end
+  defp do_underscore(<<h, t::binary>>, prev)
+      when (h >= ?A and h <= ?Z) and not (prev >= ?A and prev <= ?Z) and prev != ?_ do
+    <<?_, to_lower_char(h)>> <> do_underscore(t, h)
+  end
+  defp do_underscore(<<?., t::binary>>, _) do
+    <<?/>> <> underscore(t)
+  end
+  defp do_underscore(<<h, t::binary>>, _) do
+    <<to_lower_char(h)>> <> do_underscore(t, h)
+  end
+  defp do_underscore(<<>>, _) do
+    <<>>
+  end
+
+  @doc """
+  Converts the given string to CamelCase format.
+
+  This function was designed to camelize language identifiers/tokens,
+  that's why it belongs to the `Macro` module. Do not use it as a general
+  mechanism for camelizing strings as it does not support Unicode or
+  characters that are not valid in Elixir identifiers.
+
+  ## Examples
+
+      iex> Macro.camelize "foo_bar"
+      "FooBar"
+
+  If uppercase characters are present, they are not modified in any way
+  as a mechanism to preserve acronyms:
+
+      iex> Macro.camelize "API.V1"
+      "API.V1"
+      iex> Macro.camelize "API_SPEC"
+      "API_SPEC"
+
+  """
+  @spec camelize(String.t) :: String.t
+  def camelize(string)
+
+  def camelize(""),
+    do: ""
+  def camelize(<<?_, t::binary>>),
+    do: camelize(t)
+  def camelize(<<h, t::binary>>),
+    do: <<to_upper_char(h)>> <> do_camelize(t)
+
+  defp do_camelize(<<?_, ?_, t::binary>>),
+    do: do_camelize(<<?_, t::binary>>)
+  defp do_camelize(<<?_, h, t::binary>>) when h >= ?a and h <= ?z,
+    do: <<to_upper_char(h)>> <> do_camelize(t)
+  defp do_camelize(<<?_, h, t::binary>>) when h >= ?0 and h <= ?9,
+    do: <<h>> <> do_camelize(t)
+  defp do_camelize(<<?_>>),
+    do: <<>>
+  defp do_camelize(<<?/, t::binary>>),
+    do: <<?.>> <> camelize(t)
+  defp do_camelize(<<h, t::binary>>),
+    do: <<h>> <> do_camelize(t)
+  defp do_camelize(<<>>),
+    do: <<>>
+
+  defp to_upper_char(char) when char >= ?a and char <= ?z, do: char - 32
+  defp to_upper_char(char), do: char
+
+  defp to_lower_char(char) when char >= ?A and char <= ?Z, do: char + 32
+  defp to_lower_char(char), do: char
 end
diff --git a/lib/elixir/lib/macro/env.ex b/lib/elixir/lib/macro/env.ex
index 98c5ed341c4..b9af05de5b7 100644
--- a/lib/elixir/lib/macro/env.ex
+++ b/lib/elixir/lib/macro/env.ex
@@ -3,33 +3,53 @@ defmodule Macro.Env do
   A struct that holds compile time environment information.
 
   The current environment can be accessed at any time as
-  `__ENV__`. Inside macros, the caller environment can be
-  accessed as `__CALLER__`. It contains the following fields:
+  `__ENV__/0`. Inside macros, the caller environment can be
+  accessed as `__CALLER__/0`.
+
+  An instance of `Macro.Env` must not be modified by hand. If you need to
+  create a custom environment to pass to `Code.eval_quoted/3`, use the
+  following trick:
+
+      def make_custom_env do
+        import SomeModule, only: [some_function: 2]
+        alias A.B.C
+        __ENV__
+      end
+
+  You may then call `make_custom_env()` to get a struct with the desired
+  imports and aliases included.
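As an illustrative sketch (hypothetical usage, building on the `make_custom_env/0`, `SomeModule` and `some_function/2` names above), the captured environment can then be handed to `Code.eval_quoted/3` so that the quoted code sees those imports and aliases:

    env = make_custom_env()
    # some_function/2 resolves through the import recorded in env
    Code.eval_quoted(quote(do: some_function(1, 2)), [], env)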
+ + It contains the following fields: * `module` - the current module name * `file` - the current file name as a binary * `line` - the current line as an integer - * `function` - a tuple as `{atom, integer`}, where the first - element is the function name and the seconds its arity; returns + * `function` - a tuple as `{atom, integer}`, where the first + element is the function name and the second its arity; returns `nil` if not inside a function * `context` - the context of the environment; it can be `nil` - (default context), inside a guard or inside an assign - * `aliases` - a list of two item tuples, where the first - item is the aliased name and the second the actual name + (default context), inside a guard or inside a match + * `aliases` - a list of two-element tuples, where the first + element is the aliased name and the second one the actual name * `requires` - the list of required modules * `functions` - a list of functions imported from each module * `macros` - a list of macros imported from each module * `macro_aliases` - a list of aliases defined inside the current macro * `context_modules` - a list of modules defined in the current context + * `lexical_tracker` - PID of the lexical tracker which is responsible for + keeping user info * `vars` - a list keeping all defined variables as `{var, context}` + + The following fields are private and must not be accessed or relied on: + * `export_vars` - a list keeping all variables to be exported in a construct (may be `nil`) - * `lexical_tracker` - PID of the lexical tracker which is responsible to - keep user info - * `local` - the module to expand local functions to + * `prematch_vars` - a list of variables defined before a match (is + `nil` when not inside a match) + """ - @type name_arity :: {atom, non_neg_integer} + @type name_arity :: {atom, arity} @type file :: binary @type line :: non_neg_integer @type aliases :: [{module, module}] @@ -40,12 +60,14 @@ defmodule Macro.Env do @type macros :: [{module, [name_arity]}] @type context_modules :: [module] @type vars :: [{atom, atom | non_neg_integer}] - @type export_vars :: vars | nil - @type lexical_tracker :: pid - @type local :: module | nil + @type lexical_tracker :: pid | nil + @type local :: atom | nil + + @opaque export_vars :: vars | nil + @opaque prematch_vars :: vars | nil @type t :: %{__struct__: __MODULE__, - module: module, + module: atom, file: file, line: line, function: name_arity | nil, @@ -58,8 +80,8 @@ defmodule Macro.Env do context_modules: context_modules, vars: vars, export_vars: export_vars, - lexical_tracker: lexical_tracker, - local: local} + prematch_vars: prematch_vars, + lexical_tracker: lexical_tracker} def __struct__ do %{__struct__: __MODULE__, @@ -75,15 +97,21 @@ defmodule Macro.Env do macro_aliases: [], context_modules: [], vars: [], - export_vars: nil, lexical_tracker: nil, - local: nil} + export_vars: nil, + prematch_vars: nil} + end + + def __struct__(kv) do + Enum.reduce kv, __struct__(), fn {k, v}, acc -> :maps.update(k, v, acc) end end @doc """ Returns a keyword list containing the file and line information as keys. """ + @spec location(t) :: Keyword.t + def location(env) def location(%{__struct__: Macro.Env, file: file, line: line}) do [file: file, line: line] end @@ -92,22 +120,27 @@ defmodule Macro.Env do Returns whether the compilation environment is currently inside a guard. 
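As a hypothetical illustration of how this check is typically used (the macro below is invented, not from the Elixir sources), a macro can consult `__CALLER__` to emit guard-safe code when it is expanded inside a guard:

    defmacro very_positive?(value) do
      if Macro.Env.in_guard?(__CALLER__) do
        # guards only allow a restricted set of expressions
        quote do: unquote(value) > 100
      else
        quote do
          IO.puts("checking #{inspect(unquote(value))}")
          unquote(value) > 100
        end
      end
    end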
""" + @spec in_guard?(t) :: boolean + def in_guard?(env) def in_guard?(%{__struct__: Macro.Env, context: context}), do: context == :guard @doc """ Returns whether the compilation environment is currently inside a match clause. """ + @spec in_match?(t) :: boolean + def in_match?(env) def in_match?(%{__struct__: Macro.Env, context: context}), do: context == :match @doc """ Returns the environment stacktrace. """ + @spec stacktrace(t) :: list def stacktrace(%{__struct__: Macro.Env} = env) do cond do - nil?(env.module) -> + is_nil(env.module) -> [{:elixir_compiler, :__FILE__, 1, relative_location(env)}] - nil?(env.function) -> + is_nil(env.function) -> [{env.module, :__MODULE__, 0, relative_location(env)}] true -> {name, arity} = env.function diff --git a/lib/elixir/lib/map.ex b/lib/elixir/lib/map.ex index d93e88f194b..62772b32051 100644 --- a/lib/elixir/lib/map.ex +++ b/lib/elixir/lib/map.ex @@ -1,42 +1,849 @@ defmodule Map do @moduledoc """ - A Dict implementation that works on maps. + A set of functions for working with maps. - Maps are key-value stores where keys are compared using - the match operator (`===`). Maps can be created with - the `%{}` special form defined in the `Kernel.SpecialForms` - module. + Maps are the "go to" key-value data structure in Elixir. Maps can be created + with the `%{}` syntax, and key-value pairs can be expressed as `key => value`: - For more information about the functions in this module and - their APIs, please consult the `Dict` module. + iex> %{} + %{} + iex> %{"one" => :two, 3 => "four"} + %{3 => "four", "one" => :two} + + Key-value pairs in a map do not follow any order (that's why the printed map + in the example above has a different order than the map that was created). + + Maps do not impose any restriction on the key type: anything can be a key in a + map. As a key-value structure, maps do not allow duplicated keys. Keys are + compared using the exact-equality operator (`===`). If colliding keys are defined + in a map literal, the last one prevails. + + When the key in a key-value pair is an atom, the `key: value` shorthand syntax + can be used (as in many other special forms), provided key-value pairs are put at + the end: + + iex> %{"hello" => "world", a: 1, b: 2} + %{:a => 1, :b => 2, "hello" => "world"} + + Keys in maps can be accessed through some of the functions in this module + (such as `Map.get/3` or `Map.fetch/2`) or through the `[]` syntax provided by + the `Access` module: + + iex> map = %{a: 1, b: 2} + iex> Map.fetch(map, :a) + {:ok, 1} + iex> map[:b] + 2 + iex> map["non_existing_key"] + nil + + The alternative access syntax `map.key` is provided alongside `[]` when the + map has a `:key` key; note that while `map[key]` will return `nil` if `map` + doesn't contain `key`, `map.key` will raise if `map` doesn't contain + the key `:key`. + + iex> map = %{foo: "bar", baz: "bong"} + iex> map.foo + "bar" + iex> map.non_existing_key + ** (KeyError) key :non_existing_key not found in: %{baz: "bong", foo: "bar"} + + Maps can be pattern matched on; when a map is on the left-hand side of a + pattern match, it will match if the map on the right-hand side contains the + keys on the left-hand side and their values match the ones on the left-hand + side. This means that an empty map matches every map. 
+ + iex> %{} = %{foo: "bar"} + %{foo: "bar"} + iex> %{a: a} = %{:a => 1, "b" => 2, [:c, :e, :e] => 3} + iex> a + 1 + iex> %{:c => 3} = %{:a => 1, 2 => :b} + ** (MatchError) no match of right hand side value: %{2 => :b, :a => 1} + + Variables can be used as map keys both when writing map literals as well as + when matching: + + iex> n = 1 + 1 + iex> %{n => :one} + %{1 => :one} + iex> %{^n => :one} = %{1 => :one, 2 => :two, 3 => :three} + %{1 => :one, 2 => :two, 3 => :three} + + Maps also support a specific update syntax to update the value stored under + *existing* atom keys: + + iex> map = %{one: 1, two: 2} + iex> %{map | one: "one"} + %{one: "one", two: 2} + iex> %{map | three: 3} + ** (KeyError) key :three not found + + ## Modules to work with maps + + This module aims to provide functions that perform operations specific to maps + (like accessing keys, updating values, and so on). For traversing maps as + collections, developers should use the `Enum` module that works across a + variety of data types. + + The `Kernel` module also provides a few functions to work with maps: for + example, `Kernel.map_size/1` to know the number of key-value pairs in a map or + `Kernel.is_map/1` to know if a term is a map. """ - use Dict + @type key :: any + @type value :: any + @compile {:inline, fetch: 2, put: 3, delete: 2, has_key?: 2, replace!: 3} - defdelegate [keys(map), values(map), size(map), merge(map1, map2), to_list(map)], to: :maps + @doc """ + Returns all keys from `map`. + + ## Examples + + iex> Map.keys(%{a: 1, b: 2}) + [:a, :b] + + """ + @spec keys(map) :: [key] + defdelegate keys(map), to: :maps + + @doc """ + Returns all values from `map`. + + ## Examples + + iex> Map.values(%{a: 1, b: 2}) + [1, 2] - @compile {:inline, fetch: 2, put: 3, delete: 2, has_key?: 2} + """ + @spec values(map) :: [value] + defdelegate values(map), to: :maps + + @doc """ + Converts `map` to a list. + + Each key-value pair in the map is converted to a two-element tuple `{key, + value}` in the resulting list. + + ## Examples + + iex> Map.to_list(%{a: 1}) + [a: 1] + iex> Map.to_list(%{1 => 2}) + [{1, 2}] + + """ + @spec to_list(map) :: [{term, term}] + defdelegate to_list(map), to: :maps @doc """ Returns a new empty map. + + ## Examples + + iex> Map.new + %{} + """ + @spec new :: map def new, do: %{} + @doc """ + Creates a map from an `enumerable`. + + Duplicated keys are removed; the latest one prevails. + + ## Examples + + iex> Map.new([{:b, 1}, {:a, 2}]) + %{a: 2, b: 1} + iex> Map.new([a: 1, a: 2, a: 3]) + %{a: 3} + + """ + @spec new(Enumerable.t) :: map + def new(enumerable) + def new(list) when is_list(list), do: :maps.from_list(list) + def new(%{__struct__: _} = struct), do: new_from_enum(struct) + def new(%{} = map), do: map + def new(enum), do: new_from_enum(enum) + + defp new_from_enum(enumerable) do + enumerable + |> Enum.to_list + |> :maps.from_list + end + + @doc """ + Creates a map from an `enumerable` via the given transformation function. + + Duplicated keys are removed; the latest one prevails. 
+ + ## Examples + + iex> Map.new([:a, :b], fn x -> {x, x} end) + %{a: :a, b: :b} + + """ + @spec new(Enumerable.t, (term -> {key, value})) :: map + def new(enumerable, transform) when is_function(transform, 1) do + enumerable + |> Enum.to_list + |> new_transform(transform, []) + end + + defp new_transform([], _fun, acc) do + acc + |> :lists.reverse + |> :maps.from_list + end + + defp new_transform([item | rest], fun, acc) do + new_transform(rest, fun, [fun.(item) | acc]) + end + + @doc """ + Returns whether the given `key` exists in the given `map`. + + ## Examples + + iex> Map.has_key?(%{a: 1}, :a) + true + iex> Map.has_key?(%{a: 1}, :b) + false + + Inlined by the compiler. + """ + @spec has_key?(map, key) :: boolean def has_key?(map, key), do: :maps.is_key(key, map) + @doc """ + Fetches the value for a specific `key` in the given `map`. + + If `map` contains the given `key` with value `value`, then `{:ok, value}` is + returned. If `map` doesn't contain `key`, `:error` is returned. + + ## Examples + + iex> Map.fetch(%{a: 1}, :a) + {:ok, 1} + iex> Map.fetch(%{a: 1}, :b) + :error + + Inlined by the compiler. + """ + @spec fetch(map, key) :: {:ok, value} | :error def fetch(map, key), do: :maps.find(key, map) - def put(map, key, val) do - :maps.put(key, val, map) + @doc """ + Fetches the value for a specific `key` in the given `map`, erroring out if + `map` doesn't contain `key`. + + If `map` contains the given `key`, the corresponding value is returned. If + `map` doesn't contain `key`, a `KeyError` exception is raised. + + ## Examples + + iex> Map.fetch!(%{a: 1}, :a) + 1 + iex> Map.fetch!(%{a: 1}, :b) + ** (KeyError) key :b not found in: %{a: 1} + + """ + @spec fetch!(map, key) :: value | no_return + def fetch!(map, key) do + case fetch(map, key) do + {:ok, value} -> value + :error -> raise KeyError, key: key, term: map + end + end + + @doc """ + Puts the given `value` under `key` unless the entry `key` + already exists in `map`. + + ## Examples + + iex> Map.put_new(%{a: 1}, :b, 2) + %{a: 1, b: 2} + iex> Map.put_new(%{a: 1, b: 2}, :a, 3) + %{a: 1, b: 2} + + """ + @spec put_new(map, key, value) :: map + def put_new(map, key, value) do + case has_key?(map, key) do + true -> map + false -> put(map, key, value) + end + end + + @doc """ + Alters the value stored under `key` to `value`, but only + if the entry `key` already exists in `map`. + + ## Examples + + iex> Map.replace(%{a: 1}, :b, 2) + %{a: 1} + iex> Map.replace(%{a: 1, b: 2}, :a, 3) + %{a: 3, b: 2} + + """ + @spec replace(map, key, value) :: map + def replace(map, key, value) do + case has_key?(map, key) do + true -> replace!(map, key, value) + false -> map + end end + @doc """ + Similar to `replace/3`, but will raise a `KeyError` + if the key does not exist in the map. + + ## Examples + + iex> Map.replace!(%{a: 1, b: 2}, :a, 3) + %{a: 3, b: 2} + iex> Map.replace!(%{a: 1}, :b, 2) + ** (KeyError) key :b not found in: %{a: 1} + + Inlined by the compiler. + """ + @spec replace!(map, key, value) :: map + def replace!(map, key, value) do + :maps.update(key, value, map) + end + + @doc """ + Evaluates `fun` and puts the result under `key` + in `map` unless `key` is already present. + + This function is useful in case you want to compute the value to put under + `key` only if `key` is not already present (e.g., the value is expensive to + calculate or generally difficult to setup and teardown again). 
+ + ## Examples + + iex> map = %{a: 1} + iex> fun = fn -> + ...> # some expensive operation here + ...> 3 + ...> end + iex> Map.put_new_lazy(map, :a, fun) + %{a: 1} + iex> Map.put_new_lazy(map, :b, fun) + %{a: 1, b: 3} + + """ + @spec put_new_lazy(map, key, (() -> value)) :: map + def put_new_lazy(map, key, fun) when is_function(fun, 0) do + case has_key?(map, key) do + true -> map + false -> put(map, key, fun.()) + end + end + + @doc """ + Returns a new map with all the key-value pairs in `map` where the key + is in `keys`. + + If `keys` contains keys that are not in `map`, they're simply ignored. + + ## Examples + + iex> Map.take(%{a: 1, b: 2, c: 3}, [:a, :c, :e]) + %{a: 1, c: 3} + + """ + @spec take(map, Enumerable.t) :: map + def take(map, keys) + + def take(map, keys) when is_map(map) do + keys + |> Enum.to_list + |> do_take(map, []) + end + + def take(non_map, _keys) do + :erlang.error({:badmap, non_map}) + end + + defp do_take([], _map, acc), do: :maps.from_list(acc) + defp do_take([key | rest], map, acc) do + acc = case fetch(map, key) do + {:ok, value} -> [{key, value} | acc] + :error -> acc + end + do_take(rest, map, acc) + end + + @doc """ + Gets the value for a specific `key` in `map`. + + If `key` is present in `map` with value `value`, then `value` is + returned. Otherwise, `default` is returned (which is `nil` unless + specified otherwise). + + ## Examples + + iex> Map.get(%{}, :a) + nil + iex> Map.get(%{a: 1}, :a) + 1 + iex> Map.get(%{a: 1}, :b) + nil + iex> Map.get(%{a: 1}, :b, 3) + 3 + + """ + @spec get(map, key, value) :: value + def get(map, key, default \\ nil) do + case fetch(map, key) do + {:ok, value} -> value + :error -> default + end + end + + @doc """ + Gets the value for a specific `key` in `map`. + + If `key` is present in `map` with value `value`, then `value` is + returned. Otherwise, `fun` is evaluated and its result is returned. + + This is useful if the default value is very expensive to calculate or + generally difficult to setup and teardown again. + + ## Examples + + iex> map = %{a: 1} + iex> fun = fn -> + ...> # some expensive operation here + ...> 13 + ...> end + iex> Map.get_lazy(map, :a, fun) + 1 + iex> Map.get_lazy(map, :b, fun) + 13 + + """ + @spec get_lazy(map, key, (() -> value)) :: value + def get_lazy(map, key, fun) when is_function(fun, 0) do + case fetch(map, key) do + {:ok, value} -> value + :error -> fun.() + end + end + + @doc """ + Puts the given `value` under `key` in `map`. + + ## Examples + + iex> Map.put(%{a: 1}, :b, 2) + %{a: 1, b: 2} + iex> Map.put(%{a: 1, b: 2}, :a, 3) + %{a: 3, b: 2} + + Inlined by the compiler. + """ + @spec put(map, key, value) :: map + def put(map, key, value) do + :maps.put(key, value, map) + end + + @doc """ + Deletes the entry in `map` for a specific `key`. + + If the `key` does not exist, returns `map` unchanged. + + ## Examples + + iex> Map.delete(%{a: 1, b: 2}, :a) + %{b: 2} + iex> Map.delete(%{b: 2}, :a) + %{b: 2} + + Inlined by the compiler. + """ + @spec delete(map, key) :: map def delete(map, key), do: :maps.remove(key, map) - def merge(map1, map2, callback) do - :maps.fold fn k, v2, acc -> - update(acc, k, v2, fn(v1) -> callback.(k, v1, v2) end) - end, map1, map2 + @doc """ + Merges two maps into one. + + All keys in `map2` will be added to `map1`, overriding any existing one + (i.e., the keys in `map2` "have precedence" over the ones in `map1`). 
+ + If you have a struct and you would like to merge a set of keys into the + struct, do not use this function, as it would merge all keys on the right + side into the struct, even if the key is not part of the struct. Instead, + use `Kernel.struct/2`. + + ## Examples + + iex> Map.merge(%{a: 1, b: 2}, %{a: 3, d: 4}) + %{a: 3, b: 2, d: 4} + + """ + @spec merge(map, map) :: map + defdelegate merge(map1, map2), to: :maps + + @doc """ + Merges two maps into one, resolving conflicts through the given `callback`. + + All keys in `map2` will be added to `map1`. The given function will be invoked + when there are duplicate keys; its arguments are `key` (the duplicate key), + `value1` (the value of `key` in `map1`), and `value2` (the value of `key` in + `map2`). The value returned by `callback` is used as the value under `key` in + the resulting map. + + ## Examples + + iex> Map.merge(%{a: 1, b: 2}, %{a: 3, d: 4}, fn _k, v1, v2 -> + ...> v1 + v2 + ...> end) + %{a: 4, b: 2, d: 4} + + """ + @spec merge(map, map, (key, value, value -> value)) :: map + def merge(map1, map2, callback) when is_function(callback, 3) do + if map_size(map1) > map_size(map2) do + :maps.fold fn key, val2, acc -> + update(acc, key, val2, fn val1 -> callback.(key, val1, val2) end) + end, map1, map2 + else + :maps.fold fn key, val2, acc -> + update(acc, key, val2, fn val1 -> callback.(key, val2, val1) end) + end, map2, map1 + end + end + + @doc """ + Updates the `key` in `map` with the given function. + + If `key` is present in `map` with value `value`, `fun` is invoked with + argument `value` and its result is used as the new value of `key`. If `key` is + not present in `map`, `initial` is inserted as the value of `key`. + + ## Examples + + iex> Map.update(%{a: 1}, :a, 13, &(&1 * 2)) + %{a: 2} + iex> Map.update(%{a: 1}, :b, 11, &(&1 * 2)) + %{a: 1, b: 11} + + """ + @spec update(map, key, value, (value -> value)) :: map + def update(map, key, initial, fun) when is_function(fun, 1) do + case fetch(map, key) do + {:ok, value} -> + put(map, key, fun.(value)) + :error -> + put(map, key, initial) + end + end + + @doc """ + Returns and removes the value associated with `key` in `map`. + + If `key` is present in `map` with value `value`, `{value, new_map}` is + returned where `new_map` is the result of removing `key` from `map`. If `key` + is not present in `map`, `{default, map}` is returned. + + ## Examples + + iex> Map.pop(%{a: 1}, :a) + {1, %{}} + iex> Map.pop(%{a: 1}, :b) + {nil, %{a: 1}} + iex> Map.pop(%{a: 1}, :b, 3) + {3, %{a: 1}} + + """ + @spec pop(map, key, value) :: {value, map} + def pop(map, key, default \\ nil) do + case map do + %{^key => value} -> {value, delete(map, key)} + %{} -> {default, map} + end + end + + @doc """ + Lazily returns and removes the value associated with `key` in `map`. + + If `key` is present in `map` with value `value`, `{value, new_map}` is + returned where `new_map` is the result of removing `key` from `map`. If `key` + is not present in `map`, `{fun_result, map}` is returned, where `fun_result` + is the result of applying `fun`. + + This is useful if the default value is very expensive to calculate or + generally difficult to setup and teardown again. 
+ + ## Examples + + iex> map = %{a: 1} + iex> fun = fn -> + ...> # some expensive operation here + ...> 13 + ...> end + iex> Map.pop_lazy(map, :a, fun) + {1, %{}} + iex> Map.pop_lazy(map, :b, fun) + {13, %{a: 1}} + + """ + @spec pop_lazy(map, key, (() -> value)) :: {value, map} + def pop_lazy(map, key, fun) when is_function(fun, 0) do + case fetch(map, key) do + {:ok, value} -> {value, delete(map, key)} + :error -> {fun.(), map} + end + end + + @doc """ + Drops the given `keys` from `map`. + + If `keys` contains keys that are not in `map`, they're simply ignored. + + ## Examples + + iex> Map.drop(%{a: 1, b: 2, c: 3}, [:b, :d]) + %{a: 1, c: 3} + + """ + @spec drop(map, Enumerable.t) :: map + def drop(map, keys) + + def drop(map, keys) when is_map(map) do + keys + |> Enum.to_list + |> drop_list(map) + end + + def drop(non_map, _keys) do + :erlang.error({:badmap, non_map}) + end + + defp drop_list([], acc), do: acc + defp drop_list([key | rest], acc) do + drop_list(rest, delete(acc, key)) + end + + @doc """ + Takes all entries corresponding to the given `keys` in `map` and extracts + them into a separate map. + + Returns a tuple with the new map and the old map with removed keys. + + Keys for which there are no entries in `map` are ignored. + + ## Examples + + iex> Map.split(%{a: 1, b: 2, c: 3}, [:a, :c, :e]) + {%{a: 1, c: 3}, %{b: 2}} + + """ + @spec split(map, Enumerable.t) :: {map, map} + def split(map, keys) + + def split(map, keys) when is_map(map) do + keys + |> Enum.to_list + |> do_split([], map) + end + + def split(non_map, _keys) do + :erlang.error({:badmap, non_map}) + end + + defp do_split([], inc, exc) do + {:maps.from_list(inc), exc} + end + defp do_split([key | rest], inc, exc) do + case fetch(exc, key) do + {:ok, value} -> + do_split(rest, [{key, value} | inc], delete(exc, key)) + :error -> + do_split(rest, inc, exc) + end + end + + @doc """ + Updates `key` with the given function. + + If `key` is present in `map` with value `value`, `fun` is invoked with + argument `value` and its result is used as the new value of `key`. If `key` is + not present in `map`, a `KeyError` exception is raised. + + ## Examples + + iex> Map.update!(%{a: 1}, :a, &(&1 * 2)) + %{a: 2} + + iex> Map.update!(%{a: 1}, :b, &(&1 * 2)) + ** (KeyError) key :b not found in: %{a: 1} + + """ + @spec update!(map, key, (value -> value)) :: map | no_return + def update!(%{} = map, key, fun) when is_function(fun, 1) do + case fetch(map, key) do + {:ok, value} -> + put(map, key, fun.(value)) + :error -> + raise KeyError, term: map, key: key + end end + def update!(map, _key, _fun), do: :erlang.error({:badmap, map}) + + @doc """ + Gets the value from `key` and updates it, all in one pass. + + `fun` is called with the current value under `key` in `map` (or `nil` if `key` + is not present in `map`) and must return a two-element tuple: the "get" value + (the retrieved value, which can be operated on before being returned) and the + new value to be stored under `key` in the resulting new map. `fun` may also + return `:pop`, which means the current value shall be removed from `map` and + returned (making this function behave like `Map.pop(map, key)`. + + The returned value is a tuple with the "get" value returned by + `fun` and a new map with the updated value under `key`. 
+ + ## Examples + + iex> Map.get_and_update(%{a: 1}, :a, fn current_value -> + ...> {current_value, "new value!"} + ...> end) + {1, %{a: "new value!"}} + + iex> Map.get_and_update(%{a: 1}, :b, fn current_value -> + ...> {current_value, "new value!"} + ...> end) + {nil, %{b: "new value!", a: 1}} + + iex> Map.get_and_update(%{a: 1}, :a, fn _ -> :pop end) + {1, %{}} + + iex> Map.get_and_update(%{a: 1}, :b, fn _ -> :pop end) + {nil, %{a: 1}} + + """ + @spec get_and_update(map, key, (value -> {get, value} | :pop)) :: {get, map} when get: term + def get_and_update(%{} = map, key, fun) when is_function(fun, 1) do + current = + case :maps.find(key, map) do + {:ok, value} -> value + :error -> nil + end + + case fun.(current) do + {get, update} -> + {get, :maps.put(key, update, map)} + :pop -> + {current, :maps.remove(key, map)} + other -> + raise "the given function must return a two-element tuple or :pop, got: #{inspect(other)}" + end + end + + def get_and_update(map, _key, _fun), do: :erlang.error({:badmap, map}) + + @doc """ + Gets the value from `key` and updates it. Raises if there is no `key`. + + Behaves exactly like `get_and_update/3`, but raises a `KeyError` exception if + `key` is not present in `map`. + + ## Examples + + iex> Map.get_and_update!(%{a: 1}, :a, fn current_value -> + ...> {current_value, "new value!"} + ...> end) + {1, %{a: "new value!"}} + + iex> Map.get_and_update!(%{a: 1}, :b, fn current_value -> + ...> {current_value, "new value!"} + ...> end) + ** (KeyError) key :b not found in: %{a: 1} + + iex> Map.get_and_update!(%{a: 1}, :a, fn _ -> + ...> :pop + ...> end) + {1, %{}} + + """ + @spec get_and_update!(map, key, (value -> {get, value})) :: {get, map} | no_return when get: term + def get_and_update!(%{} = map, key, fun) when is_function(fun, 1) do + case :maps.find(key, map) do + {:ok, value} -> + case fun.(value) do + {get, update} -> + {get, :maps.put(key, update, map)} + :pop -> + {value, :maps.remove(key, map)} + other -> + raise "the given function must return a two-element tuple or :pop, got: #{inspect(other)}" + end + :error -> + raise KeyError, term: map, key: key + end + end + + def get_and_update!(map, _key, _fun), do: :erlang.error({:badmap, map}) + + @doc """ + Converts a `struct` to map. + + It accepts the struct module or a struct itself and + simply removes the `__struct__` field from the given struct + or from a new struct generated from the given module. + + ## Example + + defmodule User do + defstruct [:name] + end + + Map.from_struct(User) + #=> %{name: nil} + + Map.from_struct(%User{name: "john"}) + #=> %{name: "john"} + + """ + @spec from_struct(atom | struct) :: map + def from_struct(struct) when is_atom(struct) do + :maps.remove(:__struct__, struct.__struct__) + end + + def from_struct(%{__struct__: _} = struct) do + :maps.remove(:__struct__, struct) + end + + @doc """ + Checks if two maps are equal. + + Two maps are considered to be equal if they contain + the same keys and those keys contain the same values. 
+ + ## Examples + + iex> Map.equal?(%{a: 1, b: 2}, %{b: 2, a: 1}) + true + iex> Map.equal?(%{a: 1, b: 2}, %{b: 1, a: 2}) + false + + """ + @spec equal?(map, map) :: boolean def equal?(%{} = map1, %{} = map2), do: map1 === map2 + + @doc false + # TODO: Remove on 2.0 + # (hard-deprecated in elixir_dispatch) + def size(map) do + map_size(map) + end end diff --git a/lib/elixir/lib/map_set.ex b/lib/elixir/lib/map_set.ex new file mode 100644 index 00000000000..40db95ffc4d --- /dev/null +++ b/lib/elixir/lib/map_set.ex @@ -0,0 +1,383 @@ +defmodule MapSet do + @moduledoc """ + Functions that work on sets. + + `MapSet` is the "go to" set data structure in Elixir. A set can be constructed + using `MapSet.new/0`: + + iex> MapSet.new + #MapSet<[]> + + A set can contain any kind of elements, and elements in a set don't have to be + of the same type. By definition, sets can't contain duplicate elements: when + inserting an element in a set where it's already present, the insertion is + simply a no-op. + + iex> map_set = MapSet.new + iex> MapSet.put(map_set, "foo") + #MapSet<["foo"]> + iex> map_set |> MapSet.put("foo") |> MapSet.put("foo") + #MapSet<["foo"]> + + A `MapSet` is represented internally using the `%MapSet{}` struct. This struct + can be used whenever there's a need to pattern match on something being a `MapSet`: + + iex> match?(%MapSet{}, MapSet.new()) + true + + Note that, however, the struct fields are private and must not be accessed + directly; use the functions in this module to perform operations on sets. + + `MapSet`s can also be constructed starting from other collection-type data + structures: for example, see `MapSet.new/1` or `Enum.into/2`. + """ + + @type value :: term + + @opaque t(value) :: %__MODULE__{map: %{optional(value) => []}} + @type t :: t(term) + + defstruct map: %{}, version: 2 + + @doc """ + Returns a new set. + + ## Examples + + iex> MapSet.new + #MapSet<[]> + + """ + @spec new :: t + def new(), do: %MapSet{} + + @doc """ + Creates a set from an enumerable. + + ## Examples + + iex> MapSet.new([:b, :a, 3]) + #MapSet<[3, :a, :b]> + iex> MapSet.new([3, 3, 3, 2, 2, 1]) + #MapSet<[1, 2, 3]> + + """ + @spec new(Enum.t) :: t + def new(enumerable) + + def new(%__MODULE__{} = map_set), do: map_set + def new(enumerable) do + map = + enumerable + |> Enum.to_list + |> new_from_list([]) + + %MapSet{map: map} + end + + @doc """ + Creates a set from an enumerable via the transformation function. + + ## Examples + + iex> MapSet.new([1, 2, 1], fn x -> 2 * x end) + #MapSet<[2, 4]> + + """ + @spec new(Enum.t, (term -> val)) :: t(val) when val: value + def new(enumerable, transform) when is_function(transform, 1) do + map = + enumerable + |> Enum.to_list + |> new_from_list_transform(transform, []) + + %MapSet{map: map} + end + + defp new_from_list([], acc) do + :maps.from_list(acc) + end + + defp new_from_list([item | rest], acc) do + new_from_list(rest, [{item, []} | acc]) + end + + defp new_from_list_transform([], _fun, acc) do + :maps.from_list(acc) + end + defp new_from_list_transform([item | rest], fun, acc) do + new_from_list_transform(rest, fun, [{fun.(item), []} | acc]) + end + + @doc """ + Deletes `value` from `map_set`. + + Returns a new set which is a copy of `map_set` but without `value`. 
+ + ## Examples + + iex> map_set = MapSet.new([1, 2, 3]) + iex> MapSet.delete(map_set, 4) + #MapSet<[1, 2, 3]> + iex> MapSet.delete(map_set, 2) + #MapSet<[1, 3]> + + """ + @spec delete(t(val1), val2) :: t(val1) when val1: value, val2: value + def delete(%MapSet{map: map} = map_set, value) do + %{map_set | map: Map.delete(map, value)} + end + + @doc """ + Returns a set that is `map_set1` without the members of `map_set2`. + + ## Examples + + iex> MapSet.difference(MapSet.new([1, 2]), MapSet.new([2, 3, 4])) + #MapSet<[1]> + + """ + @spec difference(t(val1), t(val2)) :: t(val1) when val1: value, val2: value + def difference(map_set1, map_set2) + + # If the first set is less than twice the size of the second map, + # it is fastest to re-accumulate items in the first set that are not + # present in the second set. + def difference(%MapSet{map: map1}, %MapSet{map: map2}) + when map_size(map1) < map_size(map2) * 2 do + map = + map1 + |> Map.keys + |> filter_not_in(map2) + + %MapSet{map: map} + end + + # If the second set is less than half the size of the first set, it's fastest + # to simply iterate through each item in the second set, deleting them from + # the first set. + def difference(%MapSet{map: map1} = map_set, %MapSet{map: map2}) do + %{map_set | map: Map.drop(map1, Map.keys(map2))} + end + + defp filter_not_in(keys, map2, acc \\ []) + defp filter_not_in([], _map2, acc), do: :maps.from_list(acc) + defp filter_not_in([key | rest], map2, acc) do + acc = + if Map.has_key?(map2, key) do + acc + else + [{key, []} | acc] + end + filter_not_in(rest, map2, acc) + end + + @doc """ + Checks if `map_set1` and `map_set2` have no members in common. + + ## Examples + + iex> MapSet.disjoint?(MapSet.new([1, 2]), MapSet.new([3, 4])) + true + iex> MapSet.disjoint?(MapSet.new([1, 2]), MapSet.new([2, 3])) + false + + """ + @spec disjoint?(t, t) :: boolean + def disjoint?(%MapSet{map: map1}, %MapSet{map: map2}) do + {map1, map2} = order_by_size(map1, map2) + + map1 + |> Map.keys + |> none_in?(map2) + end + + defp none_in?([], _) do + true + end + defp none_in?([key | rest], map2) do + case Map.has_key?(map2, key) do + true -> false + false -> none_in?(rest, map2) + end + end + + @doc """ + Checks if two sets are equal. + + The comparison between elements must be done using `===`. + + ## Examples + + iex> MapSet.equal?(MapSet.new([1, 2]), MapSet.new([2, 1, 1])) + true + iex> MapSet.equal?(MapSet.new([1, 2]), MapSet.new([3, 4])) + false + + """ + @spec equal?(t, t) :: boolean + def equal?(%MapSet{map: map1, version: version}, %MapSet{map: map2, version: version}) do + Map.equal?(map1, map2) + end + + # Elixir v1.5 change the map representation, so on + # version mismatch we need to compare the keys directly. + def equal?(%MapSet{map: map1}, %MapSet{map: map2}) do + map_size(map1) == map_size(map2) and map_subset?(Map.keys(map1), map2) + end + + @doc """ + Returns a set containing only members that `map_set1` and `map_set2` have in common. + + ## Examples + + iex> MapSet.intersection(MapSet.new([1, 2]), MapSet.new([2, 3, 4])) + #MapSet<[2]> + + iex> MapSet.intersection(MapSet.new([1, 2]), MapSet.new([3, 4])) + #MapSet<[]> + + """ + @spec intersection(t(val), t(val)) :: t(val) when val: value + def intersection(%MapSet{map: map1} = map_set, %MapSet{map: map2}) do + {map1, map2} = order_by_size(map1, map2) + %{map_set | map: Map.take(map2, Map.keys(map1))} + end + + @doc """ + Checks if `map_set` contains `value`. 
+ + ## Examples + + iex> MapSet.member?(MapSet.new([1, 2, 3]), 2) + true + iex> MapSet.member?(MapSet.new([1, 2, 3]), 4) + false + + """ + @spec member?(t, value) :: boolean + def member?(%MapSet{map: map}, value) do + Map.has_key?(map, value) + end + + @doc """ + Inserts `value` into `map_set` if `map_set` doesn't already contain it. + + ## Examples + + iex> MapSet.put(MapSet.new([1, 2, 3]), 3) + #MapSet<[1, 2, 3]> + iex> MapSet.put(MapSet.new([1, 2, 3]), 4) + #MapSet<[1, 2, 3, 4]> + + """ + @spec put(t(val), new_val) :: t(val | new_val) when val: value, new_val: value + def put(%MapSet{map: map} = map_set, value) do + %{map_set | map: Map.put(map, value, [])} + end + + @doc """ + Returns the number of elements in `map_set`. + + ## Examples + + iex> MapSet.size(MapSet.new([1, 2, 3])) + 3 + + """ + @spec size(t) :: non_neg_integer + def size(%MapSet{map: map}) do + map_size(map) + end + + @doc """ + Checks if `map_set1`'s members are all contained in `map_set2`. + + This function checks if `map_set1` is a subset of `map_set2`. + + ## Examples + + iex> MapSet.subset?(MapSet.new([1, 2]), MapSet.new([1, 2, 3])) + true + iex> MapSet.subset?(MapSet.new([1, 2, 3]), MapSet.new([1, 2])) + false + + """ + @spec subset?(t, t) :: boolean + def subset?(%MapSet{map: map1}, %MapSet{map: map2}) do + if map_size(map1) <= map_size(map2) do + map1 + |> Map.keys + |> map_subset?(map2) + else + false + end + end + + defp map_subset?([], _), do: true + defp map_subset?([key | rest], map2) do + if Map.has_key?(map2, key) do + map_subset?(rest, map2) + else + false + end + end + + @doc """ + Converts `map_set` to a list. + + ## Examples + + iex> MapSet.to_list(MapSet.new([1, 2, 3])) + [1, 2, 3] + + """ + @spec to_list(t(val)) :: [val] when val: value + def to_list(%MapSet{map: map}) do + Map.keys(map) + end + + @doc """ + Returns a set containing all members of `map_set1` and `map_set2`. + + ## Examples + + iex> MapSet.union(MapSet.new([1, 2]), MapSet.new([2, 3, 4])) + #MapSet<[1, 2, 3, 4]> + + """ + @spec union(t(val1), t(val2)) :: t(val1 | val2) when val1: value, val2: value + def union(%MapSet{map: map1, version: version} = map_set, %MapSet{map: map2, version: version}) do + %{map_set | map: Map.merge(map1, map2)} + end + def union(%MapSet{map: map1}, %MapSet{map: map2}) do + new_from_list(Map.keys(map1) ++ Map.keys(map2), []) + end + + defp order_by_size(map1, map2) when map_size(map1) > map_size(map2), do: {map2, map1} + defp order_by_size(map1, map2), do: {map1, map2} + + defimpl Enumerable do + def reduce(map_set, acc, fun), do: Enumerable.List.reduce(MapSet.to_list(map_set), acc, fun) + def member?(map_set, val), do: {:ok, MapSet.member?(map_set, val)} + def count(map_set), do: {:ok, MapSet.size(map_set)} + end + + defimpl Collectable do + def into(original) do + {original, fn + map_set, {:cont, x} -> MapSet.put(map_set, x) + map_set, :done -> map_set + _, :halt -> :ok + end} + end + end + + defimpl Inspect do + import Inspect.Algebra + + def inspect(map_set, opts) do + concat ["#MapSet<", Inspect.List.inspect(MapSet.to_list(map_set), opts), ">"] + end + end +end diff --git a/lib/elixir/lib/module.ex b/lib/elixir/lib/module.ex index 3853bfeac8c..6c087855d94 100644 --- a/lib/elixir/lib/module.ex +++ b/lib/elixir/lib/module.ex @@ -1,289 +1,400 @@ defmodule Module do @moduledoc ~S''' - This module provides many functions to deal with modules during - compilation time. It allows a developer to dynamically attach - documentation, add, delete and register attributes and so forth. 
+ Provides functions to deal with modules during compilation time. + + It allows a developer to dynamically add, delete and register + attributes, attach documentation and so forth. After a module is compiled, using many of the functions in this module will raise errors, since it is out of their scope to inspect runtime data. Most of the runtime data can be inspected - via the `__info__(attr)` function attached to each compiled module. + via the `__info__/1` function attached to each compiled module. ## Module attributes Each module can be decorated with one or more attributes. The following ones are currently defined by Elixir: - * `@after_compile` + ### `@after_compile` - A hook that will be invoked right after the current module is compiled. + A hook that will be invoked right after the current module is compiled. + Accepts a module or a `{module, function_name}`. See the "Compile callbacks" + section below. - Accepts a module or a tuple `{, }`. The function - must take two arguments: the module environment and its bytecode. - When just a module is provided, the function is assumed to be - `__after_compile__/2`. + ### `@before_compile` - ### Example + A hook that will be invoked before the module is compiled. + Accepts a module or a `{module, function_or_macro_name}` tuple. + See the "Compile callbacks" section below. - defmodule M do - @after_compile __MODULE__ + ### `@behaviour` (notice the British spelling) - def __after_compile__(env, _bytecode) do - IO.inspect env - end - end + Behaviours can be referenced by modules to ensure they implement + required specific function signatures defined by `@callback`. - * `@before_compile` + For example, you could specify a `URI.Parser` behaviour as follows: - A hook that will be invoked before the module is compiled. + defmodule URI.Parser do + @doc "Defines a default port" + @callback default_port() :: integer - Accepts a module or a tuple `{, }`. The - function/macro must take one argument: the module environment. If it's a - macro, its returned value will be injected at the end of the module definition - before the compilation starts. + @doc "Parses the given URL" + @callback parse(uri_info :: URI.t) :: URI.t + end - When just a module is provided, the function/macro is assumed to be - `__before_compile__/1`. + And then a module may use it as: - Note: unlike `@after_compile`, the callback function/macro must - be placed in a separate module (because when the callback is invoked, - the current module does not yet exist). + defmodule URI.HTTP do + @behaviour URI.Parser + def default_port(), do: 80 + def parse(info), do: info + end - ### Example + If the behaviour changes or `URI.HTTP` does not implement + one of the callbacks, a warning will be raised. - defmodule A do - defmacro __before_compile__(_env) do - quote do - def hello, do: "world" - end - end - end + ### `@impl` - defmodule B do - @before_compile A - end + To aid in the correct implementation of behaviours, you may optionally declare + `@impl` for implemented callbacks of a behaviour. This makes callbacks + explicit and can help you to catch errors in your code (the compiler will warn + you if you mark a function as `@impl` when in fact it is not a callback, and + vice versa). It also helps with maintainability by making it clear to other + developers that the function's purpose is to implement a callback. - * `@behaviour` (notice the British spelling) + Using `@impl` the example above can be rewritten as: - Specify an OTP or user-defined behaviour. 
+ defmodule URI.HTTP do + @behaviour URI.parser - ### Example + @impl true + def default_port(), do: 80 - defmodule M do - @behaviour gen_event + @impl true + def parse(info), do: info + end - # ... - end + You may pass either `false`, `true`, or a specific behaviour to `@impl`. - * `@compile` + defmodule Foo do + @behaviour Bar + @behaviour Baz - Define options for module compilation that are passed to the Erlang - compiler. + @impl true # will warn if neither Bar nor Baz specify a callback named bar/0 + def bar(), do: :ok - Accepts an atom, a tuple, or a list of atoms and tuples. + @impl Baz # Will warn if Baz does not specify a callback named baz/0 + def baz(), do: :ok + end - See http://www.erlang.org/doc/man/compile.html for the list of supported - options. + ### `@compile` - ### Example + Defines options for module compilation. This is used to configure + both Elixir and Erlang compilers, as any other compilation pass + added by external tools. For example: - defmodule M do - @compile {:inline, myfun: 1} + defmodule MyModule do + @compile {:inline, my_fun: 1} - def myfun(arg) do - to_string(arg) - end - end + def my_fun(arg) do + to_string(arg) + end + end - * `@doc` + Multiple uses of `@compile` will accumulate instead of overriding + previous ones. See the "Compile options" section below. - Provide documentation for the function or macro that follows the - attribute. + ### `@doc` - Accepts a string (often a heredoc) or `false` where `@doc false` will - make the function/macro invisible to the documentation extraction tools - like ExDoc. + Provides documentation for the function or macro that follows the + attribute. - Can be invoked more than once. + Accepts a string (often a heredoc) or `false` where `@doc false` will + make the function/macro invisible to documentation extraction tools + like ExDoc. For example: - ### Example + defmodule MyModule do + @doc "Hello world" + def hello do + "world" + end - defmodule M do - @doc "Hello world" - def hello do - "world" - end + @doc """ + Sums `a` to `b`. + """ + def sum(a, b) do + a + b + end + end - @doc """ - Sum. - """ - def sum(a, b) do - a + b - end - end + ### `@dialyzer` - * `@file` + Defines warnings to request or suppress when using a version of + `:dialyzer` that supports module attributes. - Change the filename used in stacktraces for the function or macro that - follows the attribute. + Accepts an atom, a tuple, or a list of atoms and tuples. For example: - Accepts a string. Can be used more than once. + defmodule MyModule do + @dialyzer {:nowarn_function, my_fun: 1} - ### Example + def my_fun(arg) do + M.not_a_function(arg) + end + end - defmodule M do - @doc "Hello world" - @file "hello.ex" - def hello do - "world" - end - end + For the list of supported warnings, see + [`:dialyzer` module](http://www.erlang.org/doc/man/dialyzer.html). - * `@moduledoc` + Multiple uses of `@dialyzer` will accumulate instead of overriding + previous ones. - Provide documentation for the current module. + ### `@external_resource` - Accepts a string (which is often a heredoc) or `false` where - `@moduledoc false` will make the module invisible to the - documentation extraction tools like ExDoc. + Specifies an external resource for the current module. - ### Example + Sometimes a module embeds information from an external file. This + attribute allows the module to annotate which external resources + have been used. 
- defmodule M do - @moduledoc """ - A very useful module - """ - end + Tools like Mix may use this information to ensure the module is + recompiled in case any of the external resources change. + ### `@file` - * `@on_definition` + Changes the filename used in stacktraces for the function or macro that + follows the attribute, such as: - A hook that will be invoked when each function or macro in the current - module is defined. Useful when annotating functions. + defmodule MyModule do + @doc "Hello world" + @file "hello.ex" + def hello do + "world" + end + end - Accepts a module or a tuple `{, }`. The function - must take 6 arguments: + ### `@moduledoc` - - the module environment - - kind: `:def`, `:defp`, `:defmacro`, or `:defmacrop` - - function/macro name - - list of expanded arguments - - list of expanded guards - - expanded function body + Provides documentation for the current module. - Note the hook receives the expanded arguments and it is invoked before - the function is stored in the module. So `Module.defines?/2` will return - false for the first clause of every function. + defmodule MyModule do + @moduledoc """ + A very useful module. + """ + end - If the function/macro being defined has multiple clauses, the hook will - be called for each clause. + Accepts a string (often a heredoc) or `false` where + `@moduledoc false` will make the module invisible to + documentation extraction tools like ExDoc. - Unlike other hooks, `@on_definition` will only invoke functions - and never macros. This is because the hook is invoked inside the context - of the function (and nested function definitions are not allowed in - Elixir). + ### `@on_definition` - When just a module is provided, the function is assumed to be - `__on_definition__/6`. + A hook that will be invoked when each function or macro in the current + module is defined. Useful when annotating functions. - ### Example + Accepts a module or a `{module, function_name}` tuple. See the + "Compile callbacks" section below. - defmodule H do - def on_def(_env, kind, name, args, guards, body) do - IO.puts "Defining #{kind} named #{name} with args:" - IO.inspect args - IO.puts "and guards" - IO.inspect guards - IO.puts "and body" - IO.puts Macro.to_string(body) - end - end + ### `@on_load` - defmodule M do - @on_definition {H, :on_def} + A hook that will be invoked whenever the module is loaded. - def hello(arg) when is_binary(arg) or is_list(arg) do - "Hello" <> to_string(arg) - end + Accepts the function name (as an atom) of a function in the current module or + `{function_name, 0}` tuple where `function_name` is the name of a function in + the current module. The function must have arity 0 (no arguments) and has to + return `:ok`, otherwise the loading of the module will be aborted. For + example: - def hello(_) do - :ok - end - end + defmodule MyModule do + @on_load :load_check - * `@on_load` + def load_check do + if some_condition() do + :ok + else + :abort + end + end - A hook that will be invoked whenever the module is loaded. + def some_condition do + false + end + end - Accepts a function atom of a function in the current module. The function - must have arity 0 (no arguments) and has to return `:ok`, otherwise the - loading of the module will be aborted. + ### `@vsn` - ### Example + Specify the module version. 
Accepts any valid Elixir value, for example: - defmodule M do - @on_load :load_check + defmodule MyModule do + @vsn "1.0" + end - def load_check do - if some_condition() do - :ok - else - nil - end - end + ### Typespec attributes - def some_condition do - false - end - end + The following attributes are part of typespecs and are also reserved by + Elixir: - * `@vsn` + * `@type` - defines a type to be used in `@spec` + * `@typep` - defines a private type to be used in `@spec` + * `@opaque` - defines an opaque type to be used in `@spec` + * `@spec` - provides a specification for a function + * `@callback` - provides a specification for a behaviour callback + * `@macrocallback` - provides a specification for a macro behaviour callback + * `@optional_callbacks` - specifies which behaviour callbacks and macro + behaviour callbacks are optional + * `@impl` - declares an implementation of a callback function or macro - Specify the module version. Accepts any valid Elixir value. + ### Custom attributes - ### Example + In addition to the built-in attributes outlined above, custom attributes may + also be added. A custom attribute is any valid identifier prefixed with an + `@` and followed by a valid Elixir value: - defmodule M do - @vsn "1.0" - end + defmodule MyModule do + @custom_attr [some: "stuff"] + end - * `@external_resource` + For more advanced options available when defining custom attributes, see + `register_attribute/3`. - Specify an external resource to the current module. + ## Compile callbacks - Many times a module embeds information from an external file. This - attribute allows the module to annotate which external resources - have been used. + There are three callbacks that are invoked when functions are defined, + as well as before and immediately after the module bytecode is generated. - Tools like Mix may use this information to ensure the module is - recompiled in case any of the external resources change. + ### `@after_compile` - The following attributes are part of typespecs and are also reserved by - Elixir (see `Kernel.Typespec` for more information about typespecs): + A hook that will be invoked right after the current module is compiled. - * `@type` - defines a type to be used in `@spec` - * `@typep` - defines a private type to be used in `@spec` - * `@opaque` - defines an opaque type to be used in `@spec` - * `@spec` - provides a specification for a function - * `@callback` - provides a specification for the behaviour callback + Accepts a module or a `{module, function_name}` tuple. The function + must take two arguments: the module environment and its bytecode. + When just a module is provided, the function is assumed to be + `__after_compile__/2`. - In addition to the built-in attributes outlined above, custom attributes may - also be added. A custom attribute is any valid identifier prefixed with an - `@` and followed by a valid Elixir value: + #### Example - defmodule M do - @custom_attr [some: "stuff"] + defmodule MyModule do + @after_compile __MODULE__ + + def __after_compile__(env, _bytecode) do + IO.inspect env end + end - For more advanced options available when defining custom attributes, see - `register_attribute/3`. + ### `@before_compile` + + A hook that will be invoked before the module is compiled. + + Accepts a module or a `{module, function_or_macro_name}` tuple. The + function/macro must take one argument: the module environment. If it's a + macro, its returned value will be injected at the end of the module definition + before the compilation starts. 
+ + When just a module is provided, the function/macro is assumed to be + `__before_compile__/1`. + + *Note*: unlike `@after_compile`, the callback function/macro must + be placed in a separate module (because when the callback is invoked, + the current module does not yet exist). + + #### Example + + defmodule A do + defmacro __before_compile__(_env) do + quote do + def hello, do: "world" + end + end + end + + defmodule B do + @before_compile A + end + + B.hello() + #=> "world" + + ### `@on_definition` + + A hook that will be invoked when each function or macro in the current + module is defined. Useful when annotating functions. + + Accepts a module or a `{module, function_name}` tuple. The function + must take 6 arguments: + + * the module environment + * the kind of the function/macro: `:def`, `:defp`, `:defmacro`, or `:defmacrop` + * the function/macro name + * the list of quoted arguments + * the list of quoted guards + * the squoted function body + + Note the hook receives the quoted arguments and it is invoked before + the function is stored in the module. So `Module.defines?/2` will return + `false` for the first clause of every function. + + If the function/macro being defined has multiple clauses, the hook will + be called for each clause. + + Unlike other hooks, `@on_definition` will only invoke functions and + never macros. This is to avoid `@on_definition` callbacks from + redefining functions that have just been defined in favor of more + explicit approaches. + + When just a module is provided, the function is assumed to be + `__on_definition__/6`. + + #### Example + + defmodule Hooks do + def on_def(_env, kind, name, args, guards, body) do + IO.puts "Defining #{kind} named #{name} with args:" + IO.inspect args + IO.puts "and guards" + IO.inspect guards + IO.puts "and body" + IO.puts Macro.to_string(body) + end + end + + defmodule MyModule do + @on_definition {Hooks, :on_def} + + def hello(arg) when is_binary(arg) or is_list(arg) do + "Hello" <> to_string(arg) + end + + def hello(_) do + :ok + end + end + + ## Compile options + + The `@compile` attribute accepts different options that are used by both + Elixir and Erlang compilers. Some of the common use cases are documented + below: + + * `@compile :debug_info` - includes `:debug_info` regardless of the + corresponding setting in `Code.compiler_options/1` - ## Runtime information about a module + * `@compile {:debug_info, false}` - disables `:debug_info` regardless + of the corresponding setting in `Code.compiler_options/1` - It is possible to query a module at runtime to find out which functions and - macros it defines, extract its docstrings, etc. See `__info__/1`. + * `@compile {:inline, some_fun: 2, other_fun: 3}` - inlines the given + name/arity pairs + + * `@compile {:autoload, false}` - disables automatic loading of + modules after compilation. Instead, the module will be loaded after + it is dispatched to + + You can see a handful more options used by the Erlang compiler in + the documentation for the [`:compile` module](http://www.erlang.org/doc/man/compile.html). ''' + @typep definition :: {atom, arity} + @typep def_kind :: :def | :defp | :defmacro | :defmacrop + @typep type_kind :: :type | :typep | :opaque + @doc """ Provides runtime information about functions and macros defined by the module, enables docstring extraction, etc. 
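A small, hypothetical illustration of the reflection described above (not from the Elixir sources):

    Map.__info__(:module)
    #=> Map
    Map.__info__(:functions) |> Keyword.has_key?(:delete)
    #=> true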
@@ -298,17 +409,20 @@ defmodule Module do * `:module` - module name (`Module == Module.__info__(:module)`) In addition to the above, you may also pass to `__info__/1` any atom supported - by Erlang's `module_info` function which also gets defined for each compiled - module. See http://erlang.org/doc/reference_manual/modules.html#id69430 for - more information. + by `:erlang.module_info/0` which also gets defined for each compiled module. + + For a list of supported attributes and more information, see [Modules – Erlang Reference Manual](http://www.erlang.org/doc/reference_manual/modules.html#id77056). """ def __info__(kind) @doc """ - Check if a module is open, i.e. it is currently being defined - and its attributes and functions can be modified. + Checks if a module is open. + + A module is "open" if it is currently being defined and its attributes and + functions can be modified. """ - def open?(module) do + @spec open?(module) :: boolean + def open?(module) when is_atom(module) do :elixir_module.is_open(module) end @@ -329,28 +443,34 @@ defmodule Module do Foo.sum(1, 2) #=> 3 - For convenience, you can my pass `__ENV__` as argument and - all options will be automatically extracted from the environment: + For convenience, you can pass any `Macro.Env` struct, such + as `__ENV__/0`, as the first argument or as options. Both + the module and all options will be automatically extracted + from the environment: defmodule Foo do contents = quote do: (def sum(a, b), do: a + b) - Module.eval_quoted __MODULE__, contents, [], __ENV__ + Module.eval_quoted __ENV__, contents end Foo.sum(1, 2) #=> 3 + Note that if you pass a `Macro.Env` struct as first argument + while also passing `opts`, they will be merged with `opts` + having precedence. """ - def eval_quoted(module, quoted, binding \\ [], opts \\ []) + @spec eval_quoted(module | Macro.Env.t, Macro.t, list, Keyword.t | Macro.Env.t) :: term + def eval_quoted(module_or_env, quoted, binding \\ [], opts \\ []) - def eval_quoted(%Macro.Env{} = env, quoted, binding, opts) do + def eval_quoted(%Macro.Env{} = env, quoted, binding, opts) when is_list(binding) and is_list(opts) do eval_quoted(env.module, quoted, binding, Keyword.merge(Map.to_list(env), opts)) end - def eval_quoted(module, quoted, binding, %Macro.Env{} = env) do + def eval_quoted(module, quoted, binding, %Macro.Env{} = env) when is_atom(module) and is_list(binding) do eval_quoted(module, quoted, binding, Map.to_list(env)) end - def eval_quoted(module, quoted, binding, opts) do + def eval_quoted(module, quoted, binding, opts) when is_atom(module) and is_list(binding) and is_list(opts) do assert_not_compiled!(:eval_quoted, module) :elixir_def.reset_last(module) {value, binding, _env, _scope} = @@ -378,20 +498,21 @@ defmodule Module do ## Differences from `defmodule` - `Module.create` works similarly to `defmodule` and + `Module.create/3` works similarly to `defmodule` and return the same results. While one could also use `defmodule` to define modules dynamically, this function is preferred when the module body is given by a quoted expression. - Another important distinction is that `Module.create` + Another important distinction is that `Module.create/3` allows you to control the environment variables used when defining the module, while `defmodule` automatically shares the same environment. 
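As a hedged illustration of the distinction described above, a quoted body can be turned into a brand-new module at runtime roughly as follows (the `Hello` module name is invented for the example):

    contents =
      quote do
        def world, do: true
      end

    # Module.create/3 requires at least a :file option; Macro.Env.location/1
    # extracts :file and :line from the caller's environment
    Module.create(Hello, contents, Macro.Env.location(__ENV__))

    Hello.world() #=> true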
""" + @spec create(module, Macro.t, Macro.Env.t | Keyword.t) :: {:module, module, binary, term} def create(module, quoted, opts) - def create(module, quoted, %Macro.Env{} = env) do + def create(module, quoted, %Macro.Env{} = env) when is_atom(module) do create(module, quoted, Map.to_list(env)) end @@ -399,6 +520,10 @@ defmodule Module do unless Keyword.has_key?(opts, :file) do raise ArgumentError, "expected :file to be given as option" end + + next = :erlang.unique_integer() + line = Keyword.get(opts, :line, 0) + quoted = :elixir_quote.linify_with_context_counter(line, {module, next}, quoted) :elixir_module.compile(module, quoted, [], :elixir.env_for_eval(opts)) end @@ -432,70 +557,61 @@ defmodule Module do """ @spec concat(binary | atom, binary | atom) :: atom - def concat(left, right) do + def concat(left, right) when (is_binary(left) or is_atom(left)) and (is_binary(right) or is_atom(right)) do :elixir_aliases.concat([left, right]) end @doc """ - Concatenates a list of aliases and returns a new alias only - if the alias was already referenced. If the alias was not - referenced yet, fails with `ArgumentError`. - It handles char lists, binaries and atoms. + Concatenates a list of aliases and returns a new alias only if the alias + was already referenced. + + If the alias was not referenced yet, fails with `ArgumentError`. + It handles charlists, binaries and atoms. ## Examples - iex> Module.safe_concat([Unknown, Module]) + iex> Module.safe_concat([Module, Unknown]) ** (ArgumentError) argument error iex> Module.safe_concat([List, Chars]) List.Chars """ - @spec safe_concat([binary | atom]) :: atom | no_return + @spec safe_concat([binary | atom]) :: atom def safe_concat(list) when is_list(list) do :elixir_aliases.safe_concat(list) end @doc """ - Concatenates two aliases and returns a new alias only - if the alias was already referenced. If the alias was not - referenced yet, fails with `ArgumentError`. - It handles char lists, binaries and atoms. + Concatenates two aliases and returns a new alias only if the alias was + already referenced. + + If the alias was not referenced yet, fails with `ArgumentError`. + It handles charlists, binaries and atoms. ## Examples - iex> Module.safe_concat(Unknown, Module) + iex> Module.safe_concat(Module, Unknown) ** (ArgumentError) argument error iex> Module.safe_concat(List, Chars) List.Chars """ - @spec safe_concat(binary | atom, binary | atom) :: atom | no_return - def safe_concat(left, right) do + @spec safe_concat(binary | atom, binary | atom) :: atom + def safe_concat(left, right) when (is_binary(left) or is_atom(left)) and (is_binary(right) or is_atom(right)) do :elixir_aliases.safe_concat([left, right]) end @doc """ - Gets an anonymous function from the given module, function - and arity. The module and function are not verified to exist. + Attaches documentation to a given function or type. - iex> fun = Module.function(Kernel, :is_atom, 1) - iex> fun.(:hello) - true + It expects the module the function/type belongs to, the line (a non-negative integer), + the kind (`:def`, `:defmacro`, `:type`, `:opaque`), a tuple `{, }`, + the function signature (the signature should be omitted for types) and the documentation, + which should be either a binary or a boolean. - """ - def function(mod, fun, arity) do - :erlang.make_fun(mod, fun, arity) - end - - @doc """ - Attaches documentation to a given function or type. 
It expects - the module the function/type belongs to, the line (a non negative - integer), the kind (`def` or `defmacro`), a tuple representing - the function and its arity, the function signature (the signature - should be omitted for types) and the documentation, which should - be either a binary or a boolean. + It returns `:ok` or `{:error, :private_doc}`. ## Examples @@ -505,84 +621,133 @@ defmodule Module do end """ - def add_doc(module, line, kind, tuple, signature \\ [], doc) + @spec add_doc(module, non_neg_integer, def_kind | type_kind, definition, list, String.t | boolean | nil) :: + :ok | {:error, :private_doc} + def add_doc(module, line, kind, function_tuple, signature \\ [], doc) - def add_doc(_module, _line, kind, _tuple, _signature, doc) when kind in [:defp, :defmacrop, :typep] do + def add_doc(_module, _line, kind, _function_tuple, _signature, doc) + when kind in [:defp, :defmacrop, :typep] do if doc, do: {:error, :private_doc}, else: :ok end - def add_doc(module, line, kind, tuple, signature, doc) when - kind in [:def, :defmacro, :type, :opaque] and (is_binary(doc) or is_boolean(doc) or doc == nil) do + def add_doc(module, line, kind, function_tuple, signature, doc) + when kind in [:def, :defmacro, :type, :opaque] and + (is_binary(doc) or is_boolean(doc) or doc == nil) do assert_not_compiled!(:add_doc, module) - table = docs_table_for(module) + table = data_table_for(module) - {signature, _} = :lists.mapfoldl fn(x, acc) -> - {simplify_signature(x, acc), acc + 1} - end, 1, signature + signature = simplify_signature(signature) - case :ets.lookup(table, tuple) do + case :ets.lookup(table, {:doc, function_tuple}) do [] -> - :ets.insert(table, {tuple, line, kind, signature, doc}) + :ets.insert(table, {{:doc, function_tuple}, line, kind, signature, doc}) :ok - [{tuple, line, _old_kind, old_sign, old_doc}] -> + [{doc_tuple, line, _old_kind, old_sign, old_doc}] -> :ets.insert(table, { - tuple, + doc_tuple, line, kind, merge_signatures(old_sign, signature, 1), - if(nil?(doc), do: old_doc, else: doc) - }) + if(is_nil(doc), do: old_doc, else: doc) + }) :ok end end # Simplify signatures to be stored in docs - defp simplify_signature({:\\, _, [left, right ]}, i) do - {:\\, [], [simplify_signature(left, i), right]} + defp simplify_signature(signature) do + {signature, acc} = :lists.mapfoldl(&simplify_signature/2, [], signature) + {signature, _} = :lists.mapfoldl(&expand_signature/2, {acc, acc}, signature) + signature end - defp simplify_signature({:%, _, [left, _]}, _i) when is_atom(left) do - last = List.last(String.split(Atom.to_string(left), ".")) - atom = String.to_atom(downcase(last)) - {atom, [], nil} + defp simplify_signature({:\\, _, [left, right]}, acc) do + {left, acc} = simplify_signature(left, acc) + {{:\\, [], [left, right]}, acc} end - defp simplify_signature({:=, _, [_, right]}, i) do - simplify_signature(right, i) + defp simplify_signature({:=, _, [_, right]}, acc) do + simplify_signature(right, acc) end - defp simplify_signature({var, _, atom}, _i) when is_atom(atom) do + defp simplify_signature({var, _, atom}, acc) when is_atom(atom) do case Atom.to_string(var) do - "_" <> rest -> {String.to_atom(rest), [], Elixir} - _ -> {var, [], nil} + "_" <> rest -> {{String.to_atom(rest), [], Elixir}, acc} + _ -> {{var, [], nil}, acc} + end + end + + defp simplify_signature({:%, _, [left, _]}, acc) when is_atom(left) do + module_name = simplify_module_name(left) + autogenerated(acc, module_name) + end + + defp simplify_signature({:%{}, _, _}, acc) do + autogenerated(acc, :map) + 
end + + defp simplify_signature(other, acc) when is_integer(other), do: autogenerated(acc, :int) + defp simplify_signature(other, acc) when is_boolean(other), do: autogenerated(acc, :bool) + defp simplify_signature(other, acc) when is_atom(other), do: autogenerated(acc, :atom) + defp simplify_signature(other, acc) when is_list(other), do: autogenerated(acc, :list) + defp simplify_signature(other, acc) when is_float(other), do: autogenerated(acc, :float) + defp simplify_signature(other, acc) when is_binary(other), do: autogenerated(acc, :binary) + defp simplify_signature(_, acc), do: autogenerated(acc, :arg) + + defp simplify_module_name(module) when is_atom(module) do + try do + split(module) + rescue + ArgumentError -> module + else + module_name -> String.to_atom(camelcase_to_underscore(List.last(module_name))) end end - defp simplify_signature(other, i) when is_integer(other), do: {:"int#{i}", [], Elixir} - defp simplify_signature(other, i) when is_boolean(other), do: {:"bool#{i}", [], Elixir} - defp simplify_signature(other, i) when is_atom(other), do: {:"atom#{i}", [], Elixir} - defp simplify_signature(other, i) when is_list(other), do: {:"list#{i}", [], Elixir} - defp simplify_signature(other, i) when is_float(other), do: {:"float#{i}", [], Elixir} - defp simplify_signature(other, i) when is_binary(other), do: {:"binary#{i}", [], Elixir} - defp simplify_signature(_, i), do: {:"arg#{i}", [], Elixir} + defp autogenerated(acc, key) do + {key, [key | acc]} + end - defp downcase(<>) when c >= ?A and c <= ?Z do - <> + defp expand_signature(key, {all_keys, acc}) when is_atom(key) do + case previous_values(key, all_keys, acc) do + {i, acc} -> {{:"#{key}#{i}", [], Elixir}, {all_keys, acc}} + :none -> {{key, [], Elixir}, {all_keys, acc}} + end end - defp downcase(<>) do - <> + defp expand_signature(term, {_, _} = acc) do + {term, acc} + end + + defp previous_values(key, all_keys, acc) do + total_occurrences = occurrences(key, all_keys) + + if total_occurrences == 1 do + :none + else + index = total_occurrences - occurrences(key, acc) + 1 + {index, :lists.delete(key, acc)} + end end - defp downcase(<<>>) do - <<>> + defp occurrences(key, list) do + length(:lists.filter(fn(el) -> el == key end, list)) end + defp camelcase_to_underscore(<>) when c >= ?A and c <= ?Z, + do: do_camelcase_to_underscore(rest, <>) + defp do_camelcase_to_underscore(<>, acc) when c >= ?A and c <= ?Z, + do: do_camelcase_to_underscore(rest, <>) + defp do_camelcase_to_underscore(<>, acc), + do: do_camelcase_to_underscore(rest, <>) + defp do_camelcase_to_underscore(<<>>, acc), + do: acc + # Merge - defp merge_signatures([h1|t1], [h2|t2], i) do - [merge_signature(h1, h2, i)|merge_signatures(t1, t2, i + 1)] + defp merge_signatures([h1 | t1], [h2 | t2], i) do + [merge_signature(h1, h2, i) | merge_signatures(t1, t2, i + 1)] end defp merge_signatures([], [], _) do @@ -609,8 +774,12 @@ defmodule Module do @doc """ Checks if the module defines the given function or macro. + Use `defines?/3` to assert for a specific type. + This function can only be used on modules that have not yet been compiled. + Use `Kernel.function_exported?/3` to check compiled modules. 
+ ## Examples defmodule Example do @@ -620,16 +789,23 @@ defmodule Module do end """ - def defines?(module, tuple) when is_tuple(tuple) do + @spec defines?(module, definition) :: boolean + def defines?(module, {function_or_macro_name, arity} = tuple) + when is_atom(module) and is_atom(function_or_macro_name) and + is_integer(arity) and arity >= 0 and arity <= 255 do assert_not_compiled!(:defines?, module) - table = function_table_for(module) - :ets.lookup(table, tuple) != [] + table = defs_table_for(module) + :ets.lookup(table, {:def, tuple}) != [] end @doc """ Checks if the module defines a function or macro of the - given `kind`. `kind` can be any of `:def`, `:defp`, - `:defmacro` or `:defmacrop`. + given `kind`. + + `kind` can be any of `:def`, `:defp`, `:defmacro`, or `:defmacrop`. + + This function can only be used on modules that have not yet been compiled. + Use `Kernel.function_exported?/3` to check compiled modules. ## Examples @@ -640,30 +816,35 @@ defmodule Module do end """ - def defines?(module, tuple, kind) do + @spec defines?(module, definition, def_kind) :: boolean + def defines?(module, {function_macro_name, arity} = tuple, def_kind) + when is_atom(module) and is_atom(function_macro_name) and + is_integer(arity) and arity >= 0 and arity <= 255 and + def_kind in [:def, :defp, :defmacro, :defmacrop] do assert_not_compiled!(:defines?, module) - table = function_table_for(module) - case :ets.lookup(table, tuple) do - [{_, ^kind, _, _, _, _, _}] -> true + table = defs_table_for(module) + case :ets.lookup(table, {:def, tuple}) do + [{_, ^def_kind, _, _, _, _}] -> true _ -> false end end @doc """ - Return all functions defined in `module`. + Returns all functions defined in `module`. ## Examples defmodule Example do def version, do: 1 - Module.definitions_in __MODULE__ #=> [{:version,0}] + Module.definitions_in __MODULE__ #=> [{:version, 0}] end """ - def definitions_in(module) do + @spec definitions_in(module) :: [definition] + def definitions_in(module) when is_atom(module) do assert_not_compiled!(:definitions_in, module) - table = function_table_for(module) - for {tuple, _, _, _, _, _, _} <- :ets.tab2list(table), do: tuple + table = defs_table_for(module) + :lists.concat :ets.match(table, {{:def, :'$1'}, :_, :_, :_, :_, :_}) end @doc """ @@ -674,61 +855,129 @@ defmodule Module do defmodule Example do def version, do: 1 - Module.definitions_in __MODULE__, :def #=> [{:version,0}] + Module.definitions_in __MODULE__, :def #=> [{:version, 0}] Module.definitions_in __MODULE__, :defp #=> [] end """ - def definitions_in(module, kind) do + @spec definitions_in(module, def_kind) :: [definition] + def definitions_in(module, def_kind) + when is_atom(module) and def_kind in [:def, :defp, :defmacro, :defmacrop] do assert_not_compiled!(:definitions_in, module) - table = function_table_for(module) - for {tuple, stored_kind, _, _, _, _, _} <- :ets.tab2list(table), stored_kind == kind, do: tuple + table = defs_table_for(module) + :lists.concat :ets.match(table, {{:def, :'$1'}, def_kind, :_, :_, :_, :_}) end @doc """ Makes the given functions in `module` overridable. + An overridable function is lazily defined, allowing a developer to customize it. See `Kernel.defoverridable/1` for more information and documentation. 
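As a rough sketch of the laziness described above (module names invented for illustration), a default implementation injected through `__using__/1` can be marked overridable and then replaced by the host module:

    defmodule DefaultGreeter do
      defmacro __using__(_opts) do
        quote do
          def greet, do: "hello"

          # Let the host module redefine greet/0 with its own implementation;
          # this runs while the host module is still open
          Module.make_overridable(__MODULE__, greet: 0)
        end
      end
    end

    defmodule MyGreeter do
      use DefaultGreeter
      def greet, do: "hi there"
    end

    MyGreeter.greet() #=> "hi there"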
""" - def make_overridable(module, tuples) do + @spec make_overridable(module, [definition]) :: :ok + def make_overridable(module, tuples) when is_atom(module) and is_list(tuples) do assert_not_compiled!(:make_overridable, module) - for tuple <- tuples do - case :elixir_def.lookup_definition(module, tuple) do - false -> - {name, arity} = tuple - raise "Cannot make function #{name}/#{arity} overridable because it was not defined" - clause -> - :elixir_def.delete_definition(module, tuple) + check_impls_for_overridable(module, tuples) + + :lists.foreach(fn + {function_name, arity} = tuple when is_atom(function_name) and is_integer(arity) and arity >= 0 and arity <= 255 -> + case :elixir_def.take_definition(module, tuple) do + false -> + raise ArgumentError, + "cannot make function #{function_name}/#{arity} overridable because it was not defined" + clause -> + neighbours = + if :elixir_compiler.get_opt(:internal) do + [] + else + Module.LocalsTracker.yank(module, tuple) + end - neighbours = if loaded?(Module.LocalsTracker) do - Module.LocalsTracker.yank(module, tuple) - else - [] - end + old = :elixir_overridable.overridable(module) + count = case :maps.find(tuple, old) do + {:ok, {count, _, _, _}} -> count + 1 + :error -> 1 + end + new = :maps.put(tuple, {count, clause, neighbours, false}, old) + :elixir_overridable.overridable(module, new) + end - old = get_attribute(module, :__overridable) - merged = :orddict.update(tuple, fn({count, _, _, _}) -> - {count + 1, clause, neighbours, false} - end, {1, clause, neighbours, false}, old) + other -> + raise ArgumentError, + "each element in tuple list has to be a {function_name :: atom, arity :: 0..255} tuple, got: #{inspect(other)}" + end, tuples) + end - put_attribute(module, :__overridable, merged) + @spec make_overridable(module, module) :: :ok + def make_overridable(module, behaviour) when is_atom(module) and is_atom(behaviour) do + case check_module_for_overridable(module, behaviour) do + :ok -> :ok + {:error, error_explanation} -> + raise ArgumentError, "cannot pass module #{inspect(behaviour)} as argument to defoverridable/1 because #{error_explanation}" + end + + behaviour_callbacks = + for callback <- behaviour.behaviour_info(:callbacks) do + {pair, _kind} = normalize_macro_or_function_callback(callback) + pair end + + tuples = for function_tuple <- definitions_in(module), + function_tuple in behaviour_callbacks, + do: function_tuple + + make_overridable(module, tuples) + end + + defp check_impls_for_overridable(module, tuples) do + table = data_table_for(module) + impls = :ets.lookup_element(table, {:elixir, :impls}, 2) + {overridable_impls, impls} = :lists.splitwith(fn {pair, _, _, _, _} -> pair in tuples end, impls) + + if overridable_impls != [] do + :ets.insert(table, {{:elixir, :impls}, impls}) + behaviours = :ets.lookup_element(table, :behaviour, 2) + check_impls(behaviours, overridable_impls) + end + end + + defp check_module_for_overridable(module, behaviour) do + behaviour_definitions = :ets.lookup_element(data_table_for(module), :behaviour, 2) + + cond do + not Code.ensure_compiled?(behaviour) -> + {:error, "it was not defined"} + not function_exported?(behaviour, :behaviour_info, 1) -> + {:error, "it does not define any callbacks"} + behaviour not in behaviour_definitions -> + {:error, "its corresponding behaviour is missing. 
Did you forget to " <> + "add @behaviour #{inspect(behaviour)}?"} + true -> + :ok + end + end + + defp normalize_macro_or_function_callback({function_name, arity}) do + case :erlang.atom_to_list(function_name) do + # Macros are always provided one extra argument in behaviour_info + 'MACRO-' ++ tail -> {{:erlang.list_to_atom(tail), arity - 1}, :defmacro} + _ -> {{function_name, arity}, :def} end end @doc """ Returns `true` if `tuple` in `module` is marked as overridable. """ - def overridable?(module, tuple) do - !!List.keyfind(get_attribute(module, :__overridable), tuple, 0) + @spec overridable?(module, definition) :: boolean + def overridable?(module, {function_name, arity} = tuple) + when is_atom(function_name) and is_integer(arity) and arity >= 0 and arity <= 255 do + :maps.is_key(tuple, :elixir_overridable.overridable(module)) end @doc """ - Puts an Erlang attribute to the given module with the given - key and value. The semantics of putting the attribute depends - if the attribute was registered or not via `register_attribute/3`. + Puts a module attribute with `key` and `value` in the given `module`. ## Examples @@ -737,44 +986,27 @@ defmodule Module do end """ - def put_attribute(module, key, value) when is_atom(key) do - assert_not_compiled!(:put_attribute, module) - table = data_table_for(module) - value = normalize_attribute(key, value) - acc = :ets.lookup_element(table, :__acc_attributes, 2) - - new = - if :lists.member(key, acc) do - case :ets.lookup(table, key) do - [{^key, old}] -> [value|old] - [] -> [value] - end - else - value - end - - :ets.insert(table, {key, new}) + @spec put_attribute(module, atom, term) :: :ok + def put_attribute(module, key, value) when is_atom(module) and is_atom(key) do + put_attribute(module, key, value, nil, nil) end @doc """ - Gets the given attribute from a module. If the attribute - was marked with `accumulate` with `Module.register_attribute/3`, - a list is always returned. + Gets the given attribute from a module. + + If the attribute was marked with `accumulate` with + `Module.register_attribute/3`, a list is always returned. + `nil` is returned if the attribute has not been marked with + `accumulate` and has not been set to any value. The `@` macro compiles to a call to this function. For example, the following code: @foo - Expands to: - - Module.get_attribute(__MODULE__, :foo, true) + Expands to something akin to: - Notice the third argument may be given to indicate a stacktrace - to be emitted when the attribute was not previously defined. - The default value for `warn` is nil for direct calls but the `@foo` - macro sets it to the proper stacktrace automatically, warning - every time `@foo` is used but not set previously. 
+ Module.get_attribute(__MODULE__, :foo) ## Examples @@ -788,41 +1020,15 @@ defmodule Module do end """ - @spec get_attribute(module, atom, warn :: nil | [tuple]) :: term - def get_attribute(module, key, warn \\ nil) when - is_atom(key) and (is_list(warn) or nil?(warn)) do - assert_not_compiled!(:get_attribute, module) - table = data_table_for(module) - - case :ets.lookup(table, key) do - [{^key, val}] -> val - [] -> - acc = :ets.lookup_element(table, :__acc_attributes, 2) - - cond do - :lists.member(key, acc) -> - [] - is_list(warn) -> - :elixir_errors.warn warn_info(warn), "undefined module attribute @#{key}, " <> - "please remove access to @#{key} or explicitly set it to nil before access\n" - nil - true -> - nil - end - end - end - - defp warn_info([entry|_]) do - opts = elem(entry, tuple_size(entry) - 1) - Exception.format_file_line(Keyword.get(opts, :file), Keyword.get(opts, :line)) <> " " - end - - defp warn_info([]) do - "" + @spec get_attribute(module, atom) :: term + def get_attribute(module, key) when is_atom(module) and is_atom(key) do + get_attribute(module, key, nil) end @doc """ - Deletes all attributes that match the given key. + Deletes the module attribute that matches the given key. + + It returns the deleted attribute value (or `nil` if nothing was set). ## Examples @@ -832,16 +1038,26 @@ defmodule Module do end """ - def delete_attribute(module, key) when is_atom(key) do + @spec delete_attribute(module, atom) :: term + def delete_attribute(module, key) when is_atom(module) and is_atom(key) do assert_not_compiled!(:delete_attribute, module) table = data_table_for(module) - :ets.delete(table, key) + case :ets.take(table, key) do + [{_, value, _accumulated? = true, _}] -> + :ets.insert(table, {key, [], true, nil}) + value + [{_, value, _, _}] -> + value + [] -> + nil + end end @doc """ - Registers an attribute. By registering an attribute, a developer - is able to customize how Elixir will store and accumulate the - attribute values. + Registers an attribute. + + By registering an attribute, a developer is able to customize + how Elixir will store and accumulate the attribute values. ## Options @@ -869,116 +1085,378 @@ defmodule Module do end """ - def register_attribute(module, new, opts) when is_atom(new) do + @spec register_attribute(module, atom, [{:accumulate, boolean}, {:persist, boolean}]) :: :ok + def register_attribute(module, attribute, options) when is_atom(module) and is_atom(attribute) and is_list(options) do assert_not_compiled!(:register_attribute, module) table = data_table_for(module) - if Keyword.get(opts, :persist) do - old = :ets.lookup_element(table, :__persisted_attributes, 2) - :ets.insert(table, {:__persisted_attributes, [new|old]}) + if Keyword.get(options, :persist) do + old_attribute = :ets.lookup_element(table, {:elixir, :persisted_attributes}, 2) + :ets.insert(table, {{:elixir, :persisted_attributes}, [attribute | old_attribute]}) end - if Keyword.get(opts, :accumulate) do - old = :ets.lookup_element(table, :__acc_attributes, 2) - :ets.insert(table, {:__acc_attributes, [new|old]}) + if Keyword.get(options, :accumulate) do + :ets.insert_new(table, {attribute, [], _accumulated? = true, _unread_line = nil}) || + :ets.update_element(table, attribute, {3, true}) end + + :ok end @doc """ - Split the given module name into binary parts. + Splits the given module name into binary parts. + + `module` has to be an Elixir module, as `split/1` won't work with Erlang-style + modules (for example, `split(:lists)` raises an error). 
+ + `split/1` also supports splitting the string representation of Elixir modules + (that is, the result of calling `Atom.to_string/1` with the module name). ## Examples - Module.split Very.Long.Module.Name.And.Even.Longer - #=> ["Very", "Long", "Module", "Name", "And", "Even", "Longer"] + iex> Module.split(Very.Long.Module.Name.And.Even.Longer) + ["Very", "Long", "Module", "Name", "And", "Even", "Longer"] + iex> Module.split("Elixir.String.Chars") + ["String", "Chars"] """ - def split(module) do - tl(String.split(String.Chars.to_string(module), ".")) + @spec split(module | String.t) :: [String.t, ...] + def split(module) + + def split(module) when is_atom(module) do + split(Atom.to_string(module), _original = module) + end + + def split(module) when is_binary(module) do + split(module, _original = module) + end + + defp split("Elixir." <> name, _original) do + String.split(name, ".") + end + + defp split(_module, original) do + raise ArgumentError, "expected an Elixir module, got: #{inspect(original)}" end @doc false - # Used internally to compile documentation. This function - # is private and must be used only internally. + # Used internally to compile documentation. + # This function is private and must be used only internally. def compile_doc(env, kind, name, args, _guards, _body) do module = env.module - line = env.line + table = data_table_for(module) arity = length(args) pair = {name, arity} - doc = get_attribute(module, :doc) + + {line, doc} = get_doc_info(table, env) + + # Arguments are not expanded for the docs, but we make an exception for + # module attributes and for structs (aliases to be precise). + args = Macro.prewalk args, fn + {:@, _, _} = attr -> + Macro.expand_once(attr, env) + {:%, meta, [aliases, fields]} -> + {:%, meta, [Macro.expand_once(aliases, env), fields]} + x -> + x + end case add_doc(module, line, kind, pair, args, doc) do :ok -> :ok {:error, :private_doc} -> - :elixir_errors.warn line, env.file, "function #{name}/#{arity} is private, @doc's are always discarded for private functions\n" + :elixir_errors.warn line, env.file, + "#{kind} #{name}/#{arity} is private, " <> + "@doc's are always discarded for private functions/macros/types" + end + + :ok + end + + @doc false + # Used internally to check the validity of arguments to @impl. + # This function is private and must be used only internally. 
+ def compile_impl(env, kind, name, args, _guards, _body) do + %{module: module, line: line, file: file} = env + table = data_table_for(module) + + case :ets.take(table, :impl) do + [{:impl, value, _, _}] -> + impls = :ets.lookup_element(table, {:elixir, :impls}, 2) + impl = {{name, length(args)}, kind, line, file, value} + :ets.insert(table, {{:elixir, :impls}, [impl | impls]}) + [] -> + :ok + end + + :ok + end + + @doc false + def check_behaviours_and_impls(env, table, all_definitions, overridable_pairs) do + behaviours = :ets.lookup_element(table, :behaviour, 2) + impls = :ets.lookup_element(table, {:elixir, :impls}, 2) + + if impls != [] do + non_implemented_callbacks = check_impls(behaviours, impls) + warn_missing_impls(env, non_implemented_callbacks, all_definitions, overridable_pairs) end + end + + defp check_impls(behaviours, impls) do + callbacks = callbacks_with_behaviour(behaviours) + + Enum.reduce(impls, callbacks, fn {pair, kind, line, file, value}, acc -> + case impl_warn(pair, kind, value, behaviours, callbacks) do + :ok -> :ok + {:error, message} -> :elixir_errors.warn(line, file, message) + end + Map.delete(acc, {pair, kind}) + end) + end - delete_attribute(module, :doc) + defp callbacks_with_behaviour(behaviours) do + for behaviour <- behaviours, + function_exported?(behaviour, :behaviour_info, 1), + callback <- behaviour.behaviour_info(:callbacks), + do: {normalize_macro_or_function_callback(callback), behaviour}, + into: %{} + end + + defp impl_warn(pair, kind, _, _, _) when kind in [:defp, :defmacrop] do + {:error, "#{format_kind_pair(kind, pair)} is private, @impl is always discarded for private functions/macros"} + end + defp impl_warn(pair, kind, value, [], _callbacks) do + {:error, "got @impl #{inspect value} for #{format_kind_pair(kind, pair)} but no behaviour was declared"} + end + defp impl_warn(pair, kind, false, _behaviours, callbacks) do + if behaviour = Map.get(callbacks, {pair, kind}) do + message = "got @impl false for #{format_kind_pair(kind, pair)} " <> + "but it is a callback specified in #{inspect(behaviour)}" + {:error, message} + else + :ok + end + end + defp impl_warn(pair, kind, true, _, callbacks) do + if Map.has_key?(callbacks, {pair, kind}) do + :ok + else + message = "got @impl true for #{format_kind_pair(kind, pair)} " <> + "but no behaviour specifies this callback#{known_callbacks(callbacks)}" + {:error, message} + end + end + defp impl_warn(pair, kind, behaviour, behaviours, callbacks) do + cond do + Map.get(callbacks, {pair, kind}) == behaviour -> + :ok + behaviour not in behaviours -> + message = "got @impl #{inspect behaviour} for #{format_kind_pair(kind, pair)} " <> + "but the given behaviour was not declared with @behaviour" + {:error, message} + true -> + message = "got @impl #{inspect behaviour} for #{format_kind_pair(kind, pair)} " <> + "but the behaviour does not specify this callback#{known_callbacks(callbacks)}" + {:error, message} + end + end + + defp warn_missing_impls(_env, callbacks, _defs, _) when map_size(callbacks) == 0 do + :ok + end + defp warn_missing_impls(env, non_implemented_callbacks, defs, overridable_pairs) do + for {pair, kind, meta, _clauses} <- defs, + kind in [:def, :defmacro], + pair not in overridable_pairs, + behaviour = Map.get(non_implemented_callbacks, {pair, kind}) do + message = "module attribute @impl was not set for callback " <> + "#{format_kind_pair(kind, pair)} (callback specified in #{inspect behaviour}). 
" <> + "This either means you forgot to add the \"@impl true\" annotation before the " <> + "definition or that you are accidentally overriding a callback" + :elixir_errors.warn(:elixir_utils.get_line(meta), env.file, message) + end + + :ok + end + + defp format_kind_pair(kind, {name, arity}) do + "#{kind} #{name}/#{arity}" + end + + defp known_callbacks(callbacks) when map_size(callbacks) == 0 do + ". There are no known callbacks, please specify the proper @behaviour " <> + "and make sure they define callbacks" + end + defp known_callbacks(callbacks) do + formatted = for {{{name, arity}, kind}, module} <- callbacks do + "\n * " <> Exception.format_mfa(module, name, arity) <> "(#{kind})" + end + ". The known callbacks are:\n#{formatted}" end @doc false - # Used internally to compile types. This function - # is private and must be used only internally. - def store_typespec(module, key, value) when is_atom(key) do + # Used internally to compile types. + # This function is private and must be used only internally. + def store_typespec(module, key, value) when is_atom(module) and is_atom(key) do assert_not_compiled!(:put_attribute, module) table = data_table_for(module) new = case :ets.lookup(table, key) do - [{^key, old}] -> [value|old] + [{^key, old, _, _}] -> [value | old] [] -> [value] end - :ets.insert(table, {key, new}) + :ets.insert(table, {key, new, true, nil}) + end + + @doc false + # Used internally by Kernel's @. + # This function is private and must be used only internally. + def get_attribute(module, key, stack) when is_atom(key) do + assert_not_compiled!(:get_attribute, module) + table = data_table_for(module) + + case :ets.lookup(table, key) do + [{^key, val, _, _}] -> + :ets.update_element(table, key, {4, nil}) + val + [] when is_list(stack) -> + # TODO: Consider raising instead of warning on v2.0 as it usually cascades + IO.warn "undefined module attribute @#{key}, " <> + "please remove access to @#{key} or explicitly set it before access", stack + nil + [] -> + nil + end + end + + @doc false + # Used internally by Kernel's @. + # This function is private and must be used only internally. + def put_attribute(module, key, value, stack, unread_line) when is_atom(key) do + assert_not_compiled!(:put_attribute, module) + table = data_table_for(module) + value = preprocess_attribute(key, value) + + # TODO: Remove on Elixir v2.0 + case value do + {:parse_transform, _} when key == :compile and is_list(stack) -> + IO.warn "@compile {:parse_transform, _} is deprecated. " <> + "Elixir will no longer support Erlang-based transforms in future versions", stack + _ -> + :ok + end + + case :ets.lookup(table, key) do + [{^key, {line, <<_::binary>>}, accumulated?, _unread_line}] + when key in [:doc, :typedoc, :moduledoc] and is_list(stack) -> + IO.warn "redefining @#{key} attribute previously set at line #{line}", stack + :ets.insert(table, {key, value, accumulated?, unread_line}) + + [{^key, current, _accumulated? 
= true, _read?}] -> + :ets.insert(table, {key, [value | current], true, unread_line}) + + _ -> + :ets.insert(table, {key, value, false, unread_line}) + end + + :ok end ## Helpers - defp normalize_attribute(:on_load, atom) when is_atom(atom) do - {atom, 0} + defp preprocess_attribute(key, value) when key in [:moduledoc, :typedoc, :doc] do + case value do + {line, doc} when is_integer(line) and (is_binary(doc) or is_boolean(doc) or is_nil(doc)) -> + value + {line, doc} when is_integer(line) -> + raise ArgumentError, + "expected the #{key} attribute to contain a binary, a boolean, or nil, got: #{inspect(doc)}" + _other -> + raise ArgumentError, + "expected the #{key} attribute to be {line, doc} (where \"doc\" is " <> + "a binary, a boolean, or nil), got: #{inspect(value)}" + end + end + + defp preprocess_attribute(:on_load, value) do + case value do + atom when is_atom(atom) -> + {atom, 0} + {atom, 0} = tuple when is_atom(atom) -> + tuple + other -> + raise ArgumentError, + "expected the @on_load attribute to be an atom or a " <> + "{atom, 0} tuple, got: #{inspect(other)}" + end end - defp normalize_attribute(:behaviour, atom) when is_atom(atom) do - Code.ensure_compiled(atom) + defp preprocess_attribute(:impl, value) do + case value do + bool when is_boolean(bool) -> + value + module when is_atom(module) and module != nil -> + # Attempt to compile behaviour but ignore failure (will warn later) + _ = Code.ensure_compiled(module) + value + other -> + raise ArgumentError, + "expected impl attribute to contain a module or a boolean, got: #{inspect(other)}" + end + end + + defp preprocess_attribute(:behaviour, atom) when is_atom(atom) do + # Attempt to compile behaviour but ignore failure (will warn later) + _ = Code.ensure_compiled(atom) atom end - defp normalize_attribute(:file, file) when is_binary(file) do + defp preprocess_attribute(:file, file) when is_binary(file) do file end - defp normalize_attribute(key, atom) when is_atom(atom) and - key in [:before_compile, :after_compile, :on_definition] do - {atom, :"__#{key}__"} - end + defp preprocess_attribute(:before_compile, atom) when is_atom(atom), + do: {atom, :__before_compile__} + defp preprocess_attribute(:after_compile, atom) when is_atom(atom), + do: {atom, :__after_compile__} + defp preprocess_attribute(:on_definition, atom) when is_atom(atom), + do: {atom, :__on_definition__} - defp normalize_attribute(key, _value) when key in [:type, :typep, :export_type, :opaque, :callback] do - raise ArgumentError, "attributes type, typep, export_type, opaque and callback " <> - "must be set via Kernel.Typespec" + defp preprocess_attribute(key, _value) when key in [:type, :typep, :export_type, :opaque, :callback, :macrocallback] do + raise ArgumentError, "attributes type, typep, export_type, opaque, callback, and macrocallback" <> + "must be set directly via the @ notation" end - defp normalize_attribute(_key, value) do + defp preprocess_attribute(_key, value) do value end - defp data_table_for(module) do - module + defp get_doc_info(table, env) do + case :ets.take(table, :doc) do + [{:doc, {_, _} = pair, _, _}] -> + pair + [] -> + case :ets.lookup(table, :impl) do + [{:impl, value, _, _}] when value != false -> {env.line, false} + _ -> {env.line, nil} + end + end end - defp function_table_for(module) do - :elixir_def.table(module) + defp data_table_for(module) do + :elixir_module.data_table(module) end - defp docs_table_for(module) do - :elixir_module.docs_table(module) + defp defs_table_for(module) do + :elixir_module.defs_table(module) end defp 
assert_not_compiled!(fun, module) do open?(module) || raise ArgumentError, - "could not call #{fun} on module #{inspect module} because it was already compiled" + "could not call #{fun} with argument #{inspect module} because the module is already compiled" end - - defp loaded?(module), do: is_tuple :code.is_loaded(module) end diff --git a/lib/elixir/lib/module/locals_tracker.ex b/lib/elixir/lib/module/locals_tracker.ex index 2a2da62ad8c..e770677cde1 100644 --- a/lib/elixir/lib/module/locals_tracker.ex +++ b/lib/elixir/lib/module/locals_tracker.ex @@ -1,15 +1,15 @@ -# This is a module Elixir responsible for tracking +# This is an Elixir module responsible for tracking # calls in order to extract Elixir modules' behaviour # during compilation time. # # ## Implementation # # The implementation uses the digraph module to track -# all dependencies. The graph starts with one main vertice: +# all dependencies. The graph starts with one main vertex: # # * `:local` - points to local functions # -# We also have can the following vertices: +# We can also have the following vertices: # # * `Module` - a module that was invoked via an import # * `{name, arity}` - a local function/arity pair @@ -30,7 +30,7 @@ # * out neighbours: `Module` # # Note that since this is required for bootstrap, we can't use -# any of the `GenServer.Behaviour` conveniences. +# any of the `GenServer` conveniences. defmodule Module.LocalsTracker do @moduledoc false @@ -65,32 +65,37 @@ defmodule Module.LocalsTracker do """ @spec reachable(ref) :: [local] def reachable(ref) do - d = :gen_server.call(to_pid(ref), :digraph, @timeout) - reduce_reachable(d, :local, []) + ref + |> to_pid() + |> :gen_server.call(:digraph, @timeout) + |> reachable_from(:local) + |> :sets.to_list() + end + + defp reachable_from(d, starting) do + reduce_reachable(d, starting, :sets.new) end defp reduce_reachable(d, vertex, vertices) do neighbours = :digraph.out_neighbours(d, vertex) - neighbours = (for {_, _} = t <- neighbours, do: t) |> :ordsets.from_list - remaining = :ordsets.subtract(neighbours, vertices) - vertices = :ordsets.union(neighbours, vertices) - :lists.foldl(&reduce_reachable(d, &1, &2), vertices, remaining) + neighbours = (for {_, _} = t <- neighbours, do: t) |> :sets.from_list + remaining = :sets.subtract(neighbours, vertices) + vertices = :sets.union(neighbours, vertices) + :sets.fold(&reduce_reachable(d, &1, &2), vertices, remaining) end defp to_pid(pid) when is_pid(pid), do: pid defp to_pid(mod) when is_atom(mod) do table = :elixir_module.data_table(mod) - [{_, val}] = :ets.lookup(table, :__locals_tracker) - val + :ets.lookup_element(table, {:elixir, :locals_tracker}, 2) end # Internal API - # Starts the tracker and returns its pid. + # Starts the tracker and returns its PID. @doc false def start_link do - {:ok, pid} = :gen_server.start_link(__MODULE__, [], []) - pid + :gen_server.start_link(__MODULE__, [], []) end # Adds a definition into the tracker. A public @@ -119,7 +124,7 @@ defmodule Module.LocalsTracker do :gen_server.cast(pid, {:add_local, from, to}) end - # Adds a import dispatch to the given target. + # Adds an import dispatch to the given target. 
@doc false def add_import(pid, function, module, target) when is_atom(module) and is_tuple(target) do :gen_server.cast(pid, {:add_import, function, module, target}) @@ -134,9 +139,7 @@ defmodule Module.LocalsTracker do # Reattach a previously yanked node @doc false def reattach(pid, kind, tuple, neighbours) do - pid = to_pid(pid) - add_definition(pid, kind, tuple) - :gen_server.cast(pid, {:reattach, tuple, neighbours}) + :gen_server.cast(to_pid(pid), {:reattach, kind, tuple, neighbours}) end # Collecting all conflicting imports with the given functions @@ -144,11 +147,11 @@ defmodule Module.LocalsTracker do def collect_imports_conflicts(pid, all_defined) do d = :gen_server.call(pid, :digraph, @timeout) - for {name, arity} <- all_defined, + for {{name, arity}, _, meta, _} <- all_defined, :digraph.in_neighbours(d, {:import, name, arity}) != [], n = :digraph.out_neighbours(d, {:import, name, arity}), n != [] do - {n, name, arity} + {meta, {n, name, arity}} end end @@ -156,36 +159,56 @@ defmodule Module.LocalsTracker do # given also accounting the expected amount of default # clauses a private function have. @doc false - def collect_unused_locals(pid, private) do - reachable = reachable(pid) - :lists.foldl(&collect_unused_locals(&1, &2, reachable), [], private) + def collect_unused_locals(ref, private) do + d = :gen_server.call(to_pid(ref), :digraph, @timeout) + reachable = reachable_from(d, :local) + {unreachable(reachable, private), collect_warnings(reachable, private)} + end + + defp unreachable(reachable, private) do + for {tuple, kind, _, _} <- private, + kind == :defmacrop or not :sets.is_element(tuple, reachable), + do: tuple + end + + defp collect_warnings(reachable, private) do + :lists.foldl(&collect_warnings(&1, &2, reachable), [], private) end - defp collect_unused_locals({tuple, kind, 0}, acc, reachable) do - if :lists.member(tuple, reachable) do + defp collect_warnings({_, _, false, _}, acc, _reachable) do + acc + end + + defp collect_warnings({tuple, kind, meta, 0}, acc, reachable) do + if :sets.is_element(tuple, reachable) do acc else - [{:unused_def, tuple, kind}|acc] + [{meta, {:unused_def, tuple, kind}} | acc] end end - defp collect_unused_locals({tuple, kind, default}, acc, reachable) when default > 0 do + defp collect_warnings({tuple, kind, meta, default}, acc, reachable) when default > 0 do {name, arity} = tuple min = arity - default max = arity - invoked = for {n, a} <- reachable, n == name, a in min..max, do: a + case min_reachable_default(max, min, :none, name, reachable) do + :none -> [{meta, {:unused_def, tuple, kind}} | acc] + ^min -> acc + ^max -> [{meta, {:unused_args, tuple}} | acc] + diff -> [{meta, {:unused_args, tuple, diff}} | acc] + end + end - if invoked == [] do - [{:unused_def, tuple, kind}|acc] - else - case :lists.min(invoked) - min do - 0 -> acc - ^default -> [{:unused_args, tuple}|acc] - unused_args -> [{:unused_args, tuple, unused_args}|acc] - end + defp min_reachable_default(max, min, last, name, reachable) when max >= min do + case :sets.is_element({name, max}, reachable) do + true -> min_reachable_default(max - 1, min, max, name, reachable) + false -> min_reachable_default(max - 1, min, last, name, reachable) end end + defp min_reachable_default(_max, _min, last, _name, _reachable) do + last + end @doc false def cache_env(pid, env) do @@ -211,13 +234,14 @@ defmodule Module.LocalsTracker do {:ok, {d, []}} end + @doc false def handle_call({:cache_env, env}, _from, {d, cache}) do case cache do - [{i,^env}|_] -> + [{i, ^env} | _] -> {:reply, i, {d, 
cache}} t -> i = length(t) - {:reply, i, {d, [{i,env}|t]}} + {:reply, i, {d, [{i, env} | t]}} end end @@ -227,20 +251,16 @@ defmodule Module.LocalsTracker do end def handle_call({:yank, local}, _from, {d, _} = state) do - in_vertices = :digraph.in_neighbours(d, local) out_vertices = :digraph.out_neighbours(d, local) - :digraph.del_vertex(d, local) - {:reply, {in_vertices, out_vertices}, state} + :digraph.del_edges(d, :digraph.out_edges(d, local)) + {:reply, {[], out_vertices}, state} end def handle_call(:digraph, _from, {d, _} = state) do {:reply, d, state} end - def handle_call(request, _from, state) do - {:stop, {:bad_call, request}, state} - end - + @doc false def handle_info(_msg, state) do {:noreply, state} end @@ -263,14 +283,22 @@ defmodule Module.LocalsTracker do def handle_cast({:add_defaults, kind, {name, arity}, defaults}, {d, _} = state) do for i <- :lists.seq(arity - defaults, arity - 1) do handle_add_definition(d, kind, {name, i}) - handle_add_local(d, {name, i}, {name, i + 1}) + handle_add_local(d, {name, i}, {name, arity}) end {:noreply, state} end - def handle_cast({:reattach, tuple, {in_neigh, out_neigh}}, {d, _} = state) do - for from <- in_neigh, do: replace_edge(d, from, tuple) - for to <- out_neigh, do: replace_edge(d, tuple, to) + def handle_cast({:reattach, _kind, tuple, {in_neigh, out_neigh}}, {d, _} = state) do + for from <- in_neigh do + :digraph.add_vertex(d, from) + replace_edge!(d, from, tuple) + end + + for to <- out_neigh do + :digraph.add_vertex(d, to) + replace_edge!(d, tuple, to) + end + {:noreply, state} end @@ -278,14 +306,12 @@ defmodule Module.LocalsTracker do {:stop, :normal, state} end - def handle_cast(msg, state) do - {:stop, {:bad_cast, msg}, state} - end - + @doc false def terminate(_reason, _state) do :ok end + @doc false def code_change(_old, state, _extra) do {:ok, state} end @@ -300,6 +326,8 @@ defmodule Module.LocalsTracker do if function != nil do replace_edge!(d, function, tuple) end + + :ok end defp handle_add_local(d, from, to) do @@ -317,14 +345,9 @@ defmodule Module.LocalsTracker do end defp replace_edge!(d, from, to) do - unless :lists.member(to, :digraph.out_neighbours(d, from)) do - [:"$e"|_] = :digraph.add_edge(d, from, to) - end - end - - defp replace_edge(d, from, to) do - unless :lists.member(to, :digraph.out_neighbours(d, from)) do - :digraph.add_edge(d, from, to) + _ = unless :lists.member(to, :digraph.out_neighbours(d, from)) do + [:"$e" | _] = :digraph.add_edge(d, from, to) end + :ok end end diff --git a/lib/elixir/lib/node.ex b/lib/elixir/lib/node.ex index 9b52e451f0c..5b15e69b9e5 100644 --- a/lib/elixir/lib/node.ex +++ b/lib/elixir/lib/node.ex @@ -30,7 +30,7 @@ defmodule Node do returns `{:error, :not_allowed}`. Returns `{:error, :not_found}` if the local node is not alive. """ - @spec stop() :: :ok | {:error, term} + @spec stop() :: :ok | {:error, :not_allowed | :not_found} def stop() do :net_kernel.stop() end @@ -72,7 +72,8 @@ defmodule Node do The result returned when the argument is a list, is the list of nodes satisfying the disjunction(s) of the list elements. - See http://www.erlang.org/doc/man/erlang.html#nodes-1 for more info. + For more information, see + [`:erlang.nodes/1`](http://www.erlang.org/doc/man/erlang.html#nodes-1). """ @typep state :: :visible | :hidden | :connected | :this | :known @spec list(state | [state]) :: [t] @@ -86,7 +87,11 @@ defmodule Node do If `flag` is `true`, monitoring is turned on. If `flag` is `false`, monitoring is turned off. 
- See http://www.erlang.org/doc/man/erlang.html#monitor_node-2 for more info. + For more information, see + [`:erlang.monitor_node/2`](http://www.erlang.org/doc/man/erlang.html#monitor_node-2). + + For monitoring status changes of all nodes, see + [`:net_kernel.monitor_nodes/3`](http://www.erlang.org/doc/man/net_kernel.html#monitor_nodes-2). """ @spec monitor(t, boolean) :: true def monitor(node, flag) do @@ -97,7 +102,11 @@ defmodule Node do Behaves as `monitor/2` except that it allows an extra option to be given, namely `:allow_passive_connect`. - See http://www.erlang.org/doc/man/erlang.html#monitor_node-3 for more info. + For more information, see + [`:erlang.monitor_node/3`](http://www.erlang.org/doc/man/erlang.html#monitor_node-3). + + For monitoring status changes of all nodes, see + [`:net_kernel.monitor_nodes/3`](http://www.erlang.org/doc/man/net_kernel.html#monitor_nodes-2). """ @spec monitor(t, boolean, [:allow_passive_connect]) :: true def monitor(node, flag, options) do @@ -128,7 +137,8 @@ defmodule Node do protocols. Returns `true` if disconnection succeeds, otherwise `false`. If the local node is not alive, the function returns `:ignored`. - See http://www.erlang.org/doc/man/erlang.html#disconnect_node-1 for more info. + For more information, see + [`:erlang.disconnect_node/1`](http://www.erlang.org/doc/man/erlang.html#disconnect_node-1). """ @spec disconnect(t) :: boolean | :ignored def disconnect(node) do @@ -141,7 +151,8 @@ defmodule Node do Returns `true` if successful, `false` if not, and the atom `:ignored` if the local node is not alive. - See http://erlang.org/doc/man/net_kernel.html#connect_node-1 for more info. + For more information, see + [`:net_kernel.connect_node/1`](http://www.erlang.org/doc/man/net_kernel.html#connect_node-1). """ @spec connect(t) :: boolean | :ignored def connect(node) do @@ -149,11 +160,11 @@ defmodule Node do end @doc """ - Returns the pid of a new process started by the application of `fun` - on `node`. If `node` does not exist, a useless pid is returned. + Returns the PID of a new process started by the application of `fun` + on `node`. If `node` does not exist, a useless PID is returned. - Check http://www.erlang.org/doc/man/erlang.html#spawn-2 for - the list of available options. + For the list of available options, see + [`:erlang.spawn/2`](http://www.erlang.org/doc/man/erlang.html#spawn-2). Inlined by the compiler. """ @@ -163,12 +174,13 @@ defmodule Node do end @doc """ - Returns the pid of a new process started by the application of `fun` + Returns the PID of a new process started by the application of `fun` on `node`. - If `node` does not exist, a useless pid is returned. Check - http://www.erlang.org/doc/man/erlang.html#spawn_opt-3 for the list of - available options. + If `node` does not exist, a useless PID is returned. + + For the list of available options, see + [`:erlang.spawn_opt/3`](http://www.erlang.org/doc/man/erlang.html#spawn_opt-3). Inlined by the compiler. """ @@ -178,12 +190,13 @@ defmodule Node do end @doc """ - Returns the pid of a new process started by the application of + Returns the PID of a new process started by the application of `module.function(args)` on `node`. - If `node` does not exist, a useless pid is returned. Check - http://www.erlang.org/doc/man/erlang.html#spawn-4 for the list of - available options. + If `node` does not exist, a useless PID is returned. + + For the list of available options, see + [`:erlang.spawn/4`](http://www.erlang.org/doc/man/erlang.html#spawn-4). Inlined by the compiler. 
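For a rough sense of the call shape (the node name below is hypothetical and must refer to a reachable node with a matching cookie for the remote call to do any work):

    pid = Node.spawn(:"worker@127.0.0.1", IO, :puts, ["hello from the caller"])
    is_pid(pid) #=> true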
""" @@ -193,12 +206,13 @@ defmodule Node do end @doc """ - Returns the pid of a new process started by the application of + Returns the PID of a new process started by the application of `module.function(args)` on `node`. - If `node` does not exist, a useless pid is returned. Check - http://www.erlang.org/doc/man/erlang.html#spawn_opt-5 for the list of - available options. + If `node` does not exist, a useless PID is returned. + + For the list of available options, see + [`:erlang.spawn/5`](http://www.erlang.org/doc/man/erlang.html#spawn_opt-5). Inlined by the compiler. """ @@ -208,10 +222,10 @@ defmodule Node do end @doc """ - Returns the pid of a new linked process started by the application of `fun` on `node`. + Returns the PID of a new linked process started by the application of `fun` on `node`. A link is created between the calling process and the new process, atomically. - If `node` does not exist, a useless pid is returned (and due to the link, an exit + If `node` does not exist, a useless PID is returned (and due to the link, an exit signal with exit reason `:noconnection` will be received). Inlined by the compiler. @@ -222,11 +236,11 @@ defmodule Node do end @doc """ - Returns the pid of a new linked process started by the application of + Returns the PID of a new linked process started by the application of `module.function(args)` on `node`. A link is created between the calling process and the new process, atomically. - If `node` does not exist, a useless pid is returned (and due to the link, an exit + If `node` does not exist, a useless PID is returned (and due to the link, an exit signal with exit reason `:noconnection` will be received). Inlined by the compiler. @@ -239,7 +253,7 @@ defmodule Node do @doc """ Sets the magic cookie of `node` to the atom `cookie`. - The default node is `Node.self`, the local node. If `node` is the local node, + The default node is `Node.self/0`, the local node. If `node` is the local node, the function also sets the cookie of all other unknown nodes to `cookie`. This function will raise `FunctionClauseError` if the given `node` is not alive. diff --git a/lib/elixir/lib/option_parser.ex b/lib/elixir/lib/option_parser.ex index ebfa6e752dc..1b1f1e71890 100644 --- a/lib/elixir/lib/option_parser.ex +++ b/lib/elixir/lib/option_parser.ex @@ -1,6 +1,6 @@ defmodule OptionParser do @moduledoc """ - This module contains functions to parse command line arguments. + This module contains functions to parse command line options. """ @type argv :: [String.t] @@ -8,66 +8,158 @@ defmodule OptionParser do @type errors :: [{String.t, String.t | nil}] @type options :: [switches: Keyword.t, strict: Keyword.t, aliases: Keyword.t] + defmodule ParseError do + defexception [:message] + end + @doc """ - Parses `argv` into a keywords list. + Parses `argv` into a keyword list. - It returns the parsed values, remaining arguments and the - invalid options. 
+ It returns a three-element tuple with the form `{parsed, args, invalid}`, where: - ## Examples + * `parsed` is a keyword list of parsed switches with `{switch_name, value}` + tuples in it; `switch_name` is the atom representing the switch name while + `value` is the value for that switch parsed according to `opts` (see the + "Examples" section for more information) + * `args` is a list of the remaining arguments in `argv` as strings + * `invalid` is a list of invalid options as `{option_name, value}` where + `option_name` is the raw option and `value` is `nil` if the option wasn't + expected or the string value if the value didn't have the expected type for + the corresponding option - iex> OptionParser.parse(["--debug"]) + Elixir converts switches to underscored atoms, so `--source-path` becomes + `:source_path`. This is done to better suit Elixir conventions. However, this + means that switches can't contain underscores and switches that do contain + underscores are always returned in the list of invalid switches. + + When parsing, it is common to list switches and their expected types: + + iex> OptionParser.parse(["--debug"], switches: [debug: :boolean]) {[debug: true], [], []} - iex> OptionParser.parse(["--source", "lib"]) + iex> OptionParser.parse(["--source", "lib"], switches: [source: :string]) {[source: "lib"], [], []} - iex> OptionParser.parse(["--source-path", "lib", "test/enum_test.exs", "--verbose"]) + iex> OptionParser.parse(["--source-path", "lib", "test/enum_test.exs", "--verbose"], + ...> switches: [source_path: :string, verbose: :boolean]) {[source_path: "lib", verbose: true], ["test/enum_test.exs"], []} - By default, Elixir will try to automatically parse switches. - Switches without an argument, like `--debug` will automatically - be set to true. Switches followed by a value will be assigned - to the value, always as strings. + We will explore the valid switches and operation modes of option parser below. + + ## Options + + The following options are supported: + + * `:switches` or `:strict` - see the "Switch definitions" section below + * `:allow_nonexistent_atoms` - see the "Parsing dynamic switches" section below + * `:aliases` - see the "Aliases" section below + + ## Switch definitions + + Switches can be specified via one of two options: - Note Elixir also converts the switches to underscore atoms, as - `--source-path` becomes `:source_path`, to better suit Elixir - conventions. This means that option names on the command line cannot contain - underscores; such options will be reported as `:undefined` (in strict mode) - or `:invalid` (in basic mode). + * `:switches` - defines some switches and their types. This function + still attempts to parse switches that are not in this list. + * `:strict` - defines strict switches. Any switch in `argv` that is not + specified in the list is returned in the invalid options list. - ## Switches + Both these options accept a keyword list of `{name, type}` tuples where `name` + is an atom defining the name of the switch and `type` is an atom that + specifies the type for the value of this switch (see the "Types" section below + for the possible types and more information about type casting). - Many times though, it is better to explicitly list the available - switches and their formats. The switches can be specified via two - different options: + Note that you should only supply the `:switches` or the`:strict` option. + If you supply both, an `ArgumentError` exception will be raised. - * `:strict` - the switches are strict. 
Any switch that does not - exist in the switch list is treated as an error. + ### Types - * `:switches` - configure some switches. Switches that does not - exist in the switch list are still attempted to be parsed. + Switches parsed by `OptionParser` may take zero or one arguments. - Note only `:strict` or `:switches` may be given at once. + The following switches types take no arguments: - For each switch, the following types are supported: + * `:boolean` - sets the value to `true` when given (see also the + "Negation switches" section below) + * `:count` - counts the number of times the switch is given - * `:boolean` - marks the given switch as a boolean. Boolean switches - never consume the following value unless it is `true` or - `false`. - * `:integer` - parses the switch as an integer. - * `:float` - parses the switch as a float. - * `:string` - returns the switch as a string. + The following switches take one argument: - If a switch can't be parsed or is not specfied in the strict case, - the option is returned in the invalid options list (third element - of the returned tuple). + * `:integer` - parses the value as an integer + * `:float` - parses the value as a float + * `:string` - parses the value as a string - The following extra "types" are supported: + If a switch can't be parsed according to the given type, it is + returned in the invalid options list. - * `:keep` - keeps duplicated items in the list instead of overriding + ### Modifiers - Examples: + Switches can be specified with modifiers, which change how + they behave. The following modifiers are supported: + + * `:keep` - keeps duplicated items instead of overriding them; + works with all types except `:count`. Specifying `switch_name: :keep` + assumes the type of `:switch_name` will be `:string`. + + To use `:keep` with a type other than `:string`, use a list as the type + for the switch. For example: `[foo: [:integer, :keep]]`. + + ### Negation switches + + In case a switch `SWITCH` is specified to have type `:boolean`, it may be + passed as `--no-SWITCH` as well which will set the option to `false`: + + iex> OptionParser.parse(["--no-op", "path/to/file"], switches: [op: :boolean]) + {[op: false], ["path/to/file"], []} + + ### Parsing dynamic switches + + `OptionParser` also includes a dynamic mode where it will attempt to parse + switches dynamically. Such can be done by not specifying the `:switches` or + `:strict` option. + + iex> OptionParser.parse(["--debug"]) + {[debug: true], [], []} + + + Switches followed by a value will be assigned the value, as a string. Switches + without an argument, like `--debug` in the examples above, will automatically be + set to `true`. + + Since Elixir converts switches to atoms, the dynamic mode will only parse + switches that translate to atoms used by the runtime. Therefore, the code below + likely won't parse the given option since the `:option_parser_example` atom is + never used anywhere: + + OptionParser.parse(["--option-parser-example"]) + # The :option_parser_example atom is not used anywhere below + + However, the code below does since the `:option_parser_example` atom is used + at some point later (or earlier) on: + + {opts, _, _} = OptionParser.parse(["--option-parser-example"]) + opts[:option_parser_example] + + In other words, when using dynamic mode, Elixir will do the correct thing and + only parse options that are used by the runtime, ignoring all others. 
If you
+ would like to parse all switches, regardless of whether they exist, you can
+ force creation of atoms by passing `allow_nonexistent_atoms: true` as an option.
+ This option is useful when you are building command-line applications that
+ receive dynamically-named arguments, but it must be used with care on
+ long-running systems.
+
+ ## Aliases
+
+ A set of aliases can be specified in the `:aliases` option:
+
+ iex> OptionParser.parse(["-d"], aliases: [d: :debug])
+ {[debug: true], [], []}
+
+ ## Examples
+
+ Here are some examples of working with different types and modifiers:
 iex> OptionParser.parse(["--unlock", "path/to/file"], strict: [unlock: :boolean])
 {[unlock: true], ["path/to/file"], []}
@@ -82,6 +174,12 @@ defmodule OptionParser do
 iex> OptionParser.parse(["--limit", "xyz"], strict: [limit: :integer])
 {[], [], [{"--limit", "xyz"}]}
+ iex> OptionParser.parse(["--verbose"], switches: [verbose: :count])
+ {[verbose: 1], [], []}
+
+ iex> OptionParser.parse(["-v", "-v"], aliases: [v: :verbose], strict: [verbose: :count])
+ {[verbose: 2], [], []}
+
 iex> OptionParser.parse(["--unknown", "xyz"], strict: [])
 {[], ["xyz"], [{"--unknown", nil}]}
@@ -89,32 +187,50 @@ defmodule OptionParser do
 iex> OptionParser.parse(["--limit", "3", "--unknown", "xyz"],
 ...> switches: [limit: :integer])
 {[limit: 3, unknown: "xyz"], [], []}
- ## Negation switches
+ iex> OptionParser.parse(["--unlock", "path/to/file", "--unlock", "path/to/another/file"], strict: [unlock: :keep])
+ {[unlock: "path/to/file", unlock: "path/to/another/file"], [], []}
- All switches starting with `--no-` are considered to be booleans and never
- parse the next value:
+ """
+ @spec parse(argv, options) :: {parsed, argv, errors}
+ def parse(argv, opts \\ []) when is_list(argv) and is_list(opts) do
+ do_parse(argv, build_config(opts), [], [], [], true)
+ end
- iex> OptionParser.parse(["--no-op", "path/to/file"])
- {[no_op: true], ["path/to/file"], []}
+ @doc """
+ The same as `parse/2` but raises an `OptionParser.ParseError`
+ exception if any invalid options are given.
- However, in case the base switch exists, it sets that particular switch to
- false:
+ If there are no errors, returns a `{parsed, rest}` tuple where:
- iex> OptionParser.parse(["--no-op", "path/to/file"], switches: [op: :boolean])
- {[op: false], ["path/to/file"], []}
+ * `parsed` is the list of parsed switches (same as in `parse/2`)
+ * `rest` is the list of arguments (same as in `parse/2`)
- ## Aliases
+ ## Examples
- A set of aliases can be given as options too:
+ iex> OptionParser.parse!(["--debug", "path/to/file"], strict: [debug: :boolean])
+ {[debug: true], ["path/to/file"]}
- iex> OptionParser.parse(["-d"], aliases: [d: :debug])
+ iex> OptionParser.parse!(["--limit", "xyz"], strict: [limit: :integer])
- {[debug: true], [], []}
+ ** (OptionParser.ParseError) 1 error found!
+ --limit : Expected type integer, got "xyz"
+
+ iex> OptionParser.parse!(["--unknown", "xyz"], strict: [])
+ ** (OptionParser.ParseError) 1 error found!
+ --unknown : Unknown option
+
+ iex> OptionParser.parse!(["-l", "xyz", "-f", "bar"],
+ ...> switches: [limit: :integer, foo: :integer], aliases: [l: :limit, f: :foo])
+ ** (OptionParser.ParseError) 2 errors found!
+ -l : Expected type integer, got "xyz" + -f : Expected type integer, got "bar" """ - @spec parse(argv, options) :: {parsed, argv, errors} - def parse(argv, opts \\ []) when is_list(argv) and is_list(opts) do - config = compile_config(opts, true) - do_parse(argv, config, [], [], []) + @spec parse!(argv, options) :: {parsed, argv} | no_return + def parse!(argv, opts \\ []) when is_list(argv) and is_list(opts) do + case parse(argv, opts) do + {parsed, args, []} -> {parsed, args} + {_, _, errors} -> raise ParseError, format_errors(errors, opts) + end end @doc """ @@ -125,46 +241,81 @@ defmodule OptionParser do ## Example - iex> OptionParser.parse_head(["--source", "lib", "test/enum_test.exs", "--verbose"]) + iex> OptionParser.parse_head(["--source", "lib", "test/enum_test.exs", "--verbose"], + ...> switches: [source: :string, verbose: :boolean]) {[source: "lib"], ["test/enum_test.exs", "--verbose"], []} - iex> OptionParser.parse_head(["--verbose", "--source", "lib", "test/enum_test.exs", "--unlock"]) + iex> OptionParser.parse_head(["--verbose", "--source", "lib", "test/enum_test.exs", "--unlock"], + ...> switches: [source: :string, verbose: :boolean, unlock: :boolean]) {[verbose: true, source: "lib"], ["test/enum_test.exs", "--unlock"], []} """ @spec parse_head(argv, options) :: {parsed, argv, errors} def parse_head(argv, opts \\ []) when is_list(argv) and is_list(opts) do - config = compile_config(opts, false) - do_parse(argv, config, [], [], []) + do_parse(argv, build_config(opts), [], [], [], false) end - defp do_parse([], _config, opts, args, invalid) do + @doc """ + The same as `parse_head/2` but raises an `OptionParser.ParseError` + exception if any invalid options are given. + + If there are no errors, returns a `{parsed, rest}` tuple where: + + * `parsed` is the list of parsed switches (same as in `parse_head/2`) + * `rest` is the list of arguments (same as in `parse_head/2`) + + ## Examples + + iex> OptionParser.parse_head!(["--source", "lib", "path/to/file", "--verbose"], + ...> switches: [source: :string, verbose: :boolean]) + {[source: "lib"], ["path/to/file", "--verbose"]} + + iex> OptionParser.parse_head!(["--number", "lib", "test/enum_test.exs", "--verbose"], + ...> strict: [number: :integer]) + ** (OptionParser.ParseError) 1 error found! + --number : Expected type integer, got "lib" + + iex> OptionParser.parse_head!(["--verbose", "--source", "lib", "test/enum_test.exs", "--unlock"], + ...> strict: [verbose: :integer, source: :integer]) + ** (OptionParser.ParseError) 2 errors found! + --verbose : Missing argument of type integer + --source : Expected type integer, got "lib" + """ + @spec parse_head!(argv, options) :: {parsed, argv} | no_return + def parse_head!(argv, opts \\ []) when is_list(argv) and is_list(opts) do + case parse_head(argv, opts) do + {parsed, args, []} -> {parsed, args} + {_, _, errors} -> raise ParseError, format_errors(errors, opts) + end + end + + defp do_parse([], _config, opts, args, invalid, _all?) do {Enum.reverse(opts), Enum.reverse(args), Enum.reverse(invalid)} end - defp do_parse(argv, {aliases, switches, strict, all}=config, opts, args, invalid) do - case next(argv, aliases, switches, strict) do + defp do_parse(argv, %{switches: switches} = config, opts, args, invalid, all?) 
do + case next_with_config(argv, config) do {:ok, option, value, rest} -> - # the option exist and it was successfully parsed + # the option exists and it was successfully parsed kinds = List.wrap Keyword.get(switches, option) - new_opts = do_store_option(opts, option, value, kinds) - do_parse(rest, config, new_opts, args, invalid) + new_opts = store_option(opts, option, value, kinds) + do_parse(rest, config, new_opts, args, invalid, all?) {:invalid, option, value, rest} -> # the option exist but it has wrong value - do_parse(rest, config, opts, args, [{option, value}|invalid]) + do_parse(rest, config, opts, args, [{option, value} | invalid], all?) {:undefined, option, _value, rest} -> # the option does not exist (for strict cases) - do_parse(rest, config, opts, args, [{option, nil}|invalid]) + do_parse(rest, config, opts, args, [{option, nil} | invalid], all?) - {:error, ["--"|rest]} -> + {:error, ["--" | rest]} -> {Enum.reverse(opts), Enum.reverse(args, rest), Enum.reverse(invalid)} - {:error, [arg|rest]=remaining_args} -> + {:error, [arg | rest] = remaining_args} -> # there is no option - if all do - do_parse(rest, config, opts, [arg|args], invalid) + if all? do + do_parse(rest, config, opts, [arg | args], invalid, all?) else {Enum.reverse(opts), Enum.reverse(args, remaining_args), Enum.reverse(invalid)} end @@ -175,140 +326,312 @@ defmodule OptionParser do Low-level function that parses one option. It accepts the same options as `parse/2` and `parse_head/2` - as both functions are built on top of next. This function + as both functions are built on top of this function. This function may return: * `{:ok, key, value, rest}` - the option `key` with `value` was successfully parsed * `{:invalid, key, value, rest}` - the option `key` is invalid with `value` - (returned when the switch type does not match the one given via the - command line) + (returned when the value cannot be parsed according to the switch type) * `{:undefined, key, value, rest}` - the option `key` is undefined - (returned on strict cases and the switch is unknown) + (returned in strict mode when the switch is unknown) - * `{:error, rest}` - there are no switches at the top of the given argv - """ + * `{:error, rest}` - there are no switches at the head of the given `argv` + """ @spec next(argv, options) :: {:ok, key :: atom, value :: term, argv} | - {:invalid, key :: atom, value :: term, argv} | - {:undefined, key :: atom, value :: term, argv} | + {:invalid, String.t, String.t | nil, argv} | + {:undefined, String.t, String.t | nil, argv} | {:error, argv} def next(argv, opts \\ []) when is_list(argv) and is_list(opts) do - {aliases, switches, strict, _} = compile_config(opts, true) - next(argv, aliases, switches, strict) + next_with_config(argv, build_config(opts)) end - defp next([], _aliases, _switches, _strict) do + defp next_with_config([], _config) do {:error, []} end - defp next(["--"|_]=argv, _aliases, _switches, _strict) do + defp next_with_config(["--" | _] = argv, _config) do {:error, argv} end - defp next(["-"|_]=argv, _aliases, _switches, _strict) do + defp next_with_config(["-" | _] = argv, _config) do {:error, argv} end - defp next(["- " <> _|_]=argv, _aliases, _switches, _strict) do + defp next_with_config(["- " <> _ | _] = argv, _config) do {:error, argv} end - defp next(["-" <> option|rest], aliases, switches, strict) do + # Handles --foo or --foo=bar + defp next_with_config(["--" <> option | rest], config) do {option, value} = split_option(option) - opt_name_bin = "-" <> option - tagged = 
tag_option(option, value, switches, aliases) + tagged = tag_option(option, config) + next_tagged(tagged, value, "--" <> option, rest, config) + end + + # Handles -a, -abc, -abc=something + defp next_with_config(["-" <> option | rest] = argv, config) do + %{aliases: aliases, allow_nonexistent_atoms?: allow_nonexistent_atoms?} = config + {option, value} = split_option(option) + original = "-" <> option - if strict and not option_defined?(tagged, switches) do - {:undefined, opt_name_bin, value, rest} + cond do + is_nil(value) and negative_number?(original) -> + {:error, argv} + String.contains?(option, ["-", "_"]) -> + {:undefined, original, value, rest} + String.length(option) > 1 -> + key = get_option_key(option, allow_nonexistent_atoms?) + option_key = aliases[key] + if key && option_key do + IO.warn "multi-letter aliases are deprecated, got: #{inspect(key)}" + next_tagged({:default, option_key}, value, original, rest, config) + else + next_with_config(expand_multiletter_alias(option, value) ++ rest, config) + end + true -> + # We have a regular one-letter alias here + tagged = tag_oneletter_alias(option, config) + next_tagged(tagged, value, original, rest, config) + end + end + + defp next_with_config(argv, _config) do + {:error, argv} + end + + defp next_tagged(tagged, value, original, rest, %{switches: switches, strict?: strict?}) do + if strict? and not option_defined?(tagged, switches) do + {:undefined, original, value, rest} else - {opt_name, kinds, value} = normalize_option(tagged, value, switches) - {value, kinds, rest} = normalize_value(value, kinds, rest, strict) + {option, kinds, value} = normalize_option(tagged, value, switches) + {value, kinds, rest} = normalize_value(value, kinds, rest, strict?) case validate_option(value, kinds) do - {:ok, new_value} -> {:ok, opt_name, new_value, rest} - :invalid -> {:invalid, opt_name_bin, value, rest} + {:ok, new_value} -> {:ok, option, new_value, rest} + :invalid -> {:invalid, original, value, rest} end end end - defp next(argv, _aliases, _switches, _strict) do - {:error, argv} + @doc """ + Receives a key-value enumerable and converts it to `t:argv/0`. + + Keys must be atoms. Keys with `nil` value are discarded, + boolean values are converted to `--key` or `--no-key` + (if the value is `true` or `false`, respectively), + and all other values are converted using `Kernel.to_string/1`. + + It is advised to pass to `to_argv/2` the same set of `options` + given to `parse/2`. Some switches can only be reconstructed + correctly with the `switches` information in hand. 
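As a small sketch of the advice above (values are illustrative), passing the same options to both calls keeps a `:count` switch intact on the round trip:

    opts = [switches: [verbose: :count]]
    {parsed, [], []} = OptionParser.parse(["--verbose", "--verbose"], opts)
    parsed
    #=> [verbose: 2]
    OptionParser.to_argv(parsed, opts)
    #=> ["--verbose", "--verbose"]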
+
+ ## Examples
+
+ iex> OptionParser.to_argv([foo_bar: "baz"])
+ ["--foo-bar", "baz"]
+ iex> OptionParser.to_argv([bool: true, bool: false, discarded: nil])
+ ["--bool", "--no-bool"]
+
+ Some switches will output different values based on the switches
+ flag:
+
+ iex> OptionParser.to_argv([number: 2], switches: [])
+ ["--number", "2"]
+ iex> OptionParser.to_argv([number: 2], switches: [number: :count])
+ ["--number", "--number"]
+
+ """
+ @spec to_argv(Enumerable.t, options) :: argv
+ def to_argv(enum, opts \\ []) do
+ switches = Keyword.get(opts, :switches, [])
+ Enum.flat_map(enum, fn
+ {_key, nil} -> []
+ {key, true} -> [to_switch(key)]
+ {key, false} -> [to_switch(key, "--no-")]
+ {key, value} -> to_argv(key, value, switches)
+ end)
 end
- ## Helpers
+ defp to_argv(key, value, switches) do
+ if switches[key] == :count do
+ List.duplicate(to_switch(key), value)
+ else
+ [to_switch(key), to_string(value)]
+ end
+ end
+
+ defp to_switch(key, prefix \\ "--") when is_atom(key) do
+ prefix <> String.replace(Atom.to_string(key), "_", "-")
+ end
- defp compile_config(opts, all) do
- aliases = opts[:aliases] || []
+ @doc ~S"""
+ Splits a string into `t:argv/0` chunks.
- {switches, strict} = cond do
- s = opts[:switches] ->
- {s, false}
- s = opts[:strict] ->
- {s, true}
+ This function splits the given `string` into a list of strings in a similar
+ way to many shells.
+
+ ## Examples
+
+ iex> OptionParser.split("foo bar")
+ ["foo", "bar"]
+
+ iex> OptionParser.split("foo \"bar baz\"")
+ ["foo", "bar baz"]
+
+ """
+ @spec split(String.t) :: argv
+ def split(string) when is_binary(string) do
+ do_split(String.trim_leading(string, " "), "", [], nil)
+ end
+
+ # If we have an escaped quote, simply remove the escape
+ defp do_split(<<?\\, quote, t::binary>>, buffer, acc, quote),
+ do: do_split(t, <<buffer::binary, quote>>, acc, quote)
+
+ # If we have a quote and we were not in a quote, start one
+ defp do_split(<<quote, t::binary>>, buffer, acc, nil) when quote in [?", ?'],
+ do: do_split(t, buffer, acc, quote)
+
+ # If we have a quote and we were inside it, close it
+ defp do_split(<<quote, t::binary>>, buffer, acc, quote),
+ do: do_split(t, buffer, acc, nil)
+
+ # If we have an escaped quote/space, simply remove the escape as long as we are not inside a quote
+ defp do_split(<<?\\, h, t::binary>>, buffer, acc, nil) when h in [?\s, ?', ?"],
+ do: do_split(t, <<buffer::binary, h>>, acc, nil)
+
+ # If we have space and we are outside of a quote, start new segment
+ defp do_split(<<?\s, t::binary>>, buffer, acc, nil),
+ do: do_split(String.trim_leading(t, " "), "", [buffer | acc], nil)
+
+ # All other characters are moved to buffer
+ defp do_split(<<h, t::binary>>, buffer, acc, quote) do
+ do_split(t, <<buffer::binary, h>>, acc, quote)
+ end
+
+ # Finish the string expecting a nil marker
+ defp do_split(<<>>, "", acc, nil),
+ do: Enum.reverse(acc)
+
+ defp do_split(<<>>, buffer, acc, nil),
+ do: Enum.reverse([buffer | acc])
+
+ # Otherwise raise
+ defp do_split(<<>>, _, _acc, marker) do
+ raise "argv string did not terminate properly, a #{<<marker>>} was opened but never closed"
+ end
+
+ ## Helpers
+
+ defp build_config(opts) do
+ {switches, strict?} = cond do
+ opts[:switches] && opts[:strict] ->
+ raise ArgumentError, ":switches and :strict cannot be given together"
+ switches = opts[:switches] ->
+ {switches, false}
+ strict = opts[:strict] ->
+ {strict, true}
 true -> {[], false}
 end
- {aliases, switches, strict, all}
+ %{
+ aliases: opts[:aliases] || [],
+ allow_nonexistent_atoms?: opts[:allow_nonexistent_atoms] || false,
+ strict?: strict?,
+ switches: switches
+ }
 end
 defp validate_option(value, kinds) do
- {is_invalid, value} = cond do
- :invalid in kinds ->
{true, value} - :boolean in kinds -> - case value do - t when t in [true, "true"] -> {nil, true} - f when f in [false, "false"] -> {nil, false} - _ -> {true, value} - end - :integer in kinds -> - case Integer.parse(value) do - {value, ""} -> {nil, value} - _ -> {true, value} - end - :float in kinds -> - case Float.parse(value) do - {value, ""} -> {nil, value} - _ -> {true, value} - end - true -> - {nil, value} - end + {invalid?, value} = + cond do + :invalid in kinds -> + {true, value} + :boolean in kinds -> + case value do + t when t in [true, "true"] -> {false, true} + f when f in [false, "false"] -> {false, false} + _ -> {true, value} + end + :count in kinds -> + case value do + 1 -> {false, value} + _ -> {true, value} + end + :integer in kinds -> + case Integer.parse(value) do + {value, ""} -> {false, value} + _ -> {true, value} + end + :float in kinds -> + case Float.parse(value) do + {value, ""} -> {false, value} + _ -> {true, value} + end + true -> + {false, value} + end - if is_invalid do + if invalid? do :invalid else {:ok, value} end end - defp do_store_option(dict, option, value, kinds) do + defp store_option(dict, option, value, kinds) do cond do + :count in kinds -> + Keyword.update(dict, option, value, & &1 + 1) :keep in kinds -> - [{option, value}|dict] + [{option, value} | dict] + true -> + [{option, value} | Keyword.delete(dict, option)] + end + end + + defp tag_option("no-" <> option = original, %{switches: switches, allow_nonexistent_atoms?: allow_nonexistent_atoms?}) do + cond do + (negated = get_option_key(option, allow_nonexistent_atoms?)) && :boolean in List.wrap(switches[negated]) -> + {:negated, negated} + option_key = get_option_key(original, allow_nonexistent_atoms?) -> + {:default, option_key} true -> - [{option, value}|Keyword.delete(dict, option)] + :unknown end end - defp tag_option(<>, value, switches, _aliases) do - get_negated(option, value, switches) + defp tag_option(option, %{allow_nonexistent_atoms?: allow_nonexistent_atoms?}) do + if option_key = get_option_key(option, allow_nonexistent_atoms?) 
do + {:default, option_key} + else + :unknown + end end - defp tag_option(option, _value, _switches, aliases) when is_binary(option) do - opt = get_option(option) - if alias = aliases[opt] do - {:default, alias} + defp tag_oneletter_alias(alias, %{aliases: aliases, allow_nonexistent_atoms?: allow_nonexistent_atoms?}) when is_binary(alias) do + if option_key = aliases[to_existing_key(alias, allow_nonexistent_atoms?)] do + {:default, option_key} else :unknown end end + defp expand_multiletter_alias(letters, value) when is_binary(letters) do + {last, expanded} = + letters + |> String.codepoints() + |> Enum.map(&("-" <> &1)) + |> List.pop_at(-1) + expanded ++ [last <> if(value, do: "=" <> value, else: "")] + end + defp option_defined?(:unknown, _switches) do false end @@ -325,51 +648,45 @@ defmodule OptionParser do {nil, [:invalid], value} end - defp normalize_option({:negated, option}, nil, switches) do - kinds = List.wrap(switches[option]) - - cond do - :boolean in kinds -> - {option, kinds, false} - kinds == [] -> - {option, kinds, true} - true -> - {reverse_negated(option), [:invalid], nil} + defp normalize_option({:negated, option}, value, switches) do + if value do + {option, [:invalid], value} + else + {option, List.wrap(switches[option]), false} end end - defp normalize_option({:negated, option}, value, _switches) do - {option, [:invalid], value} - end - defp normalize_option({:default, option}, value, switches) do {option, List.wrap(switches[option]), value} end - defp normalize_value(nil, kinds, t, strict) do - nil_or_true = if strict, do: nil, else: true + defp normalize_value(nil, kinds, t, strict?) do cond do :boolean in kinds -> {true, kinds, t} + :count in kinds -> + {1, kinds, t} value_in_tail?(t) -> - [h|t] = t + [h | t] = t {h, kinds, t} + kinds == [] and strict? -> + {nil, kinds, t} kinds == [] -> - {nil_or_true, kinds, t} + {true, kinds, t} true -> {nil, [:invalid], t} end end - defp normalize_value(value, kinds, t, _) do + defp normalize_value(value, kinds, t, _strict?) do {value, kinds, t} end - defp value_in_tail?(["-"|_]), do: true - defp value_in_tail?(["- " <> _|_]), do: true - defp value_in_tail?(["-" <> _|_]), do: false - defp value_in_tail?([]), do: false - defp value_in_tail?(_), do: true + defp value_in_tail?(["-" | _]), do: true + defp value_in_tail?(["- " <> _ | _]), do: true + defp value_in_tail?(["-" <> arg | _]), do: negative_number?("-" <> arg) + defp value_in_tail?([]), do: false + defp value_in_tail?(_), do: true defp split_option(option) do case :binary.split(option, "=") do @@ -378,46 +695,66 @@ defmodule OptionParser do end end - defp to_underscore(option), do: to_underscore(option, <<>>) - - defp to_underscore("_" <> _rest, _acc), do: nil - + defp to_underscore(option), + do: to_underscore(option, <<>>) + defp to_underscore("_" <> _rest, _acc), + do: nil defp to_underscore("-" <> rest, acc), do: to_underscore(rest, acc <> "_") - defp to_underscore(<> <> rest, acc), do: to_underscore(rest, <>) + defp to_underscore(<<>>, acc), + do: acc - defp to_underscore(<<>>, acc), do: acc + defp get_option_key(option, allow_nonexistent_atoms?) do + if string = to_underscore(option) do + to_existing_key(string, allow_nonexistent_atoms?) 
+ end + end - defp get_option(option) do - if str = to_underscore(option) do - String.to_atom(str) + defp to_existing_key(option, true), + do: String.to_atom(option) + defp to_existing_key(option, false) do + try do + String.to_existing_atom(option) + rescue + ArgumentError -> nil end end - defp reverse_negated(negated) do - String.to_atom("no_" <> Atom.to_string(negated)) + defp negative_number?(arg) do + match?({_, ""}, Float.parse(arg)) end - defp get_negated("no-" <> rest = option, value, switches) do - if negated = get_option(rest) do - option = if Keyword.has_key?(switches, negated) and value == nil do - negated - else - get_option(option) - end - {:negated, option} + defp format_errors([_ | _] = errors, opts) do + types = opts[:switches] || opts[:strict] + error_count = length(errors) + error = if error_count == 1, do: "error", else: "errors" + "#{error_count} #{error} found!\n" <> + Enum.map_join(errors, "\n", &format_error(&1, opts, types)) + end + + defp format_error({option, nil}, opts, types) do + if type = get_type(option, opts, types) do + "#{option} : Missing argument of type #{type}" else - :unknown + "#{option} : Unknown option" end end - defp get_negated(rest, _value, _switches) do - if option = get_option(rest) do - {:default, option} + defp format_error({option, value}, opts, types) do + type = get_type(option, opts, types) + "#{option} : Expected type #{type}, got #{inspect value}" + end + + defp get_type(option, opts, types) do + allow_nonexistent_atoms? = opts[:allow_nonexistent_atoms] || false + key = option |> String.trim_leading("-") |> get_option_key(allow_nonexistent_atoms?) + + if option_key = opts[:aliases][key] do + types[option_key] else - :unknown + types[key] end end end diff --git a/lib/elixir/lib/path.ex b/lib/elixir/lib/path.ex index 1dd4068a18b..d5c7f4f96f1 100644 --- a/lib/elixir/lib/path.ex +++ b/lib/elixir/lib/path.ex @@ -3,23 +3,24 @@ defmodule Path do This module provides conveniences for manipulating or retrieving file system paths. - The functions in this module may receive a char data as + The functions in this module may receive a chardata as argument (i.e. a string or a list of characters / string) and will always return a string (encoded in UTF-8). The majority of the functions in this module do not interact with the file system, except for a few functions - that require it (like `wildcard/1` and `expand/1`). + that require it (like `wildcard/2` and `expand/1`). """ - alias :filename, as: FN @type t :: :unicode.chardata() @doc """ Converts the given path to an absolute one. Unlike `expand/1`, no attempt is made to resolve `..`, `.` or `~`. - ## Unix examples + ## Examples + + ### Unix Path.absname("foo") #=> "/usr/local/foo" @@ -27,12 +28,12 @@ defmodule Path do Path.absname("../x") #=> "/usr/local/../x" - ## Windows + ### Windows Path.absname("foo"). - "D:/usr/local/foo" + #=> "D:/usr/local/foo" Path.absname("../x"). - "D:/usr/local/../x" + #=> "D:/usr/local/../x" """ @spec absname(t) :: binary @@ -41,8 +42,10 @@ defmodule Path do end @doc """ - Builds a path from `relative_to` to `path`. If `path` is already - an absolute path, `relative_to` is ignored. See also `relative_to/2`. + Builds a path from `relative_to` to `path`. + + If `path` is already an absolute path, `relative_to` is ignored. See also + `relative_to/2`. Unlike `expand/2`, no attempt is made to resolve `..`, `.` or `~`. 
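To make the contrast with `expand/2` concrete, a brief sketch (the paths are hypothetical):

    Path.absname("foo/../bar", "/usr/local")
    #=> "/usr/local/foo/../bar"    # ".." is kept as-is

    Path.expand("foo/../bar", "/usr/local")
    #=> "/usr/local/bar"           # ".." is resolved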
@@ -60,40 +63,70 @@ defmodule Path do def absname(path, relative_to) do path = IO.chardata_to_string(path) case type(path) do - :relative -> join(relative_to, path) - :absolute -> - cond do - path == "/" -> - path - :binary.last(path) == ?/ -> - binary_part(path, 0, byte_size(path) - 1) - true -> - path - end + :relative -> absname_join(relative_to, path) + :absolute -> absname_join([path]) :volumerelative -> relative_to = IO.chardata_to_string(relative_to) absname_vr(split(path), split(relative_to), relative_to) end end - ## Absolute path on current drive - defp absname_vr(["/"|rest], [volume|_], _relative), - do: join([volume|rest]) + # Absolute path on current drive + defp absname_vr(["/" | rest], [volume | _], _relative), + do: absname_join([volume | rest]) - ## Relative to current directory on current drive. - defp absname_vr([<>|rest], [<>|_], relative), - do: absname(join(rest), relative) + # Relative to current directory on current drive. + defp absname_vr([<> | rest], [<> | _], relative), + do: absname(absname_join(rest), relative) - ## Relative to current directory on another drive. - defp absname_vr([<>|name], _, _relative) do + # Relative to current directory on another drive. + defp absname_vr([<> | name], _, _relative) do cwd = case :file.get_cwd([x, ?:]) do {:ok, dir} -> IO.chardata_to_string(dir) {:error, _} -> <> end - absname(join(name), cwd) + absname(absname_join(name), cwd) end + # Joins a list + defp absname_join([name1, name2 | rest]), do: + absname_join([absname_join(name1, name2) | rest]) + defp absname_join([name]), do: + do_absname_join(IO.chardata_to_string(name), <<>>, [], major_os_type()) + + # Joins two paths + defp absname_join(left, right), + do: do_absname_join(IO.chardata_to_string(left), relative(right), [], major_os_type()) + + defp do_absname_join(<>, relativename, [], :win32) when uc_letter in ?A..?Z, do: + do_absname_join(rest, relativename, [?:, uc_letter + ?a - ?A], :win32) + defp do_absname_join(<>, relativename, result, :win32), do: + do_absname_join(<>, relativename, result, :win32) + defp do_absname_join(<>, relativename, [?., ?/ | result], os_type), do: + do_absname_join(rest, relativename, [?/ | result], os_type) + defp do_absname_join(<>, relativename, [?/ | result], os_type), do: + do_absname_join(rest, relativename, [?/ | result], os_type) + defp do_absname_join(<<>>, <<>>, result, os_type), do: + IO.iodata_to_binary(reverse_maybe_remove_dir_sep(result, os_type)) + defp do_absname_join(<<>>, relativename, [?: | rest], :win32), do: + do_absname_join(relativename, <<>>, [?: | rest], :win32) + defp do_absname_join(<<>>, relativename, [?/ | result], os_type), do: + do_absname_join(relativename, <<>>, [?/ | result], os_type) + defp do_absname_join(<<>>, relativename, result, os_type), do: + do_absname_join(relativename, <<>>, [?/ | result], os_type) + defp do_absname_join(<>, relativename, result, os_type), do: + do_absname_join(rest, relativename, [char | result], os_type) + + defp reverse_maybe_remove_dir_sep([?/, ?:, letter], :win32), do: + [letter, ?:, ?/] + defp reverse_maybe_remove_dir_sep([?/], _), do: + [?/] + defp reverse_maybe_remove_dir_sep([?/ | name], _), do: + :lists.reverse(name) + defp reverse_maybe_remove_dir_sep(name, _), do: + :lists.reverse(name) + @doc """ Converts the path to an absolute one and expands any `.` and `..` characters and a leading `~`. 
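The leading `~` expansion mentioned here depends on the environment; assuming the user home is `/home/jose`, an illustrative sketch:

    Path.expand("~/projects/elixir")
    #=> "/home/jose/projects/elixir"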
@@ -101,20 +134,21 @@ defmodule Path do ## Examples Path.expand("/foo/bar/../bar") - "/foo/bar" + #=> "/foo/bar" """ @spec expand(t) :: binary def expand(path) do - normalize absname(expand_home(path), System.cwd!) + expand_dot absname(expand_home(path), System.cwd!) end @doc """ Expands the path relative to the path given as the second argument - expanding any `.` and `..` characters. If the path is already an - absolute path, `relative_to` is ignored. + expanding any `.` and `..` characters. + + If the path is already an absolute path, `relative_to` is ignored. - Note, that this function treats `path` with a leading `~` as + Note that this function treats a `path` with a leading `~` as an absolute one. The second argument is first expanded to an absolute path. @@ -133,13 +167,15 @@ defmodule Path do """ @spec expand(t, t) :: binary def expand(path, relative_to) do - normalize absname(absname(expand_home(path), expand_home(relative_to)), System.cwd!) + expand_dot absname(absname(expand_home(path), expand_home(relative_to)), System.cwd!) end @doc """ Returns the path type. - ## Unix examples + ## Examples + + ### Unix Path.type("/") #=> :absolute Path.type("/usr/local/bin") #=> :absolute @@ -147,7 +183,7 @@ defmodule Path do Path.type("../usr/local/bin") #=> :relative Path.type("~/file") #=> :relative - ## Windows examples + ### Windows Path.type("D:/usr/local/bin") #=> :absolute Path.type("usr/local/bin") #=> :relative @@ -156,23 +192,24 @@ defmodule Path do """ @spec type(t) :: :absolute | :relative | :volumerelative - def type(name) when is_list(name) or is_binary(name) do - case :os.type() do - {:win32, _} -> win32_pathtype(name) - _ -> unix_pathtype(name) - end |> elem(0) + def type(name) + when is_list(name) + when is_binary(name) do + pathtype(name, major_os_type()) |> elem(0) end @doc """ Forces the path to be a relative path. 
- ## Unix examples + ## Examples + + ### Unix Path.relative("/usr/local/bin") #=> "usr/local/bin" Path.relative("usr/local/bin") #=> "usr/local/bin" Path.relative("../usr/local/bin") #=> "../usr/local/bin" - ## Windows examples + ### Windows Path.relative("D:/usr/local/bin") #=> "usr/local/bin" Path.relative("usr/local/bin") #=> "usr/local/bin" @@ -182,55 +219,68 @@ defmodule Path do """ @spec relative(t) :: binary def relative(name) do - case :os.type() do - {:win32, _} -> win32_pathtype(name) - _ -> unix_pathtype(name) - end |> elem(1) |> IO.chardata_to_string + relative(name, major_os_type()) end - defp unix_pathtype(<>), do: - {:absolute, relative} - defp unix_pathtype([?/|relative]), do: - {:absolute, relative} - defp unix_pathtype([list|rest]) when is_list(list), do: - unix_pathtype(list ++ rest) - defp unix_pathtype(relative), do: - {:relative, relative} + defp relative(name, os_type) do + pathtype(name, os_type) + |> elem(1) + |> IO.chardata_to_string + end + + defp pathtype(name, os_type) do + case os_type do + :win32 -> win32_pathtype(name) + _ -> unix_pathtype(name) + end + end + + defp unix_pathtype(path) when path in ["/", '/'], + do: {:absolute, "."} + defp unix_pathtype(<>), + do: {:absolute, relative} + defp unix_pathtype([?/ | relative]), + do: {:absolute, relative} + defp unix_pathtype([list | rest]) when is_list(list), + do: unix_pathtype(list ++ rest) + defp unix_pathtype(relative), + do: {:relative, relative} @slash [?/, ?\\] - defp win32_pathtype([list|rest]) when is_list(list), do: - win32_pathtype(list++rest) - defp win32_pathtype([char, list|rest]) when is_list(list), do: - win32_pathtype([char|list++rest]) - defp win32_pathtype(<>) when c1 in @slash and c2 in @slash, do: - {:absolute, relative} - defp win32_pathtype(<>) when c in @slash, do: - {:volumerelative, relative} - defp win32_pathtype(<<_letter, ?:, c, relative :: binary>>) when c in @slash, do: - {:absolute, relative} - defp win32_pathtype(<<_letter, ?:, relative :: binary>>), do: - {:volumerelative, relative} - - defp win32_pathtype([c1, c2 | relative]) when c1 in @slash and c2 in @slash, do: - {:absolute, relative} - defp win32_pathtype([c | relative]) when c in @slash, do: - {:volumerelative, relative} - defp win32_pathtype([c1, c2, list|rest]) when is_list(list), do: - win32_pathtype([c1, c2|list++rest]) - defp win32_pathtype([_letter, ?:, c | relative]) when c in @slash, do: - {:absolute, relative} - defp win32_pathtype([_letter, ?: | relative]), do: - {:volumerelative, relative} - defp win32_pathtype(relative), do: - {:relative, relative} + defp win32_pathtype([list | rest]) when is_list(list), + do: win32_pathtype(list ++ rest) + defp win32_pathtype([char, list | rest]) when is_list(list), + do: win32_pathtype([char | list ++ rest]) + defp win32_pathtype(<>) when c1 in @slash and c2 in @slash, + do: {:absolute, relative} + defp win32_pathtype(<>) when char in @slash, + do: {:volumerelative, relative} + defp win32_pathtype(<<_letter, ?:, char, relative::binary>>) when char in @slash, + do: {:absolute, relative} + defp win32_pathtype(<<_letter, ?:, relative::binary>>), + do: {:volumerelative, relative} + + defp win32_pathtype([c1, c2 | relative]) when c1 in @slash and c2 in @slash, + do: {:absolute, relative} + defp win32_pathtype([char | relative]) when char in @slash, + do: {:volumerelative, relative} + defp win32_pathtype([c1, c2, list | rest]) when is_list(list), + do: win32_pathtype([c1, c2 | list ++ rest]) + defp win32_pathtype([_letter, ?:, char | relative]) when char in @slash, + do: 
{:absolute, relative} + defp win32_pathtype([_letter, ?: | relative]), + do: {:volumerelative, relative} + defp win32_pathtype(relative), + do: {:relative, relative} @doc """ Returns the given `path` relative to the given `from` path. - In other words, it tries to strip the `from` prefix from `path`. + + In other words, this function tries to strip the `from` prefix from `path`. This function does not query the file system, so it assumes - no symlinks in between the paths. + no symlinks between the paths. In case a direct relative path cannot be found, it returns the original path. @@ -253,11 +303,11 @@ defmodule Path do relative_to(split(path), split(from), path) end - defp relative_to([h|t1], [h|t2], original) do + defp relative_to([h | t1], [h | t2], original) do relative_to(t1, t2, original) end - defp relative_to([_|_] = l1, [], _original) do + defp relative_to([_ | _] = l1, [], _original) do join(l1) end @@ -267,8 +317,10 @@ defmodule Path do @doc """ Convenience to get the path relative to the current working - directory. If, for some reason, the current working directory - cannot be retrieved, returns the full path. + directory. + + If, for some reason, the current working directory + cannot be retrieved, this function returns the given `path`. """ @spec relative_to_cwd(t) :: binary def relative_to_cwd(path) do @@ -296,13 +348,15 @@ defmodule Path do """ @spec basename(t) :: binary def basename(path) do - FN.basename(IO.chardata_to_string(path)) + :filename.basename(IO.chardata_to_string(path)) end @doc """ Returns the last component of `path` with the `extension` - stripped. This function should be used to remove a specific - extension which may, or may not, be there. + stripped. + + This function should be used to remove a specific + extension which may or may not be there. ## Examples @@ -318,7 +372,7 @@ defmodule Path do """ @spec basename(t, t) :: binary def basename(path, extension) do - FN.basename(IO.chardata_to_string(path), IO.chardata_to_string(extension)) + :filename.basename(IO.chardata_to_string(path), IO.chardata_to_string(extension)) end @doc """ @@ -326,15 +380,19 @@ defmodule Path do ## Examples - Path.dirname("/foo/bar.ex") - #=> "/foo" - Path.dirname("/foo/bar/baz.ex") - #=> "/foo/bar" + iex> Path.dirname("/foo/bar.ex") + "/foo" + + iex> Path.dirname("/foo/bar/baz.ex") + "/foo/bar" + + iex> Path.dirname("/foo/bar/") + "/foo/bar" """ @spec dirname(t) :: binary def dirname(path) do - FN.dirname(IO.chardata_to_string(path)) + :filename.dirname(IO.chardata_to_string(path)) end @doc """ @@ -351,7 +409,7 @@ defmodule Path do """ @spec extname(t) :: binary def extname(path) do - FN.extension(IO.chardata_to_string(path)) + :filename.extension(IO.chardata_to_string(path)) end @doc """ @@ -368,12 +426,14 @@ defmodule Path do """ @spec rootname(t) :: binary def rootname(path) do - FN.rootname(IO.chardata_to_string(path)) + :filename.rootname(IO.chardata_to_string(path)) end @doc """ - Returns the `path` with the `extension` stripped. This function should be used to - remove a specific extension which might, or might not, be there. + Returns the `path` with the `extension` stripped. + + This function should be used to remove a specific extension which may + or may not be there. 
## Examples @@ -386,14 +446,14 @@ defmodule Path do """ @spec rootname(t, t) :: binary def rootname(path, extension) do - FN.rootname(IO.chardata_to_string(path), IO.chardata_to_string(extension)) + :filename.rootname(IO.chardata_to_string(path), IO.chardata_to_string(extension)) end @doc """ - Returns a string with one or more path components joined by the path separator. + Joins a list of paths. - This function should be used to convert a list of strings to a path. - Note that any trailing slash is removed on join. + This function should be used to convert a list of paths to a path. + Note that any trailing slash is removed when joining. ## Examples @@ -407,71 +467,68 @@ defmodule Path do "/foo/bar" """ - @spec join([t]) :: binary - def join([name1, name2|rest]), do: - join([join(name1, name2)|rest]) + @spec join(nonempty_list(t)) :: binary + def join([name1, name2 | rest]), do: + join([join(name1, name2) | rest]) def join([name]), do: - do_join(IO.chardata_to_string(name), <<>>, [], major_os_type()) + IO.chardata_to_string(name) @doc """ Joins two paths. + The right path will always be expanded to its relative format + and any trailing slash will be removed when joining. + ## Examples iex> Path.join("foo", "bar") "foo/bar" + iex> Path.join("/foo", "/bar/") + "/foo/bar" + """ @spec join(t, t) :: binary - def join(left, right), - do: do_join(IO.chardata_to_string(left), relative(right), [], major_os_type()) + def join(left, right) do + left = IO.chardata_to_string(left) + os_type = major_os_type() + do_join(left, right, os_type) |> remove_dir_sep(os_type) + end - defp major_os_type do - :os.type |> elem(0) + defp do_join("", right, os_type), do: relative(right, os_type) + defp do_join("/", right, os_type), do: "/" <> relative(right, os_type) + defp do_join(left, right, os_type), do: remove_dir_sep(left, os_type) <> "/" <> relative(right, os_type) + + defp remove_dir_sep("", _os_type), do: "" + defp remove_dir_sep("/", _os_type), do: "/" + defp remove_dir_sep(bin, os_type) do + last = :binary.last(bin) + if last == ?/ or (last == ?\\ and os_type == :win32) do + binary_part(bin, 0, byte_size(bin) - 1) + else + bin + end end - defp do_join(<>, relativename, [], :win32) when uc_letter in ?A..?Z, do: - do_join(rest, relativename, [?:, uc_letter+?a-?A], :win32) - defp do_join(<>, relativename, result, :win32), do: - do_join(<>, relativename, result, :win32) - defp do_join(<>, relativename, [?., ?/|result], os_type), do: - do_join(rest, relativename, [?/|result], os_type) - defp do_join(<>, relativename, [?/|result], os_type), do: - do_join(rest, relativename, [?/|result], os_type) - defp do_join(<<>>, <<>>, result, os_type), do: - IO.iodata_to_binary(maybe_remove_dirsep(result, os_type)) - defp do_join(<<>>, relativename, [?:|rest], :win32), do: - do_join(relativename, <<>>, [?:|rest], :win32) - defp do_join(<<>>, relativename, [?/|result], os_type), do: - do_join(relativename, <<>>, [?/|result], os_type) - defp do_join(<<>>, relativename, result, os_type), do: - do_join(relativename, <<>>, [?/|result], os_type) - defp do_join(<>, relativename, result, os_type), do: - do_join(rest, relativename, [char|result], os_type) - - defp maybe_remove_dirsep([?/, ?:, letter], :win32), do: - [letter, ?:, ?/] - defp maybe_remove_dirsep([?/], _), do: - [?/] - defp maybe_remove_dirsep([?/|name], _), do: - :lists.reverse(name) - defp maybe_remove_dirsep(name, _), do: - :lists.reverse(name) + @doc ~S""" + Splits the path into a list at the path separator. 
- @doc """ - Returns a list with the path split by the path separator. - If an empty string is given, returns the root path. + If an empty string is given, returns an empty list. + + On Windows, path is split on both "\" and "/" separators + and the driver letter, if there is one, is always returned + in lowercase. ## Examples - iex> Path.split("") - [] + iex> Path.split("") + [] - iex> Path.split("foo") - ["foo"] + iex> Path.split("foo") + ["foo"] - iex> Path.split("/foo/bar") - ["/", "foo", "bar"] + iex> Path.split("/foo/bar") + ["/", "foo", "bar"] """ @spec split(t) :: [binary] @@ -480,18 +537,18 @@ defmodule Path do def split(""), do: [] def split(path) do - FN.split(IO.chardata_to_string(path)) + :filename.split(IO.chardata_to_string(path)) end defmodule Wildcard do @moduledoc false - def read_file_info(file) do + def read_link_info(file) do call({:read_link_info, file}) end def list_dir(dir) do - case call({:list_dir, dir}) do + case call({:list_dir, dir}) do {:ok, files} -> {:ok, for(file <- files, hd(file) != ?., do: file)} other -> @@ -510,7 +567,8 @@ defmodule Path do end @doc """ - Traverses paths according to the given `glob` expression. + Traverses paths according to the given `glob` expression and returns a + list of matches. The wildcard looks like an ordinary path, except that certain "wildcard characters" are interpreted in a special way. The @@ -525,21 +583,25 @@ defmodule Path do files and zero or more directories and subdirectories * `[char1,char2,...]` - matches any of the characters listed; two - characters separated by a hyphen will match a range of characters + characters separated by a hyphen will match a range of characters. + Do not add spaces before and after the comma as it would then match + paths containing the space character itself. * `{item1,item2,...}` - matches one of the alternatives + Do not add spaces before and after the comma as it would then match + paths containing the space character itself. Other characters represent themselves. Only paths that have exactly the same character in the same position will match. Note - that matching is case-sensitive; i.e. "a" will not match "A". + that matching is case-sensitive: `"a"` will not match `"A"`. By default, the patterns `*` and `?` do not match files starting - with a dot `.` unless `match_dot: true` is given. + with a dot `.` unless `match_dot: true` is given in `opts`. ## Examples Imagine you have a directory called `projects` with three Elixir projects - inside of it: `elixir`, `ex_doc` and `dynamo`. You can find all `.beam` files + inside of it: `elixir`, `ex_doc`, and `plug`. You can find all `.beam` files inside the `ebin` directory of each project as follows: Path.wildcard("projects/*/ebin/**/*.beam") @@ -549,7 +611,7 @@ defmodule Path do Path.wildcard("projects/*/ebin/**/*.{beam,app}") """ - @spec wildcard(t) :: [binary] + @spec wildcard(t, Keyword.t) :: [binary] def wildcard(glob, opts \\ []) do mod = if Keyword.get(opts, :match_dot), do: :file, else: Path.Wildcard glob @@ -558,7 +620,7 @@ defmodule Path do |> Enum.map(&IO.chardata_to_string/1) end - # Normalize the given path by expanding "..", "." and "~". + # expand_dot the given path by expanding "..", "." and "~". defp chardata_to_list(chardata) do case :unicode.characters_to_list(chardata) do @@ -575,34 +637,47 @@ defmodule Path do defp expand_home(type) do case IO.chardata_to_string(type) do - "~" <> rest -> System.user_home! 
<> rest + "~" <> rest -> resolve_home(rest) rest -> rest end end - defp normalize(path), do: normalize(split(path), []) - - defp normalize([".."|t], ["/"|_] = acc) do - normalize t, acc - end - - defp normalize([".."|t], [<>|_] = acc) when letter in ?a..?z do - normalize t, acc - end + defp resolve_home(""), do: System.user_home! - defp normalize([".."|t], [_|acc]) do - normalize t, acc - end - - defp normalize(["."|t], acc) do - normalize t, acc + defp resolve_home(rest) do + case {rest, major_os_type()} do + {"\\" <> _, :win32} -> + System.user_home! <> rest + {"/" <> _, _} -> + System.user_home! <> rest + _ -> rest + end end - defp normalize([h|t], acc) do - normalize t, [h|acc] - end + defp expand_dot(<<"/", rest::binary>>), + do: "/" <> do_expand_dot(rest) + defp expand_dot(<>) when letter in ?a..?z, + do: <> <> do_expand_dot(rest) + defp expand_dot(path), + do: do_expand_dot(path) + + defp do_expand_dot(path), + do: do_expand_dot(:binary.split(path, "/", [:global]), []) + + defp do_expand_dot([".." | t], [_, _ | acc]), + do: do_expand_dot(t, acc) + defp do_expand_dot([".." | t], []), + do: do_expand_dot(t, []) + defp do_expand_dot(["." | t], acc), + do: do_expand_dot(t, acc) + defp do_expand_dot([h | t], acc), + do: do_expand_dot(t, ["/", h | acc]) + defp do_expand_dot([], []), + do: "" + defp do_expand_dot([], ["/" | acc]), + do: IO.iodata_to_binary(:lists.reverse(acc)) - defp normalize([], acc) do - join :lists.reverse(acc) + defp major_os_type do + :os.type |> elem(0) end end diff --git a/lib/elixir/lib/port.ex b/lib/elixir/lib/port.ex index e7f465a26bb..69b7d6c5085 100644 --- a/lib/elixir/lib/port.ex +++ b/lib/elixir/lib/port.ex @@ -1,68 +1,270 @@ defmodule Port do - @moduledoc """ - Functions related to Erlang ports. + @moduledoc ~S""" + Functions for interacting with the external world through ports. + + Ports provide a mechanism to start operating system processes external + to the Erlang VM and communicate with them via message passing. + + ## Example + + iex> port = Port.open({:spawn, "cat"}, [:binary]) + iex> send port, {self(), {:command, "hello"}} + iex> send port, {self(), {:command, "world"}} + iex> flush() + {#Port<0.1444>, {:data, "hello"}} + {#Port<0.1444>, {:data, "world"}} + iex> send port, {self(), :close} + :ok + iex> flush() + {#Port<0.1464>, :closed} + :ok + + In the example above, we have created a new port that executes the + program `cat`. `cat` is a program available on UNIX systems that + receives data from multiple inputs and concatenates them in the output. + + After the port was created, we sent it two commands in the form of + messages using `Kernel.send/2`. The first command has the binary payload + of "hello" and the second has "world". + + After sending those two messages, we invoked the IEx helper `flush()`, + which printed all messages received from the port, in this case we got + "hello" and "world" back. Notice the messages are in binary because we + passed the `:binary` option when opening the port in `Port.open/2`. Without + such option, it would have yielded a list of bytes. + + Once everything was done, we closed the port. + + Elixir provides many conveniences for working with ports and some drawbacks. + We will explore those below. + + ## Message and function APIs + + There are two APIs for working with ports. It can be either asynchronous via + message passing, as in the example above, or by calling the functions on this + module. 
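For instance, the message-based `cat` example above can also be driven through the function API (a minimal sketch; replies still arrive as messages such as `{port, {:data, ...}}`):

    port = Port.open({:spawn, "cat"}, [:binary])
    Port.command(port, "hello")   # counterpart of send(port, {self(), {:command, "hello"}})
    Port.close(port)              # counterpart of send(port, {self(), :close})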
+
+ The messages supported by ports and their counterpart function APIs are
+ listed below:
+
+ * `{pid, {:command, binary}}` - sends the given data to the port.
+ See `command/3`.
+
+ * `{pid, :close}` - closes the port. Unless the port is already closed,
+ the port will reply with a `{port, :closed}` message once it has flushed
+ its buffers and effectively closed. See `close/1`.
+
+ * `{pid, {:connect, new_pid}}` - sets the `new_pid` as the new owner of
+ the port. Once a port is opened, the port is linked and connected to the
+ caller process and communication to the port only happens through the
+ connected process. This message makes `new_pid` the new connected process.
+ Unless the port is dead, the port will reply to the old owner with
+ `{port, :connected}`. See `connect/2`.
+
+ In turn, the port will send the connected process the following messages:
+
+ * `{port, {:data, data}}` - data sent by the port
+ * `{port, :closed}` - reply to the `{pid, :close}` message
+ * `{port, :connected}` - reply to the `{pid, {:connect, new_pid}}` message
+ * `{:EXIT, port, reason}` - exit signals in case the port crashes. If `reason`
+ is not `:normal`, this message will only be received if the owner process
+ is trapping exits
+
+ ## Open mechanisms
+
+ The port can be opened through four main mechanisms.
+
+ As a short summary, prefer using the `:spawn` and `:spawn_executable`
+ options mentioned below. The other two options, `:spawn_driver` and `:fd`,
+ are for advanced usage within the VM. Also consider using `System.cmd/3`
+ if all you want is to execute a program and retrieve its return value.
+
+ ### spawn
+
+ The `:spawn` tuple receives a binary that is going to be executed as a
+ full invocation. For example, we can use it to invoke "echo oops" directly:
+
+ iex> port = Port.open({:spawn, "echo oops"}, [:binary])
+ iex> flush()
+ {#Port<0.1444>, {:data, "oops\n"}}
+
+ `:spawn` will retrieve the program name from the argument and traverse your
+ OS `$PATH` environment variable looking for a matching program.
+
+ Although the above is handy, it means it is impossible to invoke an executable
+ that has whitespace in its name or in any of its arguments. For those reasons,
+ most times it is preferable to use `:spawn_executable`.
+
+ ### spawn_executable
+
+ Spawn executable is a more restricted and explicit version of spawn. It expects
+ full file paths to the executable you want to execute. If they are in your `$PATH`,
+ they can be retrieved by calling `System.find_executable/1`:
+
+ iex> path = System.find_executable("echo")
+ iex> port = Port.open({:spawn_executable, path}, [:binary, args: ["hello world"]])
+ iex> flush()
+ {#Port<0.1380>, {:data, "hello world\n"}}
+
+ When using `:spawn_executable`, the list of arguments can be passed via
+ the `:args` option as done above. For the full list of options, see the
+ documentation for the Erlang function `:erlang.open_port/2`.
+
+ ### spawn_driver
+
+ Spawn driver is used to start Port Drivers, which are programs written in
+ C that implement a specific communication protocol and are dynamically
+ linked to the Erlang VM. Port drivers are an advanced topic and one of the
+ mechanisms for integrating C code, alongside NIFs. For more information,
+ [please check the Erlang docs](http://erlang.org/doc/reference_manual/ports.html).
+
+ ### fd
+
+ The `:fd` name option allows developers to access `in` and `out` file
+ descriptors used by the Erlang VM.
You would use those only if you are + reimplementing core part of the Runtime System, such as the `:user` and + `:shell` processes. + + ## Zombie processes + + A port can be closed via the `close/1` function or by sending a `{pid, :close}` + message. However, if the VM crashes, a long-running program started by the port + will have its stdin and stdout channels closed but **it won't be automatically + terminated**. + + While most UNIX command line tools will exit once its communication channels + are closed, not all command line applications will do so. While we encourage + graceful termination by detecting if stdin/stdout has been closed, we do not + always have control over how 3rd party software terminates. In those cases, + you can wrap the application in a script that checks for stdin. Here is such + script in bash: + + #!/bin/sh + "$@" + pid=$! + while read line ; do + : + done + kill -KILL $pid + + + Now instead of: + + Port.open({:spawn_executable, "/path/to/program"}, + [args: ["a", "b", "c"]]) + + You may invoke: + + Port.open({:spawn_executable, "/path/to/wrapper"}, + [args: ["/path/to/program", "a", "b", "c"]]) + """ + @type name :: {:spawn, charlist | binary} | + {:spawn_driver, charlist | binary} | + {:spawn_executable, charlist | atom} | + {:fd, non_neg_integer, non_neg_integer} + @doc """ - See http://www.erlang.org/doc/man/erlang.html#open_port-2. + Opens a port given a tuple `name` and a list of `options`. + + The module documentation above contains documentation and examples + for the supported `name` values, summarized below: + + * `{:spawn, command}` - runs an external program. `command` must contain + the program name and optionally a list of arguments separated by space. + If passing programs or arguments with space in their name, use the next option. + * `{:spawn_executable, filename}` - runs the executable given by the absolute + file name `filename`. Arguments can be passed via the `:args` option. + * `{:spawn_driver, command}` - spawns so-called port drivers. + * `{:fd, fd_in, fd_out}` - accesses file descriptors, `fd_in` and `fd_out` + opened by the VM. + + For more information and the list of options, see + [`:erlang.open_port/2`](http://www.erlang.org/doc/man/erlang.html#open_port-2). + + Inlined by the compiler. """ + @spec open(name, list) :: port def open(name, settings) do :erlang.open_port(name, settings) end @doc """ - See http://www.erlang.org/doc/man/erlang.html#port_close-1. + Closes the `port`. + + For more information, see [`:erlang.port_close/1`](http://www.erlang.org/doc/man/erlang.html#port_close-1). + + Inlined by the compiler. """ + @spec close(port) :: true def close(port) do :erlang.port_close(port) end @doc """ - See http://www.erlang.org/doc/man/erlang.html#port_command-2. + Sends `data` to the port driver `port`. + + For more information, see [`:erlang.port_command/2`](http://www.erlang.org/doc/man/erlang.html#port_command-2). + + Inlined by the compiler. """ + @spec command(port, iodata, [:force | :nosuspend]) :: boolean def command(port, data, options \\ []) do :erlang.port_command(port, data, options) end @doc """ - See http://www.erlang.org/doc/man/erlang.html#port_connect-2. + Associates the `port` identifier with a `pid`. + + For more information, see [`:erlang.port_connect/2`](http://www.erlang.org/doc/man/erlang.html#port_connect-2). + + Inlined by the compiler. 
""" + @spec connect(port, pid) :: true def connect(port, pid) do :erlang.port_connect(port, pid) end @doc """ - See http://www.erlang.org/doc/man/erlang.html#port_control-3. - """ - def control(port, operation, data) do - :erlang.port_control(port, operation, data) - end + Returns information about the `port` or `nil` if the port is closed. - @doc """ - See http://www.erlang.org/doc/man/erlang.html#port_call-3. + For more information, see [`:erlang.port_info/1`](http://www.erlang.org/doc/man/erlang.html#port_info-1). """ - def call(port, operation, data) do - :erlang.port_call(port, operation, data) + def info(port) do + nillify :erlang.port_info(port) end @doc """ - See http://www.erlang.org/doc/man/erlang.html#port_info-1. + Returns information about the `port` or `nil` if the port is closed. + + For more information, see [`:erlang.port_info/2`](http://www.erlang.org/doc/man/erlang.html#port_info-2). """ - def info(port) do - :erlang.port_info(port) + @spec info(port, atom) :: {atom, term} | nil + def info(port, spec) + + def info(port, :registered_name) do + case :erlang.port_info(port, :registered_name) do + [] -> {:registered_name, []} + other -> nillify(other) + end end - @doc """ - See http://www.erlang.org/doc/man/erlang.html#port_info-2. - """ def info(port, item) do - :erlang.port_info(port, item) + nillify :erlang.port_info(port, item) end @doc """ - See http://www.erlang.org/doc/man/erlang.html#ports-0. + Returns a list of all ports in the current node. + + Inlined by the compiler. """ + @spec list :: [port] def list do :erlang.ports end -end \ No newline at end of file + + @compile {:inline, nillify: 1} + defp nillify(:undefined), do: nil + defp nillify(other), do: other +end diff --git a/lib/elixir/lib/process.ex b/lib/elixir/lib/process.ex index ce73ca7bb16..3135b4b841f 100644 --- a/lib/elixir/lib/process.ex +++ b/lib/elixir/lib/process.ex @@ -4,7 +4,7 @@ defmodule Process do Besides the functions available in this module, the `Kernel` module exposes and auto-imports some basic functionality related to processes - available through the functions: + available through the following functions: * `Kernel.spawn/1` and `Kernel.spawn/3` * `Kernel.spawn_link/1` and `Kernel.spawn_link/3` @@ -15,28 +15,31 @@ defmodule Process do """ @doc """ - Returns true if the process exists and is alive, that is, - is not exiting and has not exited. Otherwise, returns false. + Tells whether the given process is alive. - `pid` must refer to a process at the local node. + If the process identified by `pid` is alive (that is, it's not exiting and has + not exited yet) than this function returns `true`. Otherwise, it returns + `false`. + + `pid` must refer to a process running on the local node. + + Inlined by the compiler. """ @spec alive?(pid) :: boolean - def alive?(pid) do - :erlang.is_process_alive(pid) - end + defdelegate alive?(pid), to: :erlang, as: :is_process_alive @doc """ - Returns all key-values in the dictionary. + Returns all key-value pairs in the process dictionary. + + Inlined by the compiler. """ - @spec get :: [{term, term}] - def get do - :erlang.get() - end + @spec get() :: [{term, term}] + defdelegate get(), to: :erlang @doc """ - Returns the value for the given key. + Returns the value for the given `key` in the process dictionary, + or `default` if `key` is not set. 
""" - @spec get(term) :: term @spec get(term, default :: term) :: term def get(key, default \\ nil) do case :erlang.get(key) do @@ -48,15 +51,35 @@ defmodule Process do end @doc """ - Returns all keys that have the given `value`. + Returns all keys in the process dictionary. + + Inlined by the compiler. + """ + @spec get_keys() :: [term] + defdelegate get_keys(), to: :erlang + + @doc """ + Returns all keys in the process dictionary that have the given `value`. + + Inlined by the compiler. """ @spec get_keys(term) :: [term] - def get_keys(value) do - :erlang.get_keys(value) - end + defdelegate get_keys(value), to: :erlang @doc """ - Stores the given key-value in the process dictionary. + Stores the given `key`-`value` pair in the process dictionary. + + The return value of this function is the value that was previously stored + under `key`, or `nil` in case no value was stored under `key`. + + ## Examples + + # Assuming :locale was not set + Process.put(:locale, "en") + #=> nil + Process.put(:locale, "fr") + #=> "en" + """ @spec put(term, term) :: term | nil def put(key, value) do @@ -64,7 +87,7 @@ defmodule Process do end @doc """ - Deletes the given key from the dictionary. + Deletes the given `key` from the process dictionary. """ @spec delete(term) :: term | nil def delete(key) do @@ -72,84 +95,233 @@ defmodule Process do end @doc """ - Sends an exit signal with the given reason to the pid. + Sends an exit signal with the given `reason` to `pid`. - The following behaviour applies if reason is any term except `:normal` or `:kill`: + The following behaviour applies if `reason` is any term except `:normal` + or `:kill`: - 1. If pid is not trapping exits, pid will exit with the given reason. + 1. If `pid` is not trapping exits, `pid` will exit with the given + `reason`. - 2. If pid is trapping exits, the exit signal is transformed into a message - {:EXIT, from, reason} and delivered to the message queue of pid. + 2. If `pid` is trapping exits, the exit signal is transformed into a + message `{:EXIT, from, reason}` and delivered to the message queue + of `pid`. - 3. If reason is the atom `:normal`, pid will not exit. If it is trapping - exits, the exit signal is transformed into a message {:EXIT, from, - :normal} and delivered to its message queue. + If `reason` is the atom `:normal`, `pid` will not exit (unless `pid` is + the calling process, in which case it will exit with the reason `:normal`). + If it is trapping exits, the exit signal is transformed into a message + `{:EXIT, from, :normal}` and delivered to its message queue. - 4. If reason is the atom `:kill`, that is if `exit(pid, :kill)` is called, - an untrappable exit signal is sent to pid which will unconditionally - exit with exit reason `:killed`. + If `reason` is the atom `:kill`, that is if `Process.exit(pid, :kill)` is called, + an untrappable exit signal is sent to `pid` which will unconditionally exit + with reason `:killed`. Inlined by the compiler. ## Examples Process.exit(pid, :kill) + #=> true """ @spec exit(pid, term) :: true - def exit(pid, reason) do - :erlang.exit(pid, reason) + defdelegate exit(pid, reason), to: :erlang + + @doc """ + Sleeps the current process for the given `timeout`. + + `timeout` is either the number of milliseconds to sleep as an + integer or the atom `:infinity`. When `:infinity` is given, + the current process will sleep forever, and not + consume or reply to messages. + + **Use this function with extreme care**. 
For almost all situations + where you would use `sleep/1` in Elixir, there is likely a + more correct, faster and precise way of achieving the same with + message passing. + + For example, if you are waiting a process to perform some + action, it is better to communicate the progress of such action + with messages. + + In other words, **do not**: + + Task.start_link fn -> + do_something() + ... + end + + # Wait until work is done + Process.sleep(2000) + + But **do**: + + parent = self() + Task.start_link fn -> + do_something() + send parent, :work_is_done + ... + end + + receive do + :work_is_done -> :ok + after + 30_000 -> :timeout # Optional timeout + end + + For cases like the one above, `Task.async/1` and `Task.await/2` are + preferred. + + Similarly, if you are waiting for a process to terminate, + monitor that process instead of sleeping. **Do not**: + + Task.start_link fn -> + ... + end + + # Wait until task terminates + Process.sleep(2000) + + Instead **do**: + + {:ok, pid} = + Task.start_link fn -> + ... + end + + ref = Process.monitor(pid) + receive do + {:DOWN, ^ref, _, _, _} -> :task_is_down + after + 30_000 -> :timeout # Optional timeout + end + + """ + @spec sleep(timeout) :: :ok + def sleep(timeout) + when is_integer(timeout) and timeout >= 0 + when timeout == :infinity do + receive after: (timeout -> :ok) end @doc """ Sends a message to the given process. - If the option `:noconnect` is used and sending the message would require an - auto-connection to another node the message is not sent and `:noconnect` is - returned. + ## Options + + * `:noconnect` - when used, if sending the message would require an + auto-connection to another node the message is not sent and `:noconnect` is + returned. - If the option `:nosuspend` is used and sending the message would cause the - sender to be suspended the message is not sent and `:nosuspend` is returned. + * `:nosuspend` - when used, if sending the message would cause the sender to + be suspended the message is not sent and `:nosuspend` is returned. Otherwise the message is sent and `:ok` is returned. ## Examples - iex> Process.send({:name, :node_does_not_exist}, :hi, [:noconnect]) + iex> Process.send({:name, :node_that_does_not_exist}, :hi, [:noconnect]) :noconnect + Inlined by the compiler. """ - @spec send(dest, msg, [option]) :: result when - dest: pid | port | atom | {atom, node}, - msg: any, - option: :noconnect | :nosuspend, - result: :ok | :noconnect | :nosuspend - def send(dest, msg, options) do - :erlang.send(dest, msg, options) - end + @spec send(dest, msg, [option]) :: :ok | :noconnect | :nosuspend + when dest: pid | port | atom | {atom, node}, + msg: any, + option: :noconnect | :nosuspend + defdelegate send(dest, msg, options), to: :erlang @doc """ - Sends `msg` to `dest` after `time` millisecons. + Sends `msg` to `dest` after `time` milliseconds. - If `dest` is a pid, it has to be a pid of a local process, dead or alive. - If `dest` is an atom, it is supposed to be the name of a registered process - which is looked up at the time of delivery. No error is given if the name does + If `dest` is a PID, it must be the PID of a local process, dead or alive. + If `dest` is an atom, it must be the name of a registered process + which is looked up at the time of delivery. No error is produced if the name does not refer to a process. - This function returns a timer reference, which can be read or canceled with - `:erlang.read_timer/1`, `:erlang.start_timer/3` and `:erlang.cancel_timer/1`. 
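Since `sleep/1` is implemented as a `receive` with only an `after` clause, a plain `receive`/`after` can serve as an interruptible sleep, which is often what is actually wanted:

    # Wait up to 5 seconds, but wake up early if :wake_up arrives
    receive do
      :wake_up -> :woken_early
    after
      5_000 -> :slept_full_time
    end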
- Note `time` cannot be greater than `4294967295`. + This function returns a timer reference, which can be read with `read_timer/1` + or canceled with `cancel_timer/1`. - Finally, the timer will be automatically canceled if the given `dest` is a pid - which is not alive or when the given pid exits. Note that timers will not be + The timer will be automatically canceled if the given `dest` is a PID + which is not alive or when the given PID exits. Note that timers will not be automatically canceled when `dest` is an atom (as the atom resolution is done on delivery). + + Inlined by the compiler. + + ## Options + + * `:abs` - (boolean) when `false`, `time` is treated as relative to the + current monotonic time. When `true`, `time` is the absolute value of the + Erlang monotonic time at which `msg` should be delivered to `dest`. + To read more about Erlang monotonic time and other time-related concepts, + look at the documentation for the `System` module. Defaults to `false`. + + ## Examples + + timer_ref = Process.send_after(pid, :hi, 1000) + """ - @spec send_after(pid | atom, term, non_neg_integer) :: reference - def send_after(dest, msg, time) do - :erlang.send_after(time, dest, msg) + @spec send_after(pid | atom, term, non_neg_integer, [option]) :: reference + when option: {:abs, boolean} + def send_after(dest, msg, time, opts \\ []) do + :erlang.send_after(time, dest, msg, opts) end + @doc """ + Cancels a timer returned by `send_after/3`. + + When the result is an integer, it represents the time in milliseconds + left until the timer would have expired. + + When the result is `false`, a timer corresponding to `timer_ref` could not be + found. This can happen either because the timer expired, because it has + already been canceled, or because `timer_ref` never corresponded to a timer. + + Even if the timer had expired and the message was sent, this function does not + tell you if the timeout message has arrived at its destination yet. + + ## Options + + * `:async` - (boolean) when `false`, the request for cancellation is + synchronous. When `true`, the request for cancellation is asynchronous, + meaning that the request to cancel the timer is issued and `:ok` is + returned right away. Defaults to `false`. + + * `:info` - (boolean) whether to return information about the timer being + cancelled. When the `:async` option is `false` and `:info` is `true`, then + either an integer or `false` (like described above) is returned. If + `:async` is `false` and `:info` is `false`, `:ok` is returned. If `:async` + is `true` and `:info` is `true`, a message in the form `{:cancel_timer, + timer_ref, result}` (where `result` is an integer or `false` like + described above) is sent to the caller of this function when the + cancellation has been performed. If `:async` is `true` and `:info` is + `false`, no message is sent. Defaults to `true`. + + Inlined by the compiler. + """ + @spec cancel_timer(reference, options) :: non_neg_integer | false | :ok + when options: [async: boolean, info: boolean] + defdelegate cancel_timer(timer_ref, options \\ []), to: :erlang + + @doc """ + Reads a timer created by `send_after/3`. + + When the result is an integer, it represents the time in milliseconds + left until the timer will expire. + + When the result is `false`, a timer corresponding to `timer_ref` could not be + found. This can be either because the timer expired, because it has already + been canceled, or because `timer_ref` never corresponded to a timer. 
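A short sketch tying `send_after/4`, `read_timer/1` and `cancel_timer/2` together (the exact millisecond values shown are only illustrative):

    timer_ref = Process.send_after(self(), :hello, 10_000)
    Process.read_timer(timer_ref)     #=> e.g. 9985 (milliseconds left)
    Process.cancel_timer(timer_ref)   #=> e.g. 9970, or false if it already fired
    Process.read_timer(timer_ref)     #=> false (the timer no longer exists)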
+ + Even if the timer had expired and the message was sent, this function does not + tell you if the timeout message has arrived at its destination yet. + + Inlined by the compiler. + """ + @spec read_timer(reference) :: non_neg_integer | false + defdelegate read_timer(timer_ref), to: :erlang + @type spawn_opt :: :link | :monitor | {:priority, :low | :normal | :high} | {:fullsweep_after, non_neg_integer} | {:min_heap_size, non_neg_integer} | @@ -157,48 +329,56 @@ defmodule Process do @type spawn_opts :: [spawn_opt] @doc """ - Spawns the given module and function passing the given args - according to the given options. + Spawns the given function according to the given options. The result depends on the given options. In particular, if `:monitor` is given as an option, it will return a tuple - containing the pid and the monitoring reference, otherwise - just the spawned process pid. + containing the PID and the monitoring reference, otherwise + just the spawned process PID. - It also accepts extra options, for the list of available options - check http://www.erlang.org/doc/man/erlang.html#spawn_opt-4 + More options are available; for the comprehensive list of available options + check [`:erlang.spawn_opt/4`](http://www.erlang.org/doc/man/erlang.html#spawn_opt-4). Inlined by the compiler. """ @spec spawn((() -> any), spawn_opts) :: pid | {pid, reference} - def spawn(fun, opts) do - :erlang.spawn_opt(fun, opts) - end + defdelegate spawn(fun, opts), to: :erlang, as: :spawn_opt @doc """ - Spawns the given module and function passing the given args + Spawns the given function `fun` from module `mod`, passing the given `args` according to the given options. The result depends on the given options. In particular, if `:monitor` is given as an option, it will return a tuple - containing the pid and the monitoring reference, otherwise - just the spawned process pid. + containing the PID and the monitoring reference, otherwise + just the spawned process PID. It also accepts extra options, for the list of available options - check http://www.erlang.org/doc/man/erlang.html#spawn_opt-4 + check [`:erlang.spawn_opt/4`](http://www.erlang.org/doc/man/erlang.html#spawn_opt-4). Inlined by the compiler. """ @spec spawn(module, atom, list, spawn_opts) :: pid | {pid, reference} - def spawn(mod, fun, args, opts) do - :erlang.spawn_opt(mod, fun, args, opts) - end + defdelegate spawn(mod, fun, args, opts), to: :erlang, as: :spawn_opt @doc """ - The calling process starts monitoring the item given. - It returns the monitor reference. + Starts monitoring the given `item` from the calling process. - See http://www.erlang.org/doc/man/erlang.html#monitor-2 for more info. + Once the monitored process dies, a message is delivered to the + monitoring process in the shape of: + + {:DOWN, ref, :process, object, reason} + + where: + + * `ref` is a monitor reference returned by this function; + * `object` is either a `pid` of the monitored process (if monitoring + a PID) or `{name, node}` (if monitoring a remote or local name); + * `reason` is the exit reason. + + See [the need for monitoring](http://elixir-lang.org/getting-started/mix-otp/genserver.html#the-need-for-monitoring) + for an example. + See [`:erlang.monitor/2`](http://www.erlang.org/doc/man/erlang.html#monitor-2) for more info. Inlined by the compiler. """ @@ -208,91 +388,131 @@ defmodule Process do end @doc """ - If monitor_ref is a reference which the calling process - obtained by calling monitor/1, this monitoring is turned off. 
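For example (the `:DOWN` reason depends on whether the process is still alive when the monitor is set up):

    pid = spawn(fn -> :ok end)   # terminates almost immediately
    ref = Process.monitor(pid)

    receive do
      {:DOWN, ^ref, :process, ^pid, reason} -> reason
      #=> :normal, or :noproc if the process was already dead
    end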
+ Demonitors the monitor identifies by the given `reference`. + + If `monitor_ref` is a reference which the calling process + obtained by calling `monitor/1`, that monitoring is turned off. If the monitoring is already turned off, nothing happens. - See http://www.erlang.org/doc/man/erlang.html#demonitor-2 for more info. + See [`:erlang.demonitor/2`](http://www.erlang.org/doc/man/erlang.html#demonitor-2) for more info. Inlined by the compiler. """ - @spec demonitor(reference) :: true @spec demonitor(reference, options :: [:flush | :info]) :: boolean - def demonitor(monitor_ref, options \\ []) do - :erlang.demonitor(monitor_ref, options) - end + defdelegate demonitor(monitor_ref, options \\ []), to: :erlang @doc """ - Returns a list of process identifiers corresponding to all the + Returns a list of PIDs corresponding to all the processes currently existing on the local node. - Note that a process that is exiting, exists but is not alive, i.e., - alive?/1 will return false for a process that is exiting, - but its process identifier will be part of the result returned. + Note that if a process is exiting, it is considered to exist but not be + alive. This means that for such process, `alive?/1` will return `false` but + its PID will be part of the list of PIDs returned by this function. - See http://www.erlang.org/doc/man/erlang.html#processes-0 for more info. + See [`:erlang.processes/0`](http://www.erlang.org/doc/man/erlang.html#processes-0) for more info. + + Inlined by the compiler. """ - @spec list :: [pid] - def list do - :erlang.processes() - end + @spec list() :: [pid] + defdelegate list(), to: :erlang, as: :processes @doc """ - Creates a link between the calling process and another process - (or port) `pid`, if there is not such a link already. + Creates a link between the calling process and the given item (process or + port). + + Links are bidirectional. Linked processes can be unlinked by using `unlink/1`. + + If such a link exists already, this function does nothing since there can only + be one link between two given processes. If a process tries to create a link + to itself, nothing will happen. + + When two processes are linked, each one receives exit signals from the other + (see also `exit/2`). Let's assume `pid1` and `pid2` are linked. If `pid2` + exits with a reason other than `:normal` (which is also the exit reason used + when a process finishes its job) and `pid1` is not trapping exits (see + `flag/2`), then `pid1` will exit with the same reason as `pid2` and in turn + emit an exit signal to all its other linked processes. The behaviour when + `pid1` is trapping exits is described in `exit/2`. - See http://www.erlang.org/doc/man/erlang.html#link-1 for more info. + See [`:erlang.link/1`](http://www.erlang.org/doc/man/erlang.html#link-1) for more info. Inlined by the compiler. """ @spec link(pid | port) :: true - def link(pid) do - :erlang.link(pid) - end + defdelegate link(pid_or_port), to: :erlang @doc """ - Removes the link, if there is one, between the calling process and - the process or port referred to by `pid`. Returns true and does not - fail, even if there is no link or `id` does not exist - See http://www.erlang.org/doc/man/erlang.html#unlink-1 for more info. + Removes the link between the calling process and the given item (process or + port). + + If there is no such link, this function does nothing. If `pid_or_port` does + not exist, this function does not produce any errors and simply does nothing. + + The return value of this function is always `true`. 
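A minimal sketch of the exit-signal propagation described above, trapping exits so the calling process survives:

    Process.flag(:trap_exit, true)

    pid =
      spawn(fn ->
        receive do
          :stop -> exit(:boom)
        end
      end)

    Process.link(pid)
    send(pid, :stop)

    receive do
      {:EXIT, ^pid, reason} -> reason   #=> :boom
    end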
+ + See [`:erlang.unlink/1`](http://www.erlang.org/doc/man/erlang.html#unlink-1) for more info. Inlined by the compiler. """ @spec unlink(pid | port) :: true - def unlink(pid) do - :erlang.unlink(pid) - end + defdelegate unlink(pid_or_port), to: :erlang @doc """ - Associates the name with a pid or a port identifier. name, which must - be an atom, can be used instead of the pid / port identifier with the - `Kernel.send/2` function. + Registers the given `pid_or_port` under the given `name`. + + `name` must be an atom and can then be used instead of the + PID/port identifier when sending messages with `Kernel.send/2`. + + `register/2` will fail with `ArgumentError` in any of the following cases: + + * the PID/Port is not existing locally and alive + * the name is already registered + * the `pid_or_port` is already registered under a different `name` + + The following names are reserved and cannot be assigned to + processes nor ports: + + * `nil` + * `false` + * `true` + * `:undefined` - `Process.register/2` will fail with `ArgumentError` if the pid supplied - is no longer alive, (check with `alive?/1`) or if the name is - already registered (check with `registered?/1`). """ @spec register(pid | port, atom) :: true - def register(pid, name) when not name in [nil, false, true] do - :erlang.register(name, pid) + def register(pid_or_port, name) when is_atom(name) and name not in [nil, false, true, :undefined] do + :erlang.register(name, pid_or_port) + catch + :error, :badarg when node(pid_or_port) != node() -> + message = "could not register the #{pid_or_port pid_or_port} because it belongs to another node" + :erlang.error ArgumentError.exception(message), [pid_or_port, name] + :error, :badarg -> + message = "could not register the #{pid_or_port pid_or_port} with " <> + "name #{inspect name}. Or it is not alive, or the name is already " <> + "taken, or it has already been given another name" + :erlang.error ArgumentError.exception(message), [pid_or_port, name] end + defp pid_or_port(pid) when is_pid(pid), do: "pid #{inspect pid}" + defp pid_or_port(port) when is_port(port), do: "port #{inspect port}" + @doc """ - Removes the registered name, associated with a pid or a port identifier. + Removes the registered `name`, associated with a PID + or a port identifier. + + Fails with `ArgumentError` if the name is not registered + to any PID or port. - See http://www.erlang.org/doc/man/erlang.html#unregister-1 for more info. + Inlined by the compiler. """ @spec unregister(atom) :: true - def unregister(name) do - :erlang.unregister(name) - end + defdelegate unregister(name), to: :erlang @doc """ - Returns the pid or port identifier with the registered name. - Returns nil if the name is not registered. + Returns the PID or port identifier registered under `name` or `nil` if the + name is not registered. - See http://www.erlang.org/doc/man/erlang.html#whereis-1 for more info. + See [`:erlang.whereis/1`](http://www.erlang.org/doc/man/erlang.html#whereis-1) for more info. """ @spec whereis(atom) :: pid | port | nil def whereis(name) do @@ -300,16 +520,20 @@ defmodule Process do end @doc """ - Returns the pid of the group leader for the process which evaluates the function. + Returns the PID of the group leader for the calling process. + + Inlined by the compiler. """ - @spec group_leader :: pid - def group_leader do - :erlang.group_leader - end + @spec group_leader() :: pid + defdelegate group_leader(), to: :erlang @doc """ - Sets the group leader of `pid` to `leader`. 
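A small usage sketch; the `:my_worker` name is made up for the example:

    pid = spawn(fn -> Process.sleep(:infinity) end)

    Process.register(pid, :my_worker)
    Process.whereis(:my_worker)    #=> the pid above
    send(:my_worker, :hello)       # registered names work with send/2

    Process.unregister(:my_worker)
    Process.whereis(:my_worker)    #=> nil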
Typically, this is used when a processes - started from a certain shell should have another group leader than `:init`. + Sets the group leader of the given `pid` to `leader`. + + Typically, this is used when a process started from a certain shell should + have a group leader other than `:init`. + + Inlined by the compiler. """ @spec group_leader(pid, leader :: pid) :: true def group_leader(pid, leader) do @@ -317,45 +541,66 @@ defmodule Process do end @doc """ - Returns a list of names which have been registered using register/2. + Returns a list of names which have been registered using `register/2`. + + Inlined by the compiler. """ - @spec registered :: [atom] - def registered do - :erlang.registered() - end + @spec registered() :: [atom] + defdelegate registered(), to: :erlang + + @typep heap_size :: non_neg_integer | + %{size: non_neg_integer, kill: boolean, error_logger: boolean} + + @typep priority_level :: :low | :normal | :high | :max - @typep process_flag :: :trap_exit | :error_handler | :min_heap_size | - :min_bin_vheap_size | :priority | :save_calls | - :sensitive @doc """ - Sets certain flags for the process which calls this function. - Returns the old value of the flag. + Sets the given `flag` to `value` for the calling process. + + Returns the old value of `flag`. + + See [`:erlang.process_flag/2`](http://www.erlang.org/doc/man/erlang.html#process_flag-2) for more info. + + Note that `flag` values `:max_heap_size` and `:message_queue_data` are only available since OTP 19. - See http://www.erlang.org/doc/man/erlang.html#process_flag-2 for more info. + Inlined by the compiler. """ - @spec flag(process_flag, term) :: term - def flag(flag, value) do - :erlang.process_flag(flag, value) - end + @spec flag(:error_handler, module) :: module + @spec flag(:max_heap_size, heap_size) :: heap_size + @spec flag(:message_queue_data, :erlang.message_queue_data) :: :erlang.message_queue_data + @spec flag(:min_bin_vheap_size, non_neg_integer) :: non_neg_integer + @spec flag(:min_heap_size, non_neg_integer) :: non_neg_integer + @spec flag(:monitor_nodes, term) :: term + @spec flag({:monitor_nodes, term()}, term) :: term + @spec flag(:priority, priority_level) :: priority_level + @spec flag(:save_calls, 0..10_000) :: 0..10_000 + @spec flag(:sensitive, boolean) :: boolean + @spec flag(:trap_exit, boolean) :: boolean + defdelegate flag(flag, value), to: :erlang, as: :process_flag @doc """ - Sets certain flags for the process Pid, in the same manner as flag/2. - Returns the old value of the flag. The allowed values for Flag are - only a subset of those allowed in flag/2, namely: save_calls. + Sets the given `flag` to `value` for the given process `pid`. + + Returns the old value of `flag`. + + It raises `ArgumentError` if `pid` is not a local process. - See http://www.erlang.org/doc/man/erlang.html#process_flag-3 for more info. + The allowed values for `flag` are only a subset of those allowed in `flag/2`, + namely `:save_calls`. + + See [`:erlang.process_flag/3`](http://www.erlang.org/doc/man/erlang.html#process_flag-3) for more info. + + Inlined by the compiler. 
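Since `flag/2` returns the previous value, it is easy to change a flag temporarily and restore it afterwards:

    old = Process.flag(:trap_exit, true)   # returns the previous setting, e.g. false
    # ... do work that needs to trap exits ...
    Process.flag(:trap_exit, old)          # restore the original setting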
""" - @spec flag(pid, process_flag, term) :: term - def flag(pid, flag, value) do - :erlang.process_flag(pid, flag, value) - end + @spec flag(pid, :save_calls, 0..10_000) :: 0..10_000 + defdelegate flag(pid, flag, value), to: :erlang, as: :process_flag @doc """ - Returns information about the process identified by pid or nil if the process + Returns information about the process identified by `pid`, or returns `nil` if the process is not alive. + Use this only for debugging information. - See http://www.erlang.org/doc/man/erlang.html#process_info-1 for more info. + See [`:erlang.process_info/1`](http://www.erlang.org/doc/man/erlang.html#process_info-1) for more info. """ @spec info(pid) :: Keyword.t def info(pid) do @@ -363,12 +608,12 @@ defmodule Process do end @doc """ - Returns information about the process identified by pid - or nil if the process is not alive. + Returns information about the process identified by `pid`, + or returns `nil` if the process is not alive. - See http://www.erlang.org/doc/man/erlang.html#process_info-2 for more info. + See [`:erlang.process_info/2`](http://www.erlang.org/doc/man/erlang.html#process_info-2) for more info. """ - @spec info(pid, atom) :: {atom, term} | nil + @spec info(pid, atom | [atom]) :: {atom, term} | [{atom, term}] | nil def info(pid, spec) def info(pid, :registered_name) do @@ -379,10 +624,25 @@ defmodule Process do end end - def info(pid, spec) when is_atom(spec) do + def info(pid, spec) when is_atom(spec) or is_list(spec) do nillify :erlang.process_info(pid, spec) end + @doc """ + Puts the calling process into a "hibernation" state. + + The calling process is put into a waiting state + where its memory allocation has been reduced as much as possible, + which is useful if the process does not expect to receive any messages + in the near future. + + See [`:erlang.hibernate/3`](http://www.erlang.org/doc/man/erlang.html#hibernate-3) for more info. + + Inlined by the compiler. 
+ """ + @spec hibernate(module, atom, list) :: no_return + defdelegate hibernate(mod, fun_name, args), to: :erlang + @compile {:inline, nillify: 1} defp nillify(:undefined), do: nil defp nillify(other), do: other diff --git a/lib/elixir/lib/protocol.ex b/lib/elixir/lib/protocol.ex index e29f31e1517..acb66311baf 100644 --- a/lib/elixir/lib/protocol.ex +++ b/lib/elixir/lib/protocol.ex @@ -19,18 +19,19 @@ defmodule Protocol do defmacro def({name, _, args}) when is_atom(name) and is_list(args) do arity = length(args) - type_args = for _ <- :lists.seq(2, arity), do: quote(do: term) + type_args = :lists.map(fn _ -> quote(do: term) end, + :lists.seq(2, arity)) type_args = [quote(do: t) | type_args] - call_args = for i <- :lists.seq(2, arity), - do: {String.to_atom(<>), [], __MODULE__} - call_args = [quote(do: t) | call_args] + call_args = :lists.map(fn pos -> Macro.var(String.to_atom("var" <> Integer.to_string(pos)), __MODULE__) end, + :lists.seq(2, arity)) + call_args = [quote(do: term) | call_args] quote do name = unquote(name) arity = unquote(arity) - @functions [{name, arity}|@functions] + @functions [{name, arity} | @functions] # Generate a fake definition with the user # signature that will be used by docs @@ -38,7 +39,7 @@ defmodule Protocol do # Generate the actual implementation Kernel.def unquote(name)(unquote_splicing(call_args)) do - impl_for!(t).unquote(name)(unquote_splicing(call_args)) + impl_for!(term).unquote(name)(unquote_splicing(call_args)) end # Convert the spec to callback if possible, @@ -49,13 +50,13 @@ defmodule Protocol do end defmacro def(_) do - raise ArgumentError, "invalid args for def inside defprotocol" + raise ArgumentError, "invalid arguments for def inside defprotocol" end @doc """ Checks if the given module is loaded and is protocol. - Returns `:ok` if so, otherwise raises ArgumentError. + Returns `:ok` if so, otherwise raises `ArgumentError`. """ @spec assert_protocol!(module) :: :ok | no_return def assert_protocol!(module) do @@ -69,7 +70,7 @@ defmodule Protocol do end try do - module.__protocol__(:name) + module.__protocol__(:module) rescue UndefinedFunctionError -> raise ArgumentError, "#{inspect module} is not a protocol" <> extra @@ -82,14 +83,16 @@ defmodule Protocol do Checks if the given module is loaded and is an implementation of the given protocol. - Returns `:ok` if so, otherwise raises ArgumentError. + Returns `:ok` if so, otherwise raises `ArgumentError`. """ @spec assert_impl!(module, module) :: :ok | no_return - def assert_impl!(protocol, impl) do - assert_impl!(protocol, impl, "") + def assert_impl!(protocol, base) do + assert_impl!(protocol, base, "") end - defp assert_impl!(protocol, impl, extra) do + defp assert_impl!(protocol, base, extra) do + impl = Module.concat(protocol, base) + case Code.ensure_compiled(impl) do {:module, ^impl} -> :ok _ -> raise ArgumentError, @@ -112,7 +115,7 @@ defmodule Protocol do end @doc """ - Derive the `protocol` for `module` with the given options. + Derives the `protocol` for `module` with the given options. """ defmacro derive(protocol, module, options \\ []) do quote do @@ -124,10 +127,10 @@ defmodule Protocol do ## Consolidation @doc """ - Extract all protocols from the given paths. + Extracts all protocols from the given paths. - The paths can be either a char list or a string. Internally - they are worked on as char lists, so passing them as lists + The paths can be either a charlist or a string. Internally + they are worked on as charlists, so passing them as lists avoid extra conversion. 
Does not load any of the protocols. @@ -141,23 +144,23 @@ defmodule Protocol do true """ - @spec extract_protocols([char_list | String.t]) :: [atom] + @spec extract_protocols([charlist | String.t]) :: [atom] def extract_protocols(paths) do extract_matching_by_attribute paths, 'Elixir.', fn module, attributes -> case attributes[:protocol] do - [fallback_to_any: _, consolidated: _] -> module + [fallback_to_any: _] -> module _ -> nil end end end @doc """ - Extract all types implemented for the given protocol from + Extracts all types implemented for the given protocol from the given paths. - The paths can be either a char list or a string. Internally - they are worked on as char lists, so passing them as lists + The paths can be either a charlist or a string. Internally + they are worked on as charlists, so passing them as lists avoid extra conversion. Does not load any of the implementations. @@ -171,12 +174,12 @@ defmodule Protocol do true """ - @spec extract_impls(module, [char_list | String.t]) :: [atom] + @spec extract_impls(module, [charlist | String.t]) :: [atom] def extract_impls(protocol, paths) when is_atom(protocol) do - prefix = Atom.to_char_list(protocol) ++ '.' + prefix = Atom.to_charlist(protocol) ++ '.' extract_matching_by_attribute paths, prefix, fn _mod, attributes -> - case attributes[:impl] do + case attributes[:protocol_impl] do [protocol: ^protocol, for: for] -> for _ -> nil end @@ -197,7 +200,7 @@ defmodule Protocol do end end - defp list_dir(path), do: list_dir(to_char_list(path)) + defp list_dir(path), do: list_dir(to_charlist(path)) defp extract_from_file(path, file, prefix, callback) do if :lists.prefix(prefix, file) and :filename.extension(file) == '.beam' do @@ -214,21 +217,12 @@ defmodule Protocol do end end - defmacrop if_ok(expr, call) do - quote do - case unquote(expr) do - {:ok, var} -> unquote(Macro.pipe(quote(do: var), call, 0)) - other -> other - end - end - end - @doc """ - Returns true if the protocol was consolidated. + Returns `true` if the protocol was consolidated. """ @spec consolidated?(module) :: boolean def consolidated?(protocol) do - protocol.__info__(:attributes)[:protocol][:consolidated] + protocol.__protocol__(:consolidated?) end @doc """ @@ -247,7 +241,7 @@ defmodule Protocol do Protocol.consolidated?(Enumerable) - If the first element of the tuple is true, it means + If the first element of the tuple is `true`, it means the protocol was consolidated. 
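As a rough sketch of how a build tool might use these functions (Mix already does this for you; error handling is omitted and the output directory is hypothetical and assumed to exist):

    paths = :code.get_path()

    for protocol <- Protocol.extract_protocols(paths) do
      impls = Protocol.extract_impls(protocol, paths)
      {:ok, binary} = Protocol.consolidate(protocol, impls)
      File.write!("consolidated/#{protocol}.beam", binary)
    end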
This function does not load the protocol at any point @@ -260,22 +254,20 @@ defmodule Protocol do {:error, :not_a_protocol} | {:error, :no_beam_info} def consolidate(protocol, types) when is_atom(protocol) do - beam_protocol(protocol) - |> if_ok(change_debug_info types) - |> if_ok(compile) + with {:ok, info} <- beam_protocol(protocol), + {:ok, code, docs} <- change_debug_info(info, types), + do: compile(code, docs) end - @docs_chunk 'ExDc' - defp beam_protocol(protocol) do - chunk_ids = [:abstract_code, :attributes, @docs_chunk] + chunk_ids = [:abstract_code, :attributes, 'ExDc'] opts = [:allow_missing_chunks] case :beam_lib.chunks(beam_file(protocol), chunk_ids, opts) do {:ok, {^protocol, [{:abstract_code, {_raw, abstract_code}}, {:attributes, attributes}, - {@docs_chunk, docs}]}} -> + {'ExDc', docs}]}} -> case attributes[:protocol] do - [fallback_to_any: any, consolidated: _] -> + [fallback_to_any: any] -> {:ok, {protocol, any, abstract_code, docs}} _ -> {:error, :not_a_protocol} @@ -287,7 +279,7 @@ defmodule Protocol do defp beam_file(module) when is_atom(module) do case :code.which(module) do - :non_existing -> module + atom when is_atom(atom) -> module file -> file end end @@ -296,49 +288,68 @@ defmodule Protocol do # impl_for/1 dispatch version. defp change_debug_info({protocol, any, code, docs}, types) do types = if any, do: types, else: List.delete(types, Any) - all = [Any] ++ for {_guard, mod} <- builtin, do: mod + all = [Any] ++ for {_guard, mod} <- __builtin__(), do: mod structs = types -- all case change_impl_for(code, protocol, types, structs, false, []) do - {:ok, ret} -> {:ok, {ret, docs}} + {:ok, ret} -> {:ok, ret, docs} other -> other end end - defp change_impl_for([{:attribute, line, :protocol, opts}|t], protocol, types, structs, _, acc) do - opts = [fallback_to_any: opts[:fallback_to_any], consolidated: true] - change_impl_for(t, protocol, types, structs, true, - [{:attribute, line, :protocol, opts}|acc]) + defp change_impl_for([{:function, line, :__protocol__, 1, clauses} | tail], protocol, types, structs, _, acc) do + clauses = :lists.map(fn + {:clause, l, [{:atom, _, :consolidated?}], [], [{:atom, _, _}]} -> + {:clause, l, [{:atom, 0, :consolidated?}], [], [{:atom, 0, true}]} + {:clause, _, _, _, _} = c -> + c + end, clauses) + + change_impl_for(tail, protocol, types, structs, true, + [{:function, line, :__protocol__, 1, clauses} | acc]) end - defp change_impl_for([{:function, line, :impl_for, 1, _}|t], protocol, types, structs, is_protocol, acc) do - fallback = if Any in types, do: load_impl(protocol, Any), else: nil + defp change_impl_for([{:function, line, :impl_for, 1, _} | tail], protocol, types, structs, protocol?, acc) do + fallback = if Any in types, do: load_impl(protocol, Any) - clauses = for {guard, mod} <- builtin, + clauses = for {guard, mod} <- __builtin__(), mod in types, do: builtin_clause_for(mod, guard, protocol, line) - clauses = [struct_clause_for(line)|clauses] ++ + clauses = [struct_clause_for(line) | clauses] ++ [fallback_clause_for(fallback, protocol, line)] - change_impl_for(t, protocol, types, structs, is_protocol, - [{:function, line, :impl_for, 1, clauses}|acc]) + change_impl_for(tail, protocol, types, structs, protocol?, + [{:function, line, :impl_for, 1, clauses} | acc]) end - defp change_impl_for([{:function, line, :struct_impl_for, 1, _}|t], protocol, types, structs, is_protocol, acc) do - fallback = if Any in types, do: load_impl(protocol, Any), else: nil + defp change_impl_for([{:function, line, :struct_impl_for, 1, _} | tail], 
protocol, types, structs, protocol?, acc) do + fallback = if Any in types, do: load_impl(protocol, Any) clauses = for struct <- structs, do: each_struct_clause_for(struct, protocol, line) clauses = clauses ++ [fallback_clause_for(fallback, protocol, line)] - change_impl_for(t, protocol, types, structs, is_protocol, - [{:function, line, :struct_impl_for, 1, clauses}|acc]) + change_impl_for(tail, protocol, types, structs, protocol?, + [{:function, line, :struct_impl_for, 1, clauses} | acc]) end - defp change_impl_for([h|t], protocol, info, types, is_protocol, acc) do - change_impl_for(t, protocol, info, types, is_protocol, [h|acc]) + defp change_impl_for([{:attribute, line, :spec, {{:__protocol__, 1}, funspecs}} | tail], protocol, types, structs, protocol?, acc) do + new_specs = for spec <- funspecs do + case spec do + {:type, line, :fun, [{:type, _, :product, [{:atom, _, :consolidated?}]}, _]} -> + {:type, line, :fun, + [{:type, line, :product, [{:atom, 0, :consolidated?}]}, + {:atom, 0, true}]} + other -> other + end + end + change_impl_for(tail, protocol, types, structs, protocol?, [{:attribute, line, :spec, {{:__protocol__, 1}, new_specs}} | acc]) + end + + defp change_impl_for([head | tail], protocol, info, types, protocol?, acc) do + change_impl_for(tail, protocol, info, types, protocol?, [head | acc]) end - defp change_impl_for([], protocol, _info, _types, is_protocol, acc) do - if is_protocol do + defp change_impl_for([], protocol, _info, _types, protocol?, acc) do + if protocol? do {:ok, {protocol, Enum.reverse(acc)}} else {:error, :not_a_protocol} @@ -369,9 +380,9 @@ defmodule Protocol do [{:var, line, :x}]}]} end - defp each_struct_clause_for(other, protocol, line) do - {:clause, line, [{:atom, line, other}], [], - [{:atom, line, load_impl(protocol, other)}]} + defp each_struct_clause_for(struct, protocol, line) do + {:clause, line, [{:atom, line, struct}], [], + [{:atom, line, load_impl(protocol, struct)}]} end defp fallback_clause_for(value, _protocol, line) do @@ -384,13 +395,14 @@ defmodule Protocol do end # Finally compile the module and emit its bytecode. - defp compile({{protocol, code}, docs}) do + defp compile({protocol, code}, docs) do opts = if Code.compiler_options[:debug_info], do: [:debug_info], else: [] - {:ok, ^protocol, binary, _warnings} = :compile.forms(code, [:return|opts]) - unless docs == :missing_chunk do - binary = :elixir_module.add_beam_chunk(binary, @docs_chunk, docs) - end - {:ok, binary} + {:ok, ^protocol, binary, _warnings} = :compile.forms(code, [:return | opts]) + {:ok, + case docs do + :missing_chunk -> binary + _ -> :elixir_erl.add_beam_chunks(binary, [{"ExDc", docs}]) + end} end ## Definition callbacks @@ -416,17 +428,18 @@ defmodule Protocol do @fallback_to_any false # Invoke the user given block - unquote(block) + _ = unquote(block) # Finalize expansion - unquote(after_defprotocol) + unquote(after_defprotocol()) end end end defp after_defprotocol do - quote bind_quoted: [builtin: builtin] do - @spec impl_for(term) :: module | nil + quote bind_quoted: [builtin: __builtin__()] do + @doc false + @spec impl_for(term) :: atom | nil Kernel.def impl_for(data) # Define the implementation for structs. @@ -437,33 +450,39 @@ defmodule Protocol do struct_impl_for(struct) end - # Define the implementation for builtins. 
- for {guard, mod} <- builtin do + # Define the implementation for built-ins + :lists.foreach(fn {guard, mod} -> target = Module.concat(__MODULE__, mod) Kernel.def impl_for(data) when :erlang.unquote(guard)(data) do case impl_for?(unquote(target)) do true -> unquote(target).__impl__(:target) - false -> any_impl_for + false -> any_impl_for() end end + end, builtin) + + # Define a catch-all impl_for/1 clause to pacify Dialyzer (since + # destructuring opaque types is illegal, Dialyzer will think none of the + # previous clauses matches opaque types, and without this clause, will + # conclude that impl_for can't handle an opaque argument). This is a hack + # since it relies on Dialyzer not being smart enough to conclude that all + # opaque types will get the any_impl_for/0 implementation. + Kernel.def impl_for(_) do + any_impl_for() end - @spec impl_for!(term) :: module | no_return + @doc false + @spec impl_for!(term) :: atom | no_return Kernel.def impl_for!(data) do impl_for(data) || raise(Protocol.UndefinedError, protocol: __MODULE__, value: data) end # Internal handler for Any if @fallback_to_any do - Kernel.defp any_impl_for do - case impl_for?(__MODULE__.Any) do - true -> __MODULE__.Any.__impl__(:target) - false -> nil - end - end + Kernel.defp any_impl_for(), do: __MODULE__.Any.__impl__(:target) else - Kernel.defp any_impl_for, do: nil + Kernel.defp any_impl_for(), do: nil end # Internal handler for Structs @@ -471,7 +490,7 @@ defmodule Protocol do target = Module.concat(__MODULE__, struct) case impl_for?(target) do true -> target.__impl__(:target) - false -> any_impl_for + false -> any_impl_for() end end @@ -491,15 +510,24 @@ defmodule Protocol do # Store information as an attribute so it # can be read without loading the module. Module.register_attribute(__MODULE__, :protocol, persist: true) - @protocol [fallback_to_any: !!@fallback_to_any, consolidated: false] + @protocol [fallback_to_any: !!@fallback_to_any] @doc false - @spec __protocol__(atom) :: term - Kernel.def __protocol__(:name), do: __MODULE__ + @spec __protocol__(:module) :: __MODULE__ + @spec __protocol__(:functions) :: unquote(Protocol.__functions_spec__(@functions)) + @spec __protocol__(:consolidated?) :: false + Kernel.def __protocol__(:module), do: __MODULE__ Kernel.def __protocol__(:functions), do: unquote(:lists.sort(@functions)) + Kernel.def __protocol__(:consolidated?), do: false end end + @doc false + def __functions_spec__([]), + do: [] + def __functions_spec__([head | tail]), + do: [:lists.foldl(&{:|, [], [&1, &2]}, head, tail), quote(do: ...)] + @doc false def __impl__(protocol, opts) do do_defimpl(protocol, :lists.keysort(1, opts)) @@ -510,12 +538,27 @@ defmodule Protocol do end defp do_defimpl(protocol, [do: block, for: for]) do + # Unquote the implementation just later + # when all variables will already be injected + # into the module body. 
+ impl = + quote unquote: false do + @doc false + @spec __impl__(:for) :: unquote(for) + @spec __impl__(:target) :: __MODULE__ + @spec __impl__(:protocol) :: unquote(protocol) + def __impl__(:for), do: unquote(for) + def __impl__(:target), do: __MODULE__ + def __impl__(:protocol), do: unquote(protocol) + end + quote do protocol = unquote(protocol) for = unquote(for) name = Module.concat(protocol, for) Protocol.assert_protocol!(protocol) + Protocol.__ensure_defimpl__(protocol, for, __ENV__) defmodule name do @behaviour protocol @@ -524,14 +567,10 @@ defmodule Protocol do unquote(block) - Module.register_attribute(__MODULE__, :impl, persist: true) - @impl [protocol: @protocol, for: @for] + Module.register_attribute(__MODULE__, :protocol_impl, persist: true) + @protocol_impl [protocol: @protocol, for: @for] - @doc false - @spec __impl__(atom) :: term - def __impl__(:target), do: __MODULE__ - def __impl__(:protocol), do: @protocol - def __impl__(:for), do: @for + unquote(impl) end end end @@ -557,14 +596,15 @@ defmodule Protocol do end defp derive(protocol, for, struct, opts, env) do - impl = Module.concat(protocol, Map) extra = ", cannot derive #{inspect protocol} for #{inspect for}" assert_protocol!(protocol, extra) - assert_impl!(protocol, impl, extra) + __ensure_defimpl__(protocol, for, env) + assert_impl!(protocol, Any, extra) # Clean up variables from eval context env = %{env | vars: [], export_vars: nil} args = [for, struct, opts] + impl = Module.concat(protocol, Any) :elixir_module.expand_callback(env.line, impl, :__deriving__, args, env, fn mod, fun, args -> @@ -572,11 +612,13 @@ defmodule Protocol do apply(mod, fun, args) else Module.create(Module.concat(protocol, for), quote do - Module.register_attribute(__MODULE__, :impl, persist: true) - @impl [protocol: unquote(protocol), for: unquote(for)] + Module.register_attribute(__MODULE__, :protocol_impl, persist: true) + @protocol_impl [protocol: unquote(protocol), for: unquote(for)] @doc false - @spec __impl__(atom) :: term + @spec __impl__(:target) :: unquote(impl) + @spec __impl__(:protocol) :: unquote(protocol) + @spec __impl__(:for) :: unquote(for) def __impl__(:target), do: unquote(impl) def __impl__(:protocol), do: unquote(protocol) def __impl__(:for), do: unquote(for) @@ -585,24 +627,37 @@ defmodule Protocol do end) end + @doc false + def __ensure_defimpl__(protocol, for, env) do + if Protocol.consolidated?(protocol) do + message = + "the #{inspect protocol} protocol has already been consolidated" <> + ", an implementation for #{inspect for} has no effect" + :elixir_errors.warn(env.line, env.file, message) + end + :ok + end + @doc false def __spec__?(module, name, arity) do signature = {name, arity} - specs = Module.get_attribute(module, :spec) + specs = Module.get_attribute(module, :spec) found = - for {:spec, expr, caller} <- specs, - Kernel.Typespec.spec_to_signature(expr) == signature do - Kernel.Typespec.define_spec(:callback, expr, caller) + :lists.map(fn {:spec, expr, pos} -> + if Kernel.Typespec.spec_to_signature(expr) == signature do + Module.store_typespec(module, :callback, {:callback, expr, pos}) true end + end, specs) - found != [] + :lists.any(& &1 == true, found) end ## Helpers - defp builtin do + @doc false + def __builtin__ do [is_tuple: Tuple, is_atom: Atom, is_list: List, diff --git a/lib/elixir/lib/range.ex b/lib/elixir/lib/range.ex index 55c09855305..3c4845d5507 100644 --- a/lib/elixir/lib/range.ex +++ b/lib/elixir/lib/range.ex @@ -1,34 +1,64 @@ defmodule Range do @moduledoc """ - Defines a Range. 
+ Defines a range. - A Range are represented internally as a struct. However, + A range represents a discrete number of values where + the first and last values are integers. + + Ranges can be either increasing (first <= last) or + decreasing (first > last). Ranges are also always + inclusive. + + A Range is represented internally as a struct. However, the most common form of creating and matching on ranges - is via the `../2` macro, auto-imported from Kernel: + is via the `../2` macro, auto-imported from `Kernel`: iex> range = 1..3 1..3 - iex> first .. last = range + iex> first..last = range iex> first 1 iex> last 3 + A Range implements the Enumerable protocol, which means + all of the functions in the Enum module is available: + + iex> range = 1..10 + 1..10 + iex> Enum.reduce(range, 0, fn i, acc -> i * i + acc end) + 385 + iex> Enum.count(range) + 10 + iex> Enum.member?(range, 11) + false + iex> Enum.member?(range, 8) + true + """ defstruct first: nil, last: nil - @type t(first, last) :: %{__struct__: Range, first: first, last: last} + @type t :: %Range{first: integer, last: integer} + @type t(first, last) :: %Range{first: first, last: last} + @doc """ Creates a new range. """ - def new(first, last) do + @spec new(integer, integer) :: t + def new(first, last) when is_integer(first) and is_integer(last) do %Range{first: first, last: last} end + def new(first, last) do + raise ArgumentError, + "ranges (first..last) expect both sides to be integers, " <> + "got: #{inspect first}..#{inspect last}" + end + @doc """ - Returns true if the given argument is a range. + Returns `true` if the given `term` is a valid range. ## Examples @@ -39,52 +69,38 @@ defmodule Range do false """ - def range?(%Range{}), do: true + @spec range?(term) :: boolean + def range?(term) + def range?(first..last) when is_integer(first) and is_integer(last), do: true def range?(_), do: false end -defprotocol Range.Iterator do - @moduledoc """ - A protocol used for iterating range elements. - """ - - @doc """ - Returns the function that calculates the next item. - """ - def next(first, range) - - @doc """ - Count how many items are in the range. - """ - def count(first, range) -end - defimpl Enumerable, for: Range do - def reduce(first .. last = range, acc, fun) do - reduce(first, last, acc, fun, Range.Iterator.next(first, range), last >= first) + def reduce(first..last, acc, fun) do + reduce(first, last, acc, fun, _up? = last >= first) end - defp reduce(_x, _y, {:halt, acc}, _fun, _next, _up) do + defp reduce(_x, _y, {:halt, acc}, _fun, _up?) do {:halted, acc} end - defp reduce(x, y, {:suspend, acc}, fun, next, up) do - {:suspended, acc, &reduce(x, y, &1, fun, next, up)} + defp reduce(x, y, {:suspend, acc}, fun, up?) do + {:suspended, acc, &reduce(x, y, &1, fun, up?)} end - defp reduce(x, y, {:cont, acc}, fun, next, true) when x <= y do - reduce(next.(x), y, fun.(x, acc), fun, next, true) + defp reduce(x, y, {:cont, acc}, fun, _up? = true) when x <= y do + reduce(x + 1, y, fun.(x, acc), fun, _up? = true) end - defp reduce(x, y, {:cont, acc}, fun, next, false) when x >= y do - reduce(next.(x), y, fun.(x, acc), fun, next, false) + defp reduce(x, y, {:cont, acc}, fun, _up? = false) when x >= y do + reduce(x - 1, y, fun.(x, acc), fun, _up? = false) end - defp reduce(_, _, {:cont, acc}, _fun, _next, _up) do + defp reduce(_, _, {:cont, acc}, _fun, _up) do {:done, acc} end - def member?(first .. 
last, value) do + def member?(first..last, value) when is_integer(value) do if first <= last do {:ok, first <= value and value <= last} else @@ -92,25 +108,15 @@ defimpl Enumerable, for: Range do end end - def count(first .. _ = range) do - {:ok, Range.Iterator.count(first, range)} - end -end - -defimpl Range.Iterator, for: Integer do - def next(first, _ .. last) when is_integer(last) do - if last >= first do - &(&1 + 1) - else - &(&1 - 1) - end + def member?(_.._, _value) do + {:ok, false} end - def count(first, _ .. last) when is_integer(last) do - if last >= first do - last - first + 1 + def count(first..last) do + if first <= last do + {:ok, last - first + 1} else - first - last + 1 + {:ok, first - last + 1} end end end @@ -118,7 +124,7 @@ end defimpl Inspect, for: Range do import Inspect.Algebra - def inspect(first .. last, opts) do + def inspect(first..last, opts) do concat [to_doc(first, opts), "..", to_doc(last, opts)] end end diff --git a/lib/elixir/lib/record.ex b/lib/elixir/lib/record.ex index c58081870f9..af93aabc391 100644 --- a/lib/elixir/lib/record.ex +++ b/lib/elixir/lib/record.ex @@ -1,33 +1,66 @@ defmodule Record do @moduledoc """ - Module to work, define and import records. + Module to work with, define, and import records. Records are simply tuples where the first element is an atom: - iex> Record.record? {User, "jose", 27} + iex> Record.is_record {User, "john", 27} true This module provides conveniences for working with records at compilation time, where compile-time field names are used to manipulate the tuples, providing fast operations on top of - the tuples compact structure. + the tuples' compact structure. In Elixir, records are used mostly in two situations: 1. to work with short, internal data 2. to interface with Erlang records - The macros `defrecord/3` and `defrecordp/3` can be used to create - records while `extract/2` can be used to extract records from Erlang - files. + The macros `defrecord/3` and `defrecordp/3` can be used to create records + while `extract/2` and `extract_all/1` can be used to extract records from + Erlang files. + + ## Types + + Types can be defined for tuples with the `record/2` macro (only available in + typespecs). This macro will expand to a tuple as seen in the example below: + + defmodule MyModule do + require Record + Record.defrecord :user, name: "john", age: 25 + + @type user :: record(:user, name: String.t, age: integer) + # expands to: "@type user :: {:user, String.t, integer}" + end + """ @doc """ Extracts record information from an Erlang file. Returns a quoted expression containing the fields as a list - of tuples. It expects the record name to be an atom and the - library path to be a string at expansion time. + of tuples. + + `name`, which is the name of the extracted record, is expected to be an atom + *at compile time*. + + ## Options + + This function accepts the following options, which are exclusive to each other + (i.e., only one of them can be used in the same call): + + * `:from` - (binary representing a path to a file) path to the Erlang file + that contains the record definition to extract; with this option, this + function uses the same path lookup used by the `-include` attribute used in + Erlang modules. + * `:from_lib` - (binary representing a path to a file) path to the Erlang + file that contains the record definition to extract; with this option, + this function uses the same path lookup used by the `-include_lib` + attribute used in Erlang modules. 
+ + These options are expected to be literals (including the binary values) at + compile time. ## Examples @@ -38,34 +71,64 @@ defmodule Record do uid: :undefined, gid: :undefined] """ - defmacro extract(name, opts) when is_atom(name) and is_list(opts) do - Macro.escape Record.Extractor.extract(name, opts) + @spec extract(name :: atom, Keyword.t) :: Keyword.t + def extract(name, opts) when is_atom(name) and is_list(opts) do + Record.Extractor.extract(name, opts) end @doc """ - Checks if the given `data` is a record of `kind`. + Extracts all records information from an Erlang file. + + Returns a keyword list of `{record_name, fields}` tuples where `record_name` + is the name of an extracted record and `fields` is a list of `{field, value}` + tuples representing the fields for that record. + + ## Options + + This function accepts the following options, which are exclusive to each other + (i.e., only one of them can be used in the same call): + + * `:from` - (binary representing a path to a file) path to the Erlang file + that contains the record definitions to extract; with this option, this + function uses the same path lookup used by the `-include` attribute used in + Erlang modules. + * `:from_lib` - (binary representing a path to a file) path to the Erlang + file that contains the record definitions to extract; with this option, + this function uses the same path lookup used by the `-include_lib` + attribute used in Erlang modules. + + These options are expected to be literals (including the binary values) at + compile time. + """ + @spec extract_all(Keyword.t) :: [{name :: atom, Keyword.t}] + def extract_all(opts) when is_list(opts) do + Record.Extractor.extract_all(opts) + end + + @doc """ + Checks if the given `data` is a record of kind `kind`. This is implemented as a macro so it can be used in guard clauses. ## Examples - iex> record = {User, "jose", 27} - iex> Record.record?(record, User) + iex> record = {User, "john", 27} + iex> Record.is_record(record, User) true """ - defmacro record?(data, kind) do + defmacro is_record(data, kind) do case Macro.Env.in_guard?(__CALLER__) do true -> quote do - is_tuple(unquote(data)) and tuple_size(unquote(data)) > 0 - and :erlang.element(1, unquote(data)) == unquote(kind) + is_atom(unquote(kind)) and is_tuple(unquote(data)) and tuple_size(unquote(data)) > 0 and + elem(unquote(data), 0) == unquote(kind) end false -> quote do result = unquote(data) - is_tuple(result) and tuple_size(result) > 0 - and :erlang.element(1, result) == unquote(kind) + kind = unquote(kind) + is_atom(kind) and is_tuple(result) and tuple_size(result) > 0 and elem(result, 0) == kind end end end @@ -77,71 +140,127 @@ defmodule Record do ## Examples - iex> record = {User, "jose", 27} - iex> Record.record?(record) + iex> record = {User, "john", 27} + iex> Record.is_record(record) true iex> tuple = {} - iex> Record.record?(tuple) + iex> Record.is_record(tuple) false """ - defmacro record?(data) do + defmacro is_record(data) do case Macro.Env.in_guard?(__CALLER__) do true -> quote do - is_tuple(unquote(data)) and tuple_size(unquote(data)) > 0 - and is_atom(:erlang.element(1, unquote(data))) + is_tuple(unquote(data)) and tuple_size(unquote(data)) > 0 and + is_atom(elem(unquote(data), 0)) end false -> quote do result = unquote(data) - is_tuple(result) and tuple_size(result) > 0 - and is_atom(:erlang.element(1, result)) + is_tuple(result) and tuple_size(result) > 0 and is_atom(elem(result, 0)) end end end @doc """ - Defines a set of macros to create and access a record. 
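Because `is_record/2` expands to plain guard-safe expressions, it can be used in function guards, as in this hypothetical `Util` module:

    defmodule Util do
      require Record

      def user?(term) when Record.is_record(term, :user), do: true
      def user?(_term), do: false
    end

    Util.user?({:user, "meg", 25})   #=> true
    Util.user?({:post, "hello"})     #=> false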
+ Defines a set of macros to create, access, and pattern match + on a record. + + The name of the generated macros will be `name` (which has to be an + atom). `tag` is also an atom and is used as the "tag" for the record (i.e., + the first element of the record tuple); by default (if `nil`), it's the same + as `name`. `kv` is a keyword list of `name: default_value` fields for the + new record. + + The following macros are generated: - The macros are going to have `name`, a tag (which defaults) - to the name if none is given, and a set of fields given by - `kv`. + * `name/0` to create a new record with default values for all fields + * `name/1` to create a new record with the given fields and values, + to get the zero-based index of the given field in a record or to + convert the given record to a keyword list + * `name/2` to update an existing record with the given fields and values + or to access a given field in a given record + + All these macros are public macros (as defined by `defmacro`). + + See the "Examples" section for examples on how to use these macros. ## Examples defmodule User do - Record.defrecord :user, [name: "José", age: "25"] + require Record + Record.defrecord :user, [name: "meg", age: "25"] end In the example above, a set of macros named `user` but with different - arities will be defined to manipulate the underlying record: + arities will be defined to manipulate the underlying record. + + # Import the module to make the user macros locally available + import User # To create records - user() #=> {:user, "José", 25} - user(age: 26) #=> {:user, "José", 26} + record = user() #=> {:user, "meg", 25} + record = user(age: 26) #=> {:user, "meg", 26} # To get a field from the record - user(record, :name) #=> "José" + user(record, :name) #=> "meg" # To update the record - user(record, age: 26) #=> {:user, "José", 26} + user(record, age: 26) #=> {:user, "meg", 26} - By default, Elixir uses the record name as the first element of - the tuple (the tag). But it can be changed to something else: + # To get the zero-based index of the field in record tuple + # (index 0 is occupied by the record "tag") + user(:name) #=> 1 + + # Convert a record to a keyword list + user(record) #=> [name: "meg", age: 26] + + The generated macros can also be used in order to pattern match on records and + to bind variables during the match: + + record = user() #=> {:user, "meg", 25} + + user(name: name) = record + name #=> "meg" + + By default, Elixir uses the record name as the first element of the tuple (the "tag"). + However, a different tag can be specified when defining a record, + as in the following example, in which we use `Customer` as the second argument of `defrecord/3`: defmodule User do - Record.defrecord :user, User, name: nil + require Record + Record.defrecord :user, Customer, name: nil end require User - User.user() #=> {User, nil} + User.user() #=> {Customer, nil} + + ## Defining extracted records with anonymous functions in the values + + If a record defines an anonymous function in the default values, an + `ArgumentError` will be raised. This can happen unintentionally when defining + a record after extracting it from an Erlang library that uses anonymous + functions for defaults. + + Record.defrecord :my_rec, Record.extract(...) + #=> ** (ArgumentError) invalid value for record field fun_field, + cannot escape #Function<12.90072148/2 in :erl_eval.expr/5>. 
+ + To work around this error, redefine the field with your own &M.f/a function, + like so: + + defmodule MyRec do + require Record + Record.defrecord :my_rec, Record.extract(...) |> Keyword.merge(fun_field: &__MODULE__.foo/2) + def foo(bar, baz), do: IO.inspect({bar, baz}) + end """ defmacro defrecord(name, tag \\ nil, kv) do quote bind_quoted: [name: name, tag: tag, kv: kv] do tag = tag || name - fields = Macro.escape Record.__fields__(:defrecord, kv) + fields = Record.__fields__(:defrecord, kv) defmacro(unquote(name)(args \\ [])) do Record.__access__(unquote(tag), unquote(fields), args, __CALLER__) @@ -159,7 +278,7 @@ defmodule Record do defmacro defrecordp(name, tag \\ nil, kv) do quote bind_quoted: [name: name, tag: tag, kv: kv] do tag = tag || name - fields = Macro.escape Record.__fields__(:defrecordp, kv) + fields = Record.__fields__(:defrecordp, kv) defmacrop(unquote(name)(args \\ [])) do Record.__access__(unquote(tag), unquote(fields), args, __CALLER__) @@ -175,63 +294,79 @@ defmodule Record do @doc false def __fields__(type, fields) do :lists.map(fn - { key, _ } = pair when is_atom(key) -> pair - key when is_atom(key) -> { key, nil } - other -> raise ArgumentError, "#{type} fields must be atoms, got: #{inspect other}" + {key, value} when is_atom(key) -> + try do + Macro.escape(value) + rescue + e in [ArgumentError] -> + raise ArgumentError, "invalid value for record field #{key}, " <> Exception.message(e) + else + value -> {key, value} + end + key when is_atom(key) -> + {key, nil} + other -> + raise ArgumentError, "#{type} fields must be atoms, got: #{inspect other}" end, fields) end # Callback invoked from record/0 and record/1 macros. @doc false - def __access__(atom, fields, args, caller) do + def __access__(tag, fields, args, caller) do cond do is_atom(args) -> - index(atom, fields, args) + index(tag, fields, args) Keyword.keyword?(args) -> - create(atom, fields, args, caller) + create(tag, fields, args, caller) true -> - msg = "expected arguments to be a compile time atom or keywords, got: #{Macro.to_string args}" - raise ArgumentError, msg + fields = Macro.escape(fields) + case Macro.expand(args, caller) do + {:{}, _, [^tag | list]} when length(list) == length(fields) -> + record = List.to_tuple([tag | list]) + Record.__keyword__(tag, fields, record) + {^tag, arg} when length(fields) == 1 -> + Record.__keyword__(tag, fields, {tag, arg}) + _ -> + quote do: Record.__keyword__(unquote(tag), unquote(fields), unquote(args)) + end end end # Callback invoked from the record/2 macro. @doc false - def __access__(atom, fields, record, args, caller) do + def __access__(tag, fields, record, args, caller) do cond do is_atom(args) -> - get(atom, fields, record, args) + get(tag, fields, record, args) Keyword.keyword?(args) -> - update(atom, fields, record, args, caller) + update(tag, fields, record, args, caller) true -> - msg = "expected arguments to be a compile time atom or keywords, got: #{Macro.to_string args}" + msg = "expected arguments to be a compile time atom or a keyword list, got: #{Macro.to_string args}" raise ArgumentError, msg end end # Gets the index of field. - defp index(atom, fields, field) do + defp index(tag, fields, field) do if index = find_index(fields, field, 0) do index - 1 # Convert to Elixir index else - raise ArgumentError, "record #{inspect atom} does not have the key: #{inspect field}" + raise ArgumentError, "record #{inspect tag} does not have the key: #{inspect field}" end end # Creates a new record with the given default fields and keyword values. 
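Since `defrecordp/3` above mirrors `defrecord/3` but defines private macros, a minimal usage sketch may help; the `:token` record and its fields are invented for illustration only:

    defmodule MyParser do
      require Record
      # Private record: the generated `token` macros are only visible inside MyParser.
      Record.defrecordp :token, kind: nil, value: nil, line: 1

      def new(kind, value, line \\ 1) do
        token(kind: kind, value: value, line: line)
      end

      # Record.is_record/2 may be used in guards once Record is required.
      def kind(tok) when Record.is_record(tok, :token) do
        token(tok, :kind)
      end

      def bump_line(tok) do
        token(tok, line: token(tok, :line) + 1)
      end
    end

Callers outside `MyParser` only ever see plain tuples such as `{:token, :number, "42", 1}`.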
- defp create(atom, fields, keyword, caller) do + defp create(tag, fields, keyword, caller) do in_match = Macro.Env.in_match?(caller) + keyword = apply_underscore(fields, keyword) {match, remaining} = Enum.map_reduce(fields, keyword, fn({field, default}, each_keyword) -> new_fields = - case Keyword.has_key?(each_keyword, field) do - true -> Keyword.get(each_keyword, field) - false -> - case in_match do - true -> {:_, [], nil} - false -> Macro.escape(default) - end + case Keyword.fetch(each_keyword, field) do + {:ok, value} -> value + :error when in_match -> {:_, [], nil} + :error -> Macro.escape(default) end {new_fields, Keyword.delete(each_keyword, field)} @@ -239,19 +374,21 @@ defmodule Record do case remaining do [] -> - {:{}, [], [atom|match]} + {:{}, [], [tag | match]} _ -> keys = for {key, _} <- remaining, do: key - raise ArgumentError, "record #{inspect atom} does not have the key: #{inspect hd(keys)}" + raise ArgumentError, "record #{inspect tag} does not have the key: #{inspect hd(keys)}" end end # Updates a record given by var with the given keyword. - defp update(atom, fields, var, keyword, caller) do + defp update(tag, fields, var, keyword, caller) do if Macro.Env.in_match?(caller) do raise ArgumentError, "cannot invoke update style macro inside match" end + keyword = apply_underscore(fields, keyword) + Enum.reduce keyword, var, fn({key, value}, acc) -> index = find_index(fields, key, 0) if index do @@ -259,24 +396,62 @@ defmodule Record do :erlang.setelement(unquote(index), unquote(acc), unquote(value)) end else - raise ArgumentError, "record #{inspect atom} does not have the key: #{inspect key}" + raise ArgumentError, "record #{inspect tag} does not have the key: #{inspect key}" end end end # Gets a record key from the given var. - defp get(atom, fields, var, key) do + defp get(tag, fields, var, key) do index = find_index(fields, key, 0) if index do quote do :erlang.element(unquote(index), unquote(var)) end else - raise ArgumentError, "record #{inspect atom} does not have the key: #{inspect key}" + raise ArgumentError, "record #{inspect tag} does not have the key: #{inspect key}" end end - defp find_index([{k, _}|_], k, i), do: i + 2 - defp find_index([{_, _}|t], k, i), do: find_index(t, k, i + 1) + defp find_index([{k, _} | _], k, i), do: i + 2 + defp find_index([{_, _} | t], k, i), do: find_index(t, k, i + 1) defp find_index([], _k, _i), do: nil + + # Returns a keyword list of the record + @doc false + def __keyword__(tag, fields, record) do + if is_record(record, tag) do + [_tag | values] = Tuple.to_list(record) + case join_keyword(fields, values, []) do + kv when is_list(kv) -> + kv + expected_fields -> + msg = "expected argument to be a #{inspect tag} record with #{expected_fields} fields, got: #{inspect record}" + raise ArgumentError, msg + end + else + msg = "expected argument to be a literal atom, literal keyword or a #{inspect tag} record, got runtime: #{inspect record}" + raise ArgumentError, msg + end + end + + # Returns a keyword list, or expected number of fields on size mismatch + defp join_keyword([{field, _default} | fields], [value | values], acc), + do: join_keyword(fields, values, [{field, value} | acc]) + defp join_keyword([], [], acc), + do: :lists.reverse(acc) + defp join_keyword(rest_fields, _rest_values, acc), + do: length(acc) + length(rest_fields) # expected fields + + defp apply_underscore(fields, keyword) do + case Keyword.fetch(keyword, :_) do + {:ok, default} -> + fields + |> Enum.map(fn {k, _} -> {k, default} end) + |> Keyword.merge(keyword) + 
|> Keyword.delete(:_) + :error -> + keyword + end + end end diff --git a/lib/elixir/lib/record/extractor.ex b/lib/elixir/lib/record/extractor.ex index 9de1ad4f0dd..005ab4db694 100644 --- a/lib/elixir/lib/record/extractor.ex +++ b/lib/elixir/lib/record/extractor.ex @@ -4,27 +4,44 @@ defmodule Record.Extractor do # Retrieve a record definition from an Erlang file using # the same lookup as the *include* attribute from Erlang modules. def extract(name, from: file) when is_binary(file) do - file = String.to_char_list(file) - - realfile = - case :code.where_is_file(file) do - :non_existing -> file - realfile -> realfile - end - - extract_record(name, realfile) + extract_record(name, from_file(file)) end # Retrieve a record definition from an Erlang file using # the same lookup as the *include_lib* attribute from Erlang modules. def extract(name, from_lib: file) when is_binary(file) do - [app|path] = :filename.split(String.to_char_list(file)) + extract_record(name, from_lib_file(file)) + end + + # Retrieve all records definitions from an Erlang file using + # the same lookup as the *include* attribute from Erlang modules. + def extract_all(from: file) when is_binary(file) do + extract_all_records(from_file(file)) + end + + # Retrieve all records definitions from an Erlang file using + # the same lookup as the *include_lib* attribute from Erlang modules. + def extract_all(from_lib: file) when is_binary(file) do + extract_all_records(from_lib_file(file)) + end + + # Find file using the same lookup as the *include* attribute from Erlang modules. + defp from_file(file) do + file = String.to_charlist(file) + case :code.where_is_file(file) do + :non_existing -> file + realfile -> realfile + end + end + # Find file using the same lookup as the *include_lib* attribute from Erlang modules. + defp from_lib_file(file) do + [app | path] = :filename.split(String.to_charlist(file)) case :code.lib_dir(List.to_atom(app)) do {:error, _} -> raise ArgumentError, "lib file #{file} could not be found" libpath -> - extract_record name, :filename.join([libpath|path]) + :filename.join([libpath | path]) end end @@ -39,16 +56,24 @@ defmodule Record.Extractor do end end + # Retrieve all records from the given file + defp extract_all_records(file) do + form = read_file(file) + records = extract_records(form) + for rec = {name, _fields} <- records, do: {name, parse_record(rec, form)} + end + # Parse the given file and extract all existent records. defp extract_records(form) do for {:attribute, _, :record, record} <- form, do: record end # Read a file and return its abstract syntax form that also - # includes record and other preprocessor modules. This is done - # by using Erlang's epp_dodger. + # includes record but with macros and other attributes expanded, + # such as "-include(...)" and "-include_lib(...)". This is done + # by using Erlang's epp. 
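A rough sketch of the new `extract_all/1` entry point in use, pairing it with `defrecord/3` to mirror every record found in an Erlang header. The `file.hrl` header is the same one used in the `extract/2` examples; which records it actually defines depends on the OTP release, and the generated macro names must not clash with existing functions in the module:

    defmodule FileRecords do
      require Record

      # Define one set of record macros per record found in OTP's file.hrl.
      for {name, fields} <- Record.extract_all(from_lib: "kernel/include/file.hrl") do
        Record.defrecord name, fields
      end
    end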
defp read_file(file) do - case :epp_dodger.quick_parse_file(file) do + case :epp.parse_file(file, []) do {:ok, form} -> form other -> @@ -80,11 +105,11 @@ defmodule Record.Extractor do defp eval_record(cons, form) do form = form ++ - [ {:function, 0, :hello, 0, [ - {:clause, 0, [], [], [ cons ]} ]} ] + [{:function, 0, :hello, 0, [ + {:clause, 0, [], [], [cons]}]}] {:function, 0, :hello, 0, [ - {:clause, 0, [], [], [ record_ast ]} ]} = :erl_expand_records.module(form, []) |> List.last + {:clause, 0, [], [], [record_ast]}]} = :erl_expand_records.module(form, []) |> List.last {:value, record, _} = :erl_eval.expr(record_ast, []) record diff --git a/lib/elixir/lib/regex.ex b/lib/elixir/lib/regex.ex index b6591de723e..7d51d7929ae 100644 --- a/lib/elixir/lib/regex.ex +++ b/lib/elixir/lib/regex.ex @@ -1,31 +1,60 @@ defmodule Regex do @moduledoc ~S""" - Regular expressions for Elixir built on top of Erlang's `re` module. + Provides regular expressions for Elixir. - As the `re` module, Regex is based on PCRE - (Perl Compatible Regular Expressions). More information can be - found in the [`re` documentation](http://www.erlang.org/doc/man/re.html). + Regex is based on PCRE (Perl Compatible Regular Expressions) and + built on top of Erlang's `:re` module. More information can be found + in the [`:re` module documentation](http://www.erlang.org/doc/man/re.html). - Regular expressions in Elixir can be created using `Regex.compile!/2` - or using the special form with [`~r`](Kernel.html#sigil_r/2): + Regular expressions in Elixir can be created using the sigils + [`~r`](Kernel.html#sigil_r/2) or [`~R`](Kernel.html#sigil_R/2): # A simple regular expressions that matches foo anywhere in the string ~r/foo/ - # A regular expression with case insensitive and unicode options + # A regular expression with case insensitive and Unicode options ~r/foo/iu + Regular expressions created via sigils are pre-compiled and stored + in the `.beam` file. Notice this may be a problem if you are precompiling + Elixir, see the "Precompilation" section for more information. + A Regex is represented internally as the `Regex` struct. Therefore, `%Regex{}` can be used whenever there is a need to match on them. + Keep in mind it is not guaranteed two regular expressions from the + same source are equal, for example: + + ~r/(?.)(?.)/ == ~r/(?.)(?.)/ + + may return `true` or `false` depending on your machine, endianess, + available optimizations and others. You can, however, retrieve the source + of a compiled regular expression by accessing the `source` field, and then + compare those directly: + + ~r/(?.)(?.)/.source == ~r/(?.)(?.)/.source + + ## Precompilation + + Regular expressions built with sigil are precompiled and stored in `.beam` + files. This may be a problem if you are precompiling Elixir to run in + different OTP releases, as OTP releases may update the underlying regular + expression engine at any time. + + For such reasons, we always recomend precompiling Elixir projects using + the OTP version meant to run in production. In case cross-compilation is + really necessary, you can manually invoke `Regex.recompile/1` or `Regex. + recompile!/1` to perform a runtime version check and recompile the regex + if necessary. 
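One way to apply the runtime check mentioned above is to recompile a stored regex at the point of use; `MyApp.Version` and `@version_regex` below are made-up names used only for this sketch:

    defmodule MyApp.Version do
      # Compiled when this module is compiled and stored in the .beam file.
      @version_regex ~r/\d+\.\d+\.\d+/

      # Recompiles the pattern if the runtime regex engine version differs from
      # the one it was originally compiled against, then matches as usual.
      def parse(string) do
        Regex.run(Regex.recompile!(@version_regex), string)
      end
    end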
## Modifiers The modifiers available when creating a Regex are: - * `unicode` (u) - enables unicode specific patterns like `\p`; it expects - valid unicode strings to be given on match + * `unicode` (u) - enables Unicode specific patterns like `\p` and change + modifiers like `\w`, `\W`, `\s` and friends to also match on Unicode. + It expects valid Unicode strings to be given on match - * `caseless` (i) - add case insensitivity + * `caseless` (i) - adds case insensitivity * `dotall` (s) - causes dot to match newlines and also set newline to anycrlf; the new line setting can be overridden by setting `(*CR)` or @@ -40,7 +69,8 @@ defmodule Regex do * `firstline` (f) - forces the unanchored pattern to match before or at the first newline, though the matched text may continue over the newline - * `ungreedy` (r) - inverts the "greediness" of the regexp + * `ungreedy` (U) - inverts the "greediness" of the regexp + (the previous `r` option is deprecated in favor of `U`) The options not available are: @@ -53,7 +83,7 @@ defmodule Regex do ## Captures - Many functions in this module allows what to capture in a regex + Many functions in this module handle what to capture in a regex match via the `:capture` option. The supported values are: * `:all` - all captured subpatterns including the complete matching string @@ -67,7 +97,7 @@ defmodule Regex do explicitly captured subpatterns, but not the complete matching part of the string - * `:none` - do not return matching subpatterns at all + * `:none` - does not return matching subpatterns at all * `:all_names` - captures all names in the Regex @@ -75,7 +105,9 @@ defmodule Regex do """ - defstruct re_pattern: nil :: term, source: "" :: binary, opts: "" :: binary + defstruct re_pattern: nil, source: "", opts: "", re_version: "" + + @type t :: %__MODULE__{re_pattern: term, source: binary, opts: binary} defmodule CompileError do defexception message: "regex could not be compiled" @@ -86,7 +118,7 @@ defmodule Regex do The given options can either be a binary with the characters representing the same regex options given to the `~r` sigil, - or a list of options, as expected by the [Erlang `re` docs](http://www.erlang.org/doc/man/re.html). + or a list of options, as expected by the Erlang's `:re` module. It returns `{:ok, regex}` in case of success, `{:error, reason}` otherwise. 
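Both option styles accepted by `compile/2` should yield equivalent patterns; a small hedged illustration:

    iex> {:ok, regex} = Regex.compile("foo", "iu")
    iex> Regex.match?(regex, "FOO")
    true
    iex> {:ok, regex} = Regex.compile("foo", [:caseless, :unicode])
    iex> Regex.match?(regex, "FOO")
    true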
@@ -101,39 +133,84 @@ defmodule Regex do """ @spec compile(binary, binary | [term]) :: {:ok, t} | {:error, any} - def compile(source, options \\ "") + def compile(source, options \\ "") do + compile(source, options, version()) + end - def compile(source, options) when is_binary(options) do - case translate_options(options) do + defp compile(source, options, version) when is_binary(options) do + case translate_options(options, []) do {:error, rest} -> {:error, {:invalid_option, rest}} translated_options -> - compile(source, translated_options, options) + compile(source, translated_options, options, version) end end - def compile(source, options) when is_list(options) do - compile(source, options, "") + defp compile(source, options, version) when is_list(options) do + compile(source, options, "", version) end - defp compile(source, opts, doc_opts) when is_binary(source) do + defp compile(source, opts, doc_opts, version) when is_binary(source) do case :re.compile(source, opts) do {:ok, re_pattern} -> - {:ok, %Regex{re_pattern: re_pattern, source: source, opts: doc_opts}} + {:ok, %Regex{re_pattern: re_pattern, re_version: version, source: source, opts: doc_opts}} error -> error end end @doc """ - Compiles the regular expression according to the given options. - Fails with `Regex.CompileError` if the regex cannot be compiled. + Compiles the regular expression and raises `Regex.CompileError` in case of errors. """ + @spec compile!(binary, binary | [term]) :: t def compile!(source, options \\ "") do case compile(source, options) do {:ok, regex} -> regex - {:error, {reason, at}} -> raise Regex.CompileError, message: "#{reason} at position #{at}" + {:error, {reason, at}} -> raise Regex.CompileError, "#{reason} at position #{at}" + end + end + + @doc """ + Recompiles the existing regular expression if necessary. + + This checks the version stored in the regular expression + and recompiles the regex in case of version mismatch. + """ + @spec recompile(t) :: t + def recompile(%Regex{} = regex) do + version = version() + + # We use Map.get/3 by choice to support old regexes versions. + case Map.get(regex, :re_version, :error) do + ^version -> + {:ok, regex} + _ -> + %{source: source, opts: opts} = regex + compile(source, opts, version) + end + end + + @doc """ + Recompiles the existing regular expression and raises `Regex.CompileError` in case of errors. + """ + @spec recompile!(t) :: t + def recompile!(regex) do + case recompile(regex) do + {:ok, regex} -> regex + {:error, {reason, at}} -> raise Regex.CompileError, "#{reason} at position #{at}" + end + end + + @doc """ + Returns the version of the underlying Regex engine. + """ + # TODO: No longer check for function_exported? on OTP 20+. + def version do + if function_exported?(:re, :version, 0) do + :re.version() + else + "8.33 2013-05-29" end end @@ -149,12 +226,14 @@ defmodule Regex do false """ + @spec match?(t, String.t) :: boolean def match?(%Regex{re_pattern: compiled}, string) when is_binary(string) do :re.run(string, compiled, [{:capture, :none}]) == :match end @doc """ - Returns true if the given argument is a regex. + Returns `true` if the given `term` is a regex. + Otherwise returns `false`. ## Examples @@ -165,6 +244,8 @@ defmodule Regex do false """ + @spec regex?(any) :: boolean + def regex?(term) def regex?(%Regex{}), do: true def regex?(_), do: false @@ -174,9 +255,9 @@ defmodule Regex do ## Options - * `:return` - set to `:index` to return indexes. Defaults to `:binary`. + * `:return` - sets to `:index` to return indexes. 
Defaults to `:binary`. * `:capture` - what to capture in the result. Check the moduledoc for `Regex` - to see the possible capture values. + to see the possible capture values. ## Examples @@ -187,9 +268,10 @@ defmodule Regex do nil iex> Regex.run(~r/c(d)/, "abcd", return: :index) - [{2,2},{3,1}] + [{2, 2}, {3, 1}] """ + @spec run(t, binary, [term]) :: nil | [binary] | [{integer, integer}] def run(regex, string, options \\ []) def run(%Regex{re_pattern: compiled}, string, options) when is_binary(string) do @@ -220,6 +302,7 @@ defmodule Regex do nil """ + @spec named_captures(t, String.t, [term]) :: map | nil def named_captures(regex, string, options \\ []) when is_binary(string) do names = names(regex) options = Keyword.put(options, :capture, names) @@ -230,6 +313,7 @@ defmodule Regex do @doc """ Returns the underlying `re_pattern` in the regular expression. """ + @spec re_pattern(t) :: term def re_pattern(%Regex{re_pattern: compiled}) do compiled end @@ -243,6 +327,7 @@ defmodule Regex do "foo" """ + @spec source(t) :: String.t def source(%Regex{source: source}) do source end @@ -256,6 +341,7 @@ defmodule Regex do "m" """ + @spec opts(t) :: String.t def opts(%Regex{opts: opts}) do opts end @@ -269,22 +355,24 @@ defmodule Regex do ["foo"] """ + @spec names(t) :: [String.t] def names(%Regex{re_pattern: re_pattern}) do {:namelist, names} = :re.inspect(re_pattern, :namelist) names end - @doc """ + @doc ~S""" Same as `run/3`, but scans the target several times collecting all - matches of the regular expression. A list of lists is returned, - where each entry in the primary list represents a match and each - entry in the secondary list represents the captured contents. + matches of the regular expression. + + A list of lists is returned, where each entry in the primary list represents a + match and each entry in the secondary list represents the captured contents. ## Options - * `:return` - set to `:index` to return indexes. Defaults to `:binary`. + * `:return` - sets to `:index` to return indexes. Defaults to `:binary`. * `:capture` - what to capture in the result. Check the moduledoc for `Regex` - to see the possible capture values. + to see the possible capture values. ## Examples @@ -297,7 +385,11 @@ defmodule Regex do iex> Regex.scan(~r/e/, "abcd") [] + iex> Regex.scan(~r/\p{Sc}/u, "$, £, and €") + [["$"], ["£"], ["€"]] + """ + @spec scan(t, String.t, [term]) :: [[String.t]] def scan(regex, string, options \\ []) def scan(%Regex{re_pattern: compiled}, string, options) when is_binary(string) do @@ -313,61 +405,135 @@ defmodule Regex do end @doc """ - Splits the given target into the number of parts specified. + Splits the given target based on the given pattern and in the given number of + parts. ## Options * `:parts` - when specified, splits the string into the given number of - parts. If not specified, `:parts` is defaulted to `:infinity`, which will + parts. If not specified, `:parts` defaults to `:infinity`, which will split the string into the maximum number of parts possible based on the given pattern. - * `:trim` - when true, remove blank strings from the result. + * `:trim` - when `true`, removes empty strings (`""`) from the result. + + * `:on` - specifies which captures to split the string on, and in what + order. Defaults to `:first` which means captures inside the regex do not + affect the splitting process. + + * `:include_captures` - when `true`, includes in the result the matches of + the regular expression. Defaults to `false`. 
## Examples - iex> Regex.split(~r/-/, "a-b-c") - ["a","b","c"] + iex> Regex.split(~r{-}, "a-b-c") + ["a", "b", "c"] - iex> Regex.split(~r/-/, "a-b-c", [parts: 2]) - ["a","b-c"] + iex> Regex.split(~r{-}, "a-b-c", [parts: 2]) + ["a", "b-c"] - iex> Regex.split(~r/-/, "abc") + iex> Regex.split(~r{-}, "abc") ["abc"] - iex> Regex.split(~r//, "abc") + iex> Regex.split(~r{}, "abc") ["a", "b", "c", ""] - iex> Regex.split(~r//, "abc", trim: true) + iex> Regex.split(~r{a(?b)c}, "abc") + ["", ""] + + iex> Regex.split(~r{a(?b)c}, "abc", on: [:second]) + ["a", "c"] + + iex> Regex.split(~r{(x)}, "Elixir", include_captures: true) + ["Eli", "x", "ir"] + + iex> Regex.split(~r{a(?b)c}, "abc", on: [:second], include_captures: true) ["a", "b", "c"] """ - + @spec split(t, String.t, [term]) :: [String.t] def split(regex, string, options \\ []) - def split(%Regex{re_pattern: compiled}, string, options) when is_binary(string) do - parts = Keyword.get(options, :parts, :infinity) - opts = [return: :binary, parts: zero_to_infinity(parts)] - splits = :re.split(string, compiled, opts) + def split(%Regex{}, "", opts) do + if Keyword.get(opts, :trim, false) do + [] + else + [""] + end + end - if Keyword.get(options, :trim, false) do - for split <- splits, split != "", do: split + def split(%Regex{re_pattern: compiled}, string, opts) when is_binary(string) and is_list(opts) do + on = Keyword.get(opts, :on, :first) + case :re.run(string, compiled, [:global, capture: on]) do + {:match, matches} -> + do_split(matches, string, 0, + parts_to_index(Keyword.get(opts, :parts, :infinity)), + Keyword.get(opts, :trim, false), + Keyword.get(opts, :include_captures, false)) + :match -> + [string] + :nomatch -> + [string] + end + end + + defp parts_to_index(:infinity), do: 0 + defp parts_to_index(n) when is_integer(n) and n > 0, do: n + + defp do_split(_, string, offset, _counter, true, _with_captures) when byte_size(string) <= offset, + do: [] + + defp do_split(_, string, offset, 1, _trim, _with_captures), + do: [binary_part(string, offset, byte_size(string) - offset)] + + defp do_split([], string, offset, _counter, _trim, _with_captures), + do: [binary_part(string, offset, byte_size(string) - offset)] + + defp do_split([[{pos, _} | h] | t], string, offset, counter, trim, with_captures) when pos - offset < 0, + do: do_split([h | t], string, offset, counter, trim, with_captures) + + defp do_split([[] | t], string, offset, counter, trim, with_captures), + do: do_split(t, string, offset, counter, trim, with_captures) + + defp do_split([[{pos, length} | h] | t], string, offset, counter, trim, true) do + new_offset = pos + length + keep = pos - offset + + if keep == 0 and length == 0 do + do_split([h | t], string, new_offset, counter, trim, true) else - splits + <<_::binary-size(offset), part::binary-size(keep), match::binary-size(length), _::binary>> = string + + if keep == 0 and (length == 0 or trim) do + [match | do_split([h | t], string, new_offset, counter - 1, trim, true)] + else + [part, match | do_split([h | t], string, new_offset, counter - 1, trim, true)] + end end end - defp zero_to_infinity(0), do: :infinity - defp zero_to_infinity(n), do: n + defp do_split([[{pos, length} | h] | t], string, offset, counter, trim, false) do + new_offset = pos + length + keep = pos - offset + + if keep == 0 and (length == 0 or trim) do + do_split([h | t], string, new_offset, counter, trim, false) + else + <<_::binary-size(offset), part::binary-size(keep), _::binary>> = string + [part | do_split([h | t], string, new_offset, counter - 1, trim, 
false)] + end + end @doc ~S""" Receives a regex, a binary and a replacement, returns a new - binary where the all matches are replaced by replacement. + binary where all matches are replaced by the replacement. The replacement can be either a string or a function. The string is used as a replacement for every match and it allows specific - captures to be accessed via `\N`, where `N` is the capture. In - case `\0` is used, the whole match is inserted. + captures to be accessed via `\N` or `\g{N}`, where `N` is the + capture. In case `\0` is used, the whole match is inserted. Note + that in regexes the backslash needs to be escaped, hence in practice + you'll need to use `\\N` and `\\g{N}`. When the replacement is a function, the function may have arity N where each argument maps to a capture, with the first argument @@ -377,7 +543,7 @@ defmodule Regex do ## Options * `:global` - when `false`, replaces only the first occurrence - (defaults to true) + (defaults to `true`) ## Examples @@ -393,30 +559,39 @@ defmodule Regex do iex> Regex.replace(~r/a(b|d)c/, "abcadc", "[\\1]") "[b][d]" + iex> Regex.replace(~r/\.(\d)$/, "500.5", ".\\g{1}0") + "500.50" + iex> Regex.replace(~r/a(b|d)c/, "abcadc", fn _, x -> "[#{x}]" end) "[b][d]" + iex> Regex.replace(~r/a/, "abcadc", "A", global: false) + "Abcadc" + """ + @spec replace(t, String.t, String.t | (... -> String.t), [term]) :: String.t def replace(regex, string, replacement, options \\ []) - def replace(regex, string, replacement, options) when is_binary(replacement) do + def replace(regex, string, replacement, options) + when is_binary(string) and is_binary(replacement) and is_list(options) do do_replace(regex, string, precompile_replacement(replacement), options) end - def replace(regex, string, replacement, options) when is_function(replacement) do + def replace(regex, string, replacement, options) + when is_binary(string) and is_function(replacement) and is_list(options) do {:arity, arity} = :erlang.fun_info(replacement, :arity) do_replace(regex, string, {replacement, arity}, options) end defp do_replace(%Regex{re_pattern: compiled}, string, replacement, options) do opts = if Keyword.get(options, :global) != false, do: [:global], else: [] - opts = [{:capture, :all, :index}|opts] + opts = [{:capture, :all, :index} | opts] case :re.run(string, compiled, opts) do :nomatch -> string - {:match, [mlist|t]} when is_list(mlist) -> - apply_list(string, replacement, [mlist|t]) |> IO.iodata_to_binary + {:match, [mlist | t]} when is_list(mlist) -> + apply_list(string, replacement, [mlist | t]) |> IO.iodata_to_binary {:match, slist} -> apply_list(string, replacement, [slist]) |> IO.iodata_to_binary end @@ -425,32 +600,32 @@ defmodule Regex do defp precompile_replacement(""), do: [] - defp precompile_replacement(<>) when x < ?0 or x > ?9 do - case precompile_replacement(rest) do - [head | t] when is_binary(head) -> - [<> | t] - other -> - [<> | other] - end + defp precompile_replacement(<>) when byte_size(rest) > 0 do + {ns, <>} = pick_int(rest) + [List.to_integer(ns) | precompile_replacement(rest)] + end + + defp precompile_replacement(<>) do + [<> | precompile_replacement(rest)] end - defp precompile_replacement(<>) when byte_size(rest) > 0 do + defp precompile_replacement(<>) when x in ?0..?9 do {ns, rest} = pick_int(rest) - [List.to_integer(ns) | precompile_replacement(rest)] + [List.to_integer([x | ns]) | precompile_replacement(rest)] end - defp precompile_replacement(<>) do + defp precompile_replacement(<>) do case precompile_replacement(rest) do [head | t] 
when is_binary(head) -> - [<> | t] + [<> | t] other -> [<> | other] end end - defp pick_int(<>) when x in ?0..?9 do + defp pick_int(<>) when x in ?0..?9 do {found, rest} = pick_int(rest) - {[x|found], rest} + {[x | found], rest} end defp pick_int(bin) do @@ -471,12 +646,12 @@ defmodule Regex do defp apply_list(whole, string, pos, replacement, [[{mpos, _} | _] | _] = list) when mpos > pos do length = mpos - pos - <> = string + <> = string [untouched | apply_list(whole, rest, mpos, replacement, list)] end - defp apply_list(whole, string, pos, replacement, [[{mpos, length} | _] = head | tail]) when mpos == pos do - <<_ :: [size(length), binary], rest :: binary>> = string + defp apply_list(whole, string, pos, replacement, [[{pos, length} | _] = head | tail]) do + <<_::size(length)-binary, rest::binary>> = string new_data = apply_replace(whole, replacement, head) [new_data | apply_list(whole, rest, pos + length, replacement, tail)] end @@ -496,7 +671,7 @@ defmodule Regex do cond do is_binary(part) -> part - part > tuple_size(indexes) -> + part >= tuple_size(indexes) -> "" true -> get_index(string, elem(indexes, part)) @@ -509,7 +684,7 @@ defmodule Regex do end defp get_index(string, {pos, len}) do - <<_ :: [size(pos), binary], res :: [size(len), binary], _ :: binary>> = string + <<_::size(pos)-binary, res::size(len)-binary, _::binary>> = string res end @@ -518,16 +693,13 @@ defmodule Regex do end defp get_indexes(string, [], arity) do - [""|get_indexes(string, [], arity - 1)] + ["" | get_indexes(string, [], arity - 1)] end - defp get_indexes(string, [h|t], arity) do - [get_index(string, h)|get_indexes(string, t, arity - 1)] + defp get_indexes(string, [h | t], arity) do + [get_index(string, h) | get_indexes(string, t, arity - 1)] end - {:ok, pattern} = :re.compile(~S"[.^$*+?()[{\\\|\s#]", [:unicode]) - @escape_pattern pattern - @doc ~S""" Escapes a string to be literally matched in a regex. 
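Because `escape/1` is typically combined with interpolation when matching user-supplied text literally, a brief hedged example:

    iex> input = "1.5"
    iex> regex = ~r/#{Regex.escape(input)}/
    iex> Regex.match?(regex, "Elixir 1.5 is out")
    true
    iex> Regex.match?(regex, "Elixir 125 is out")
    false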
@@ -542,7 +714,31 @@ defmodule Regex do """ @spec escape(String.t) :: String.t def escape(string) when is_binary(string) do - :re.replace(string, @escape_pattern, "\\\\&", [:global, {:return, :binary}]) + string + |> escape(_length = 0, string) + |> IO.iodata_to_binary + end + + @escapable '.^$*+?()[]{}|#-\\\t\n\v\f\r\s' + + defp escape(<>, length, original) when char in @escapable do + escape_char(rest, length, original, char) + end + + defp escape(<<_, rest::binary>>, length, original) do + escape(rest, length + 1, original) + end + + defp escape(<<>>, _length, original) do + original + end + + defp escape_char(<>, 0, _original, char) do + [?\\, char | escape(rest, 0, rest)] + end + + defp escape_char(<>, length, original, char) do + [binary_part(original, 0, length), ?\\, char | escape(rest, 0, rest)] end # Helpers @@ -559,18 +755,20 @@ defmodule Regex do # Private Helpers - defp translate_options(<>) do - IO.write :stderr, "The /g flag for regular expressions is no longer needed\n#{Exception.format_stacktrace}" - translate_options(t) + defp translate_options(<>, acc), do: translate_options(t, [:unicode, :ucp | acc]) + defp translate_options(<>, acc), do: translate_options(t, [:caseless | acc]) + defp translate_options(<>, acc), do: translate_options(t, [:extended | acc]) + defp translate_options(<>, acc), do: translate_options(t, [:firstline | acc]) + defp translate_options(<>, acc), do: translate_options(t, [:ungreedy | acc]) + defp translate_options(<>, acc), do: translate_options(t, [:dotall, {:newline, :anycrlf} | acc]) + defp translate_options(<>, acc), do: translate_options(t, [:multiline | acc]) + + # TODO: Remove on 2.0 + defp translate_options(<>, acc) do + IO.warn "the /r modifier in regular expressions is deprecated, please use /U instead" + translate_options(t, [:ungreedy | acc]) end - defp translate_options(<>), do: [:unicode|translate_options(t)] - defp translate_options(<>), do: [:caseless|translate_options(t)] - defp translate_options(<>), do: [:extended|translate_options(t)] - defp translate_options(<>), do: [:firstline|translate_options(t)] - defp translate_options(<>), do: [:ungreedy|translate_options(t)] - defp translate_options(<>), do: [:dotall, {:newline, :anycrlf}|translate_options(t)] - defp translate_options(<>), do: [:multiline|translate_options(t)] - defp translate_options(<<>>), do: [] - defp translate_options(rest), do: {:error, rest} + defp translate_options(<<>>, acc), do: acc + defp translate_options(rest, _acc), do: {:error, rest} end diff --git a/lib/elixir/lib/registry.ex b/lib/elixir/lib/registry.ex new file mode 100644 index 00000000000..db689f3a51c --- /dev/null +++ b/lib/elixir/lib/registry.ex @@ -0,0 +1,935 @@ +defmodule Registry do + @moduledoc ~S""" + A local, decentralized and scalable key-value process storage. + + It allows developers to lookup one or more processes with a given key. + If the registry has `:unique` keys, a key points to 0 or 1 processes. + If the registry allows `:duplicate` keys, a single key may point to any + number of processes. In both cases, different keys could identify the + same process. + + Each entry in the registry is associated to the process that has + registered the key. If the process crashes, the keys associated to that + process are automatically removed. All key comparisons in the registry + are done using the match operation (`===`). 
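Because keys are compared with `===`, an integer key and its float counterpart are distinct entries. A quick sketch (the `Registry.KeysCompareTest` name is invented):

    iex> Registry.start_link(:unique, Registry.KeysCompareTest)
    iex> {:ok, _} = Registry.register(Registry.KeysCompareTest, 1.0, :float)
    iex> Registry.lookup(Registry.KeysCompareTest, 1)
    []
    iex> Registry.lookup(Registry.KeysCompareTest, 1.0)
    [{self(), :float}]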
+ + The registry can be used for different purposes, such as name lookups (using + the `:via` option), storing properties, custom dispatching rules, or a pubsub + implementation. We explore some of those use cases below. + + The registry may also be transparently partitioned, which provides + more scalable behaviour for running registries on highly concurrent + environments with thousands or millions of entries. + + ## Using in `:via` + + Once the registry is started with a given name using + `Registry.start_link/3`, it can be used to register and access named + processes using the `{:via, Registry, {registry, key}}` tuple: + + {:ok, _} = Registry.start_link(:unique, Registry.ViaTest) + name = {:via, Registry, {Registry.ViaTest, "agent"}} + {:ok, _} = Agent.start_link(fn -> 0 end, name: name) + Agent.get(name, & &1) + #=> 0 + Agent.update(name, & &1 + 1) + Agent.get(name, & &1) + #=> 1 + + Typically the registry is started as part of a supervision tree though: + + supervisor(Registry, [:unique, Registry.ViaTest]) + + Only registries with unique keys can be used in `:via`. If the name is + already taken, the case-specific `start_link` function (`Agent.start_link/2` + in the example above) will return `{:error, {:already_started, current_pid}}`. + + ## Using as a dispatcher + + `Registry` has a dispatch mechanism that allows developers to implement custom + dispatch logic triggered from the caller. For example, let's say we have a + duplicate registry started as so: + + {:ok, _} = Registry.start_link(:duplicate, Registry.DispatcherTest) + + By calling `register/3`, different processes can register under a given key + and associate any value under that key. In this case, let's register the + current process under the key `"hello"` and attach the `{IO, :inspect}` tuple + to it: + + {:ok, _} = Registry.register(Registry.DispatcherTest, "hello", {IO, :inspect}) + + Now, an entity interested in dispatching events for a given key may call + `dispatch/3` passing in the key and a callback. This callback will be invoked + with a list of all the values registered under the requested key, alongside + the pid of the process that registered each value, in the form of `{pid, + value}` tuples. In our example, `value` will be the `{module, function}` tuple + in the code above: + + Registry.dispatch(Registry.DispatcherTest, "hello", fn entries -> + for {pid, {module, function}} <- entries, do: apply(module, function, [pid]) + end) + # Prints #PID<...> where the pid is for the process that called register/3 above + #=> :ok + + Dispatching happens in the process that calls `dispatch/3` either serially or + concurrently in case of multiple partitions (via spawned tasks). The + registered processes are not involved in dispatching unless involving them is + done explicitly (for example, by sending them a message in the callback). + + Furthermore, if there is a failure when dispatching, due to a bad + registration, dispatching will always fail and the registered process will not + be notified. 
Therefore let's make sure we at least wrap and report those + errors: + + require Logger + Registry.dispatch(Registry.DispatcherTest, "hello", fn entries -> + for {pid, {module, function}} <- entries do + try do + apply(module, function, [pid]) + catch + kind, reason -> + formatted = Exception.format(kind, reason, System.stacktrace) + Logger.error "Registry.dispatch/3 failed with #{formatted}" + end + end + end) + # Prints #PID<...> + #=> :ok + + You could also replace the whole `apply` system by explicitly sending + messages. That's the example we will see next. + + ## Using as a PubSub + + Registries can also be used to implement a local, non-distributed, scalable + PubSub by relying on the `dispatch/3` function, similarly to the previous + section: in this case, however, we will send messages to each associated + process, instead of invoking a given module-function. + + In this example, we will also set the number of partitions to the number of + schedulers online, which will make the registry more performant on highly + concurrent environments as each partition will spawn a new process, allowing + dispatching to happen in parallel: + + {:ok, _} = Registry.start_link(:duplicate, Registry.PubSubTest, + partitions: System.schedulers_online) + {:ok, _} = Registry.register(Registry.PubSubTest, "hello", []) + Registry.dispatch(Registry.PubSubTest, "hello", fn entries -> + for {pid, _} <- entries, do: send(pid, {:broadcast, "world"}) + end) + #=> :ok + + The example above broadcasted the message `{:broadcast, "world"}` to all + processes registered under the "topic" (or "key" as we called it until now) + `"hello"`. + + The third argument given to `register/3` is a value associated to the + current process. While in the previous section we used it when dispatching, + in this particular example we are not interested in it, so we have set it + to an empty list. You could store a more meaningful value if necessary. + + ## Registrations + + Looking up, dispatching and registering are efficient and immediate at + the cost of delayed unsubscription. For example, if a process crashes, + its keys are automatically removed from the registry but the change may + not propagate immediately. This means certain operations may return processes + that are already dead. When such may happen, it will be explicitly stated + in the function documentation. + + However, keep in mind those cases are typically not an issue. After all, a + process referenced by a pid may crash at any time, including between getting + the value from the registry and sending it a message. Many parts of the standard + library are designed to cope with that, such as `Process.monitor/1` which will + deliver the `:DOWN` message immediately if the monitored process is already dead + and `Kernel.send/2` which acts as a no-op for dead processes. + + ## ETS + + Note that the registry uses one ETS table plus two ETS tables per partition. + """ + + # TODO: Decide if it should be started as part of Elixir's supervision tree. 
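To make the delayed-unsubscription caveat concrete: an entry returned by `lookup/2` may reference a process that has just exited, sending to a dead pid is a no-op, and monitoring one yields an immediate `:DOWN`. A hedged sketch, assuming a registry named `MyApp.Registry` has already been started:

    # Plain delivery needs no liveness check: send/2 to a dead pid is a no-op.
    for {pid, _value} <- Registry.lookup(MyApp.Registry, "topic") do
      send(pid, {:event, :refresh})
    end

    # If delivery must be confirmed, monitor the pid first: a dead process
    # produces an immediate :DOWN message instead of silently dropping the call.
    case Registry.lookup(MyApp.Registry, "worker") do
      [{pid, _value}] ->
        ref = Process.monitor(pid)
        send(pid, {:work, self(), ref})
      [] ->
        :no_worker
    end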
+ + @kind [:unique, :duplicate] + @all_info -1 + @key_info -2 + + @typedoc "The registry identifier" + @type registry :: atom + + @typedoc "The type of the registry" + @type kind :: :unique | :duplicate + + @typedoc "The type of keys allowed on registration" + @type key :: term + + @typedoc "The type of values allowed on registration" + @type value :: term + + @typedoc "The type of registry metadata keys" + @type meta_key :: atom | tuple + + @typedoc "The type of registry metadata values" + @type meta_value :: term + + ## Via callbacks + + @doc false + def whereis_name({registry, key}) do + case key_info!(registry) do + {:unique, partitions, key_ets} -> + key_ets = key_ets || key_ets!(registry, key, partitions) + case safe_lookup_second(key_ets, key) do + {pid, _} -> + if Process.alive?(pid), do: pid, else: :undefined + _ -> + :undefined + end + {kind, _, _} -> + raise ArgumentError, ":via is not supported for #{kind} registries" + end + end + + @doc false + def register_name({registry, key}, pid) when pid == self() do + case register(registry, key, nil) do + {:ok, _} -> :yes + {:error, _} -> :no + end + end + + @doc false + def send({registry, key}, msg) do + case lookup(registry, key) do + [{pid, _}] -> Kernel.send(pid, msg) + [] -> :erlang.error(:badarg, [{registry, key}, msg]) + end + end + + @doc false + def unregister_name({registry, key}) do + unregister(registry, key) + end + + ## Registry API + + @doc """ + Starts the registry as a supervisor process. + + Manually it can be started as: + + Registry.start_link(:unique, MyApp.Registry) + + In your supervisor tree, you would write: + + supervisor(Registry, [:unique, MyApp.Registry]) + + For intensive workloads, the registry may also be partitioned (by specifying + the `:partitions` option). If partitioning is required then a good default is to + set the number of partitions to the number of schedulers available: + + Registry.start_link(:unique, MyApp.Registry, partitions: System.schedulers_online()) + + or: + + supervisor(Registry, [:unique, MyApp.Registry, [partitions: System.schedulers_online()]]) + + ## Options + + The registry supports the following options: + + * `:partitions` - the number of partitions in the registry. Defaults to `1`. + * `:listeners` - a list of named processes which are notified of `:register` + and `:unregister` events. The registered process must be monitored by the + listener if the listener wants to be notified if the registered process + crashes. + * `:meta` - a keyword list of metadata to be attached to the registry. + + """ + @spec start_link(kind, registry, options) :: {:ok, pid} | {:error, term} + when options: [partitions: pos_integer, listeners: [atom], meta: [{meta_key, meta_value}]] + def start_link(kind, registry, options \\ []) when kind in @kind and is_atom(registry) do + meta = Keyword.get(options, :meta, []) + unless Keyword.keyword?(meta) do + raise ArgumentError, "expected :meta to be a keyword list, got: #{inspect meta}" + end + + partitions = Keyword.get(options, :partitions, 1) + unless is_integer(partitions) and partitions >= 1 do + raise ArgumentError, "expected :partitions to be a positive integer, got: #{inspect partitions}" + end + + listeners = Keyword.get(options, :listeners, []) + unless is_list(listeners) and Enum.all?(listeners, &is_atom/1) do + raise ArgumentError, "expected :listeners to be a list of named processes, got: #{inspect listeners}" + end + + # The @info format must be kept in sync with Registry.Partition optimization. 
+ entries = [{@all_info, {kind, partitions, nil, nil, listeners}}, + {@key_info, {kind, partitions, nil}} | meta] + Registry.Supervisor.start_link(kind, registry, partitions, listeners, entries) + end + + @doc """ + Updates the value for `key` for the current process in the unique `registry`. + + Returns a `{new_value, old_value}` tuple or `:error` if there + is no such key assigned to the current process. + + If a non-unique registry is given, an error is raised. + + ## Examples + + iex> Registry.start_link(:unique, Registry.UpdateTest) + iex> {:ok, _} = Registry.register(Registry.UpdateTest, "hello", 1) + iex> Registry.lookup(Registry.UpdateTest, "hello") + [{self(), 1}] + iex> Registry.update_value(Registry.UpdateTest, "hello", & &1 + 1) + {2, 1} + iex> Registry.lookup(Registry.UpdateTest, "hello") + [{self(), 2}] + + """ + @spec update_value(registry, key, (value -> value)) :: {new_value :: term, old_value :: term} | :error + def update_value(registry, key, callback) when is_atom(registry) and is_function(callback, 1) do + case key_info!(registry) do + {:unique, partitions, key_ets} -> + key_ets = key_ets || key_ets!(registry, key, partitions) + try do + :ets.lookup_element(key_ets, key, 2) + catch + :error, :badarg -> :error + else + {pid, old_value} when pid == self() -> + new_value = callback.(old_value) + :ets.insert(key_ets, {key, {pid, new_value}}) + {new_value, old_value} + {_, _} -> + :error + end + {kind, _, _} -> + raise ArgumentError, "Registry.update_value/3 is not supported for #{kind} registries" + end + end + + @doc """ + Invokes the callback with all entries under `key` in each partition + for the given `registry`. + + The list of `entries` is a non-empty list of two-element tuples where + the first element is the pid and the second element is the value + associated to the pid. If there are no entries for the given key, + the callback is never invoked. + + If the registry is not partitioned, the callback is invoked in the process + that calls `dispatch/3`. If the registry is partitioned, the callback is + invoked concurrently per partition by starting a task linked to the + caller. The callback, however, is only invoked if there are entries for that + partition. + + See the module documentation for examples of using the `dispatch/3` + function for building custom dispatching or a pubsub system. 
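The clauses below also accept an `{module, function, args}` tuple in place of the callback function, with the entries list prepended to `args`. A hedged sketch of that form (module and registry names are invented, and the registry is assumed to have been started as `:duplicate`):

    defmodule MyApp.Dispatcher do
      # Invoked by Registry.dispatch/3 with the entries list as the first argument.
      def fan_out(entries, event) do
        for {pid, _value} <- entries, do: send(pid, {:event, event})
        :ok
      end
    end

    # Equivalent to: Registry.dispatch(MyApp.PubSub, "clock",
    #   fn entries -> MyApp.Dispatcher.fan_out(entries, :tick) end)
    Registry.dispatch(MyApp.PubSub, "clock", {MyApp.Dispatcher, :fan_out, [:tick]})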
+ """ + @spec dispatch(registry, key, (entries :: [{pid, value}] -> term)) :: :ok + def dispatch(registry, key, mfa_or_fun) + when is_atom(registry) and is_function(mfa_or_fun, 1) + when is_atom(registry) and tuple_size(mfa_or_fun) == 3 do + case key_info!(registry) do + {:unique, partitions, key_ets} -> + (key_ets || key_ets!(registry, key, partitions)) + |> safe_lookup_second(key) + |> List.wrap() + |> apply_non_empty_to_mfa_or_fun(mfa_or_fun) + {:duplicate, 1, key_ets} -> + key_ets + |> safe_lookup_second(key) + |> apply_non_empty_to_mfa_or_fun(mfa_or_fun) + {:duplicate, partitions, _} -> + registry + |> dispatch_task(key, mfa_or_fun, partitions) + |> Enum.each(&Task.await(&1, :infinity)) + end + :ok + end + + defp dispatch_task(_registry, _key, _mfa_or_fun, 0) do + [] + end + defp dispatch_task(registry, key, mfa_or_fun, partition) do + partition = partition - 1 + task = Task.async(fn -> + registry + |> key_ets!(partition) + |> safe_lookup_second(key) + |> apply_non_empty_to_mfa_or_fun(mfa_or_fun) + :ok + end) + [task | dispatch_task(registry, key, mfa_or_fun, partition)] + end + + defp apply_non_empty_to_mfa_or_fun([], _mfa_or_fun) do + :ok + end + defp apply_non_empty_to_mfa_or_fun(entries, {module, function, args}) do + apply(module, function, [entries | args]) + end + defp apply_non_empty_to_mfa_or_fun(entries, fun) do + fun.(entries) + end + + @doc """ + Finds the `{pid, value}` pair for the given `key` in `registry` in no particular order. + + An empty list if there is no match. + + For unique registries, a single partition lookup is necessary. For + duplicate registries, all partitions must be looked up. + + ## Examples + + In the example below we register the current process and look it up + both from itself and other processes: + + iex> Registry.start_link(:unique, Registry.UniqueLookupTest) + iex> Registry.lookup(Registry.UniqueLookupTest, "hello") + [] + iex> {:ok, _} = Registry.register(Registry.UniqueLookupTest, "hello", :world) + iex> Registry.lookup(Registry.UniqueLookupTest, "hello") + [{self(), :world}] + iex> Task.async(fn -> Registry.lookup(Registry.UniqueLookupTest, "hello") end) |> Task.await + [{self(), :world}] + + The same applies to duplicate registries: + + iex> Registry.start_link(:duplicate, Registry.DuplicateLookupTest) + iex> Registry.lookup(Registry.DuplicateLookupTest, "hello") + [] + iex> {:ok, _} = Registry.register(Registry.DuplicateLookupTest, "hello", :world) + iex> Registry.lookup(Registry.DuplicateLookupTest, "hello") + [{self(), :world}] + iex> {:ok, _} = Registry.register(Registry.DuplicateLookupTest, "hello", :another) + iex> Enum.sort(Registry.lookup(Registry.DuplicateLookupTest, "hello")) + [{self(), :another}, {self(), :world}] + + """ + @spec lookup(registry, key) :: [{pid, value}] + def lookup(registry, key) when is_atom(registry) do + case key_info!(registry) do + {:unique, partitions, key_ets} -> + key_ets = key_ets || key_ets!(registry, key, partitions) + case safe_lookup_second(key_ets, key) do + {_, _} = pair -> + [pair] + _ -> + [] + end + + {:duplicate, 1, key_ets} -> + safe_lookup_second(key_ets, key) + + {:duplicate, partitions, _key_ets} -> + for partition <- 0..(partitions - 1), + pair <- safe_lookup_second(key_ets!(registry, partition), key), + do: pair + end + end + + @doc """ + Returns `{pid, value}` pairs under the given `key` in `registry` that match `pattern`. + + Pattern must be an atom or a tuple that will match the structure of the + value stored in the registry. 
The atom `:_` can be used to ignore a given + value or tuple element, while :"$1" can be used to temporarily assign part + of pattern to a variable for a subsequent comparison. + + It is possible to pass list of guard conditions for more precise matching. + Each guard is a tuple, which describes check that should be passed by assigned part of pattern. + For example :"$1" > 1 guard condition would be expressed as {:>, :"$1", 1} tuple. + Please note that guard conditions will work only for assigned variables like :"$1", :"$2", etc. + Avoid usage of special match variables :"$_" and :"$$", because it might not work as expected. + + An empty list will be returned if there is no match. + + For unique registries, a single partition lookup is necessary. For + duplicate registries, all partitions must be looked up. + + ## Examples + + In the example below we register the current process under the same + key in a duplicate registry but with different values: + + iex> Registry.start_link(:duplicate, Registry.MatchTest) + iex> {:ok, _} = Registry.register(Registry.MatchTest, "hello", {1, :atom, 1}) + iex> {:ok, _} = Registry.register(Registry.MatchTest, "hello", {2, :atom, 2}) + iex> Registry.match(Registry.MatchTest, "hello", {1, :_, :_}) + [{self(), {1, :atom, 1}}] + iex> Registry.match(Registry.MatchTest, "hello", {2, :_, :_}) + [{self(), {2, :atom, 2}}] + iex> Registry.match(Registry.MatchTest, "hello", {:_, :atom, :_}) |> Enum.sort() + [{self(), {1, :atom, 1}}, {self(), {2, :atom, 2}}] + iex> Registry.match(Registry.MatchTest, "hello", {:"$1", :_, :"$1"}) |> Enum.sort() + [{self(), {1, :atom, 1}}, {self(), {2, :atom, 2}}] + iex> Registry.match(Registry.MatchTest, "hello", {:_, :_, :"$1"}, [{:>, :"$1", 1}]) + [{self(), {2, :atom, 2}}] + iex> Registry.match(Registry.MatchTest, "hello", {:_, :"$1", :_}, [{:is_atom, :"$1"}]) |> Enum.sort() + [{self(), {1, :atom, 1}}, {self(), {2, :atom, 2}}] + + """ + @spec match(registry, key, match_pattern :: atom() | tuple(), guards :: list()) :: [{pid, term}] + def match(registry, key, pattern, guards \\ []) when is_atom(registry) and is_list(guards) do + spec = [{{key, {:_, pattern}}, guards, [{:element, 2, :"$_"}]}] + + case key_info!(registry) do + {:unique, partitions, key_ets} -> + key_ets = key_ets || key_ets!(registry, key, partitions) + :ets.select(key_ets, spec) + + {:duplicate, 1, key_ets} -> + :ets.select(key_ets, spec) + + {:duplicate, partitions, _key_ets} -> + for partition <- 0..(partitions - 1), + pair <- :ets.select(key_ets!(registry, partition), spec), + do: pair + end + end + + @doc """ + Returns the known keys for the given `pid` in `registry` in no particular order. + + If the registry is unique, the keys are unique. Otherwise + they may contain duplicates if the process was registered + under the same key multiple times. The list will be empty + if the process is dead or it has no keys in this registry. 
+ + ## Examples + + Registering under a unique registry does not allow multiple entries: + + iex> Registry.start_link(:unique, Registry.UniqueKeysTest) + iex> Registry.keys(Registry.UniqueKeysTest, self()) + [] + iex> {:ok, _} = Registry.register(Registry.UniqueKeysTest, "hello", :world) + iex> Registry.register(Registry.UniqueKeysTest, "hello", :later) # registry is :unique + {:error, {:already_registered, self()}} + iex> Registry.keys(Registry.UniqueKeysTest, self()) + ["hello"] + + Such is possible for duplicate registries though: + + iex> Registry.start_link(:duplicate, Registry.DuplicateKeysTest) + iex> Registry.keys(Registry.DuplicateKeysTest, self()) + [] + iex> {:ok, _} = Registry.register(Registry.DuplicateKeysTest, "hello", :world) + iex> {:ok, _} = Registry.register(Registry.DuplicateKeysTest, "hello", :world) + iex> Registry.keys(Registry.DuplicateKeysTest, self()) + ["hello", "hello"] + + """ + @spec keys(registry, pid) :: [key] + def keys(registry, pid) when is_atom(registry) and is_pid(pid) do + {kind, partitions, _, pid_ets, _} = info!(registry) + {_, pid_ets} = pid_ets || pid_ets!(registry, pid, partitions) + keys = safe_lookup_second(pid_ets, pid) + + cond do + kind == :unique -> Enum.uniq(keys) + true -> keys + end + end + + @doc """ + Unregisters all entries for the given `key` associated to the current + process in `registry`. + + Always returns `:ok` and automatically unlinks the current process from + the owner if there are no more keys associated to the current process. See + also `register/3` to read more about the "owner". + + ## Examples + + For unique registries: + + iex> Registry.start_link(:unique, Registry.UniqueUnregisterTest) + iex> Registry.register(Registry.UniqueUnregisterTest, "hello", :world) + iex> Registry.keys(Registry.UniqueUnregisterTest, self()) + ["hello"] + iex> Registry.unregister(Registry.UniqueUnregisterTest, "hello") + :ok + iex> Registry.keys(Registry.UniqueUnregisterTest, self()) + [] + + For duplicate registries: + + iex> Registry.start_link(:duplicate, Registry.DuplicateUnregisterTest) + iex> Registry.register(Registry.DuplicateUnregisterTest, "hello", :world) + iex> Registry.register(Registry.DuplicateUnregisterTest, "hello", :world) + iex> Registry.keys(Registry.DuplicateUnregisterTest, self()) + ["hello", "hello"] + iex> Registry.unregister(Registry.DuplicateUnregisterTest, "hello") + :ok + iex> Registry.keys(Registry.DuplicateUnregisterTest, self()) + [] + + """ + @spec unregister(registry, key) :: :ok + def unregister(registry, key) when is_atom(registry) do + self = self() + {kind, partitions, key_ets, pid_ets, listeners} = info!(registry) + {key_partition, pid_partition} = partitions(kind, key, self, partitions) + key_ets = key_ets || key_ets!(registry, key_partition) + {pid_server, pid_ets} = pid_ets || pid_ets!(registry, pid_partition) + + # Remove first from the key_ets because in case of crashes + # the pid_ets will still be able to clean up. The last step is + # to clean if we have no more entries. + true = :ets.match_delete(key_ets, {key, {self, :_}}) + true = :ets.delete_object(pid_ets, {self, key, key_ets}) + + unlink_if_unregistered(pid_server, pid_ets, self) + + for listener <- listeners do + Kernel.send(listener, {:unregister, registry, key, self}) + end + + :ok + end + + @doc """ + Registers the current process under the given `key` in `registry`. + + A value to be associated with this registration must also be given. + This value will be retrieved whenever dispatching or doing a key + lookup. 
+ + This function returns `{:ok, owner}` or `{:error, reason}`. + The `owner` is the pid in the registry partition responsible for + the pid. The owner is automatically linked to the caller. + + If the registry has unique keys, it will return `{:ok, owner}` unless + the key is already associated with a pid, in which case it returns + `{:error, {:already_registered, pid}}`. + + If the registry has duplicate keys, multiple registrations from the + current process under the same key are allowed. + + ## Examples + + Registering under a unique registry does not allow multiple entries: + + iex> Registry.start_link(:unique, Registry.UniqueRegisterTest) + iex> {:ok, _} = Registry.register(Registry.UniqueRegisterTest, "hello", :world) + iex> Registry.register(Registry.UniqueRegisterTest, "hello", :later) + {:error, {:already_registered, self()}} + iex> Registry.keys(Registry.UniqueRegisterTest, self()) + ["hello"] + + Multiple entries are possible for duplicate registries though: + + iex> Registry.start_link(:duplicate, Registry.DuplicateRegisterTest) + iex> {:ok, _} = Registry.register(Registry.DuplicateRegisterTest, "hello", :world) + iex> {:ok, _} = Registry.register(Registry.DuplicateRegisterTest, "hello", :world) + iex> Registry.keys(Registry.DuplicateRegisterTest, self()) + ["hello", "hello"] + + """ + @spec register(registry, key, value) :: {:ok, pid} | {:error, {:already_registered, pid}} + def register(registry, key, value) when is_atom(registry) do + self = self() + {kind, partitions, key_ets, pid_ets, listeners} = info!(registry) + {key_partition, pid_partition} = partitions(kind, key, self, partitions) + key_ets = key_ets || key_ets!(registry, key_partition) + {pid_server, pid_ets} = pid_ets || pid_ets!(registry, pid_partition) + + # Notice we write first to the pid ets table because it will + # always be able to do the clean up. If we register first to the + # key one and the process crashes, the key will stay there forever. + Process.link(pid_server) + true = :ets.insert(pid_ets, {self, key, key_ets}) + case register_key(kind, pid_server, key_ets, key, {key, {self, value}}) do + {:ok, _} = ok -> + for listener <- listeners do + Kernel.send(listener, {:register, registry, key, self, value}) + end + ok + {:error, {:already_registered, ^self}} = error -> + error + {:error, _} = error -> + true = :ets.delete_object(pid_ets, {self, key, key_ets}) + unlink_if_unregistered(pid_server, pid_ets, self) + error + end + end + + defp register_key(:duplicate, pid_server, key_ets, _key, entry) do + true = :ets.insert(key_ets, entry) + {:ok, pid_server} + end + defp register_key(:unique, pid_server, key_ets, key, entry) do + if :ets.insert_new(key_ets, entry) do + {:ok, pid_server} + else + # Notice we have to call register_key recursively + # because we are always at risk of a race condition. + case :ets.lookup(key_ets, key) do + [{^key, {pid, _}} = current] -> + if Process.alive?(pid) do + {:error, {:already_registered, pid}} + else + :ets.delete_object(key_ets, current) + register_key(:unique, pid_server, key_ets, key, entry) + end + [] -> + register_key(:unique, pid_server, key_ets, key, entry) + end + end + end + + @doc """ + Reads registry metadata given on `start_link/3`. + + Atoms and tuples are allowed as keys.
+ + ## Examples + + iex> Registry.start_link(:unique, Registry.MetaTest, meta: [custom_key: "custom_value"]) + iex> Registry.meta(Registry.MetaTest, :custom_key) + {:ok, "custom_value"} + iex> Registry.meta(Registry.MetaTest, :unknown_key) + :error + + """ + @spec meta(registry, meta_key) :: {:ok, meta_value} | :error + def meta(registry, key) when is_atom(registry) and (is_atom(key) or is_tuple(key)) do + try do + :ets.lookup(registry, key) + catch + :error, :badarg -> + raise ArgumentError, "unknown registry: #{inspect registry}" + else + [{^key, value}] -> {:ok, value} + _ -> :error + end + end + + @doc """ + Stores registry metadata. + + Atoms and tuples are allowed as keys. + + ## Examples + + iex> Registry.start_link(:unique, Registry.PutMetaTest) + iex> Registry.put_meta(Registry.PutMetaTest, :custom_key, "custom_value") + :ok + iex> Registry.meta(Registry.PutMetaTest, :custom_key) + {:ok, "custom_value"} + iex> Registry.put_meta(Registry.PutMetaTest, {:tuple, :key}, "tuple_value") + :ok + iex> Registry.meta(Registry.PutMetaTest, {:tuple, :key}) + {:ok, "tuple_value"} + + """ + @spec put_meta(registry, meta_key, meta_value) :: :ok + def put_meta(registry, key, value) when is_atom(registry) and (is_atom(key) or is_tuple(key)) do + try do + :ets.insert(registry, {key, value}) + :ok + catch + :error, :badarg -> + raise ArgumentError, "unknown registry: #{inspect registry}" + end + end + + ## Helpers + + @compile {:inline, hash: 2} + + defp hash(term, limit) do + :erlang.phash2(term, limit) + end + + defp info!(registry) do + try do + :ets.lookup_element(registry, @all_info, 2) + catch + :error, :badarg -> + raise ArgumentError, "unknown registry: #{inspect registry}" + end + end + + defp key_info!(registry) do + try do + :ets.lookup_element(registry, @key_info, 2) + catch + :error, :badarg -> + raise ArgumentError, "unknown registry: #{inspect registry}" + end + end + + defp key_ets!(registry, key, partitions) do + :ets.lookup_element(registry, hash(key, partitions), 2) + end + + defp key_ets!(registry, partition) do + :ets.lookup_element(registry, partition, 2) + end + + defp pid_ets!(registry, key, partitions) do + :ets.lookup_element(registry, hash(key, partitions), 3) + end + + defp pid_ets!(registry, partition) do + :ets.lookup_element(registry, partition, 3) + end + + defp safe_lookup_second(ets, key) do + try do + :ets.lookup_element(ets, key, 2) + catch + :error, :badarg -> [] + end + end + + defp partitions(:unique, key, pid, partitions) do + {hash(key, partitions), hash(pid, partitions)} + end + defp partitions(:duplicate, _key, pid, partitions) do + partition = hash(pid, partitions) + {partition, partition} + end + + defp unlink_if_unregistered(pid_server, pid_ets, self) do + unless :ets.member(pid_ets, self) do + Process.unlink(pid_server) + end + end +end + +defmodule Registry.Supervisor do + @moduledoc false + use Supervisor + + def start_link(kind, registry, partitions, listeners, entries) do + Supervisor.start_link(__MODULE__, {kind, registry, partitions, listeners, entries}, name: registry) + end + + def init({kind, registry, partitions, listeners, entries}) do + ^registry = :ets.new(registry, [:set, :public, :named_table, read_concurrency: true]) + true = :ets.insert(registry, entries) + + children = + for i <- 0..partitions - 1 do + key_partition = Registry.Partition.key_name(registry, i) + pid_partition = Registry.Partition.pid_name(registry, i) + arg = {kind, registry, i, partitions, key_partition, pid_partition, listeners} + worker(Registry.Partition, 
[pid_partition, arg], id: pid_partition) + end + + supervise(children, strategy: strategy_for_kind(kind)) + end + + # Unique registries have their key partition hashed by key. + # This means that, if a pid partition crashes, it may have + # entries from all key partitions, so we need to crash all. + defp strategy_for_kind(:unique), do: :one_for_all + + # Duplicate registries have both key and pid partitions hashed + # by pid. This means that, if a pid partition crashes, all of + # its associated entries are in its sibling table, so we crash one. + defp strategy_for_kind(:duplicate), do: :one_for_one +end + +defmodule Registry.Partition do + @moduledoc false + + # This process owns the equivalent key and pid ets tables + # and is responsible for monitoring processes that map to + # its own pid table. + use GenServer + @all_info -1 + @key_info -2 + + @doc """ + Returns the name of key partition table. + """ + @spec key_name(atom, non_neg_integer) :: atom + def key_name(registry, partition) do + Module.concat(registry, "KeyPartition" <> Integer.to_string(partition)) + end + + @doc """ + Returns the name of pid partition table. + """ + @spec pid_name(atom, non_neg_integer) :: atom + def pid_name(name, partition) do + Module.concat(name, "PIDPartition" <> Integer.to_string(partition)) + end + + @doc """ + Starts the registry partition. + + The process is only responsible for monitoring, demonitoring + and cleaning up when monitored processes crash. + """ + def start_link(registry, arg) do + GenServer.start_link(__MODULE__, arg, name: registry) + end + + ## Callbacks + + def init({kind, registry, i, partitions, key_partition, pid_partition, listeners}) do + Process.flag(:trap_exit, true) + key_ets = init_key_ets(kind, key_partition) + pid_ets = init_pid_ets(kind, pid_partition) + + # If we have only one partition, we do an optimization which + # is to write the table information alongside the registry info. + if partitions == 1 do + entries = + [{@key_info, {kind, partitions, key_ets}}, + {@all_info, {kind, partitions, key_ets, {self(), pid_ets}, listeners}}] + true = :ets.insert(registry, entries) + else + true = :ets.insert(registry, {i, key_ets, {self(), pid_ets}}) + end + + {:ok, pid_ets} + end + + # The key partition is a set for unique keys, + # duplicate bag for duplicate ones. + defp init_key_ets(:unique, key_partition) do + :ets.new(key_partition, [:set, :public, read_concurrency: true, write_concurrency: true]) + end + defp init_key_ets(:duplicate, key_partition) do + :ets.new(key_partition, [:duplicate_bag, :public, read_concurrency: true, write_concurrency: true]) + end + + # A process can always have multiple keys, so the + # pid partition is always a duplicate bag. 
+ defp init_pid_ets(_, pid_partition) do + :ets.new(pid_partition, [:duplicate_bag, :public, read_concurrency: true, write_concurrency: true]) + end + + def handle_call(:sync, _, state) do + {:reply, :ok, state} + end + + def handle_info({:EXIT, pid, _reason}, ets) do + entries = :ets.take(ets, pid) + for {_pid, key, key_ets} <- entries do + try do + :ets.match_delete(key_ets, {key, {pid, :_}}) + catch + :error, :badarg -> :badarg + end + end + {:noreply, ets} + end + def handle_info(msg, state) do + super(msg, state) + end +end diff --git a/lib/elixir/lib/set.ex b/lib/elixir/lib/set.ex index 97d4aa4a160..fb3fb1b9217 100644 --- a/lib/elixir/lib/set.ex +++ b/lib/elixir/lib/set.ex @@ -1,49 +1,16 @@ defmodule Set do @moduledoc ~S""" - This module specifies the Set API expected to be - implemented by different representations. + WARNING: this module is deprecated. - It also provides functions that redirect to the - underlying Set, allowing a developer to work with - different Set implementations using one API. - - To create a new set, use the `new` functions defined - by each set type: - - HashSet.new #=> creates an empty HashSet - - In the examples below, `set_impl` means a specific - `Set` implementation, for example `HashSet`. - - ## Protocols - - Sets are required to implement both `Enumerable` and `Collectable` - protocols. - - ## Match - - Sets are required to implement all operations using the match (`===`) - operator. + Use the `MapSet` module instead. """ - use Behaviour - @type value :: any - @type values :: [ value ] + @type values :: [value] @type t :: map - defcallback new :: t - defcallback delete(t, value) :: t - defcallback difference(t, t) :: t - defcallback disjoint?(t, t) :: boolean - defcallback equal?(t, t) :: boolean - defcallback intersection(t, t) :: t - defcallback member?(t, value) :: boolean - defcallback put(t, value) :: t - defcallback size(t) :: non_neg_integer - defcallback subset?(t, t) :: boolean - defcallback to_list(t) :: list() - defcallback union(t, t) :: t + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) defmacrop target(set) do quote do @@ -56,39 +23,10 @@ defmodule Set do end end - @doc """ - Deletes `value` from `set`. - - ## Examples - - iex> s = Enum.into([1, 2, 3], set_impl.new) - iex> Set.delete(s, 4) |> Enum.sort - [1, 2, 3] - - iex> s = Enum.into([1, 2, 3], set_impl.new) - iex> Set.delete(s, 2) |> Enum.sort - [1, 3] - - """ - @spec delete(t, value) :: t def delete(set, value) do target(set).delete(set, value) end - @doc """ - Returns a set that is `set1` without the members of `set2`. - - Notice this function is polymorphic as it calculates the difference - for of any type. Each set implementation also provides a `difference` - function, but they can only work with sets of the same type. - - ## Examples - - iex> Set.difference(Enum.into([1,2], set_impl.new), Enum.into([2,3,4], set_impl.new)) |> Enum.sort - [1] - - """ - @spec difference(t, t) :: t def difference(set1, set2) do target1 = target(set1) target2 = target(set2) @@ -96,29 +34,12 @@ defmodule Set do if target1 == target2 do target1.difference(set1, set2) else - target2.reduce(set2, {:cont, set1}, fn v, acc -> + Enumerable.reduce(set2, {:cont, set1}, fn v, acc -> {:cont, target1.delete(acc, v)} end) |> elem(1) end end - @doc """ - Checks if `set1` and `set2` have no members in common. - - Notice this function is polymorphic as it checks for disjoint sets of - any type. 
Each set implementation also provides a `disjoint?` function, - but they can only work with sets of the same type. - - ## Examples - - iex> Set.disjoint?(Enum.into([1, 2], set_impl.new), Enum.into([3, 4], set_impl.new)) - true - - iex> Set.disjoint?(Enum.into([1, 2], set_impl.new), Enum.into([2, 3], set_impl.new)) - false - - """ - @spec disjoint?(t, t) :: boolean def disjoint?(set1, set2) do target1 = target(set1) target2 = target(set2) @@ -126,7 +47,7 @@ defmodule Set do if target1 == target2 do target1.disjoint?(set1, set2) else - target2.reduce(set2, {:cont, true}, fn member, acc -> + Enumerable.reduce(set2, {:cont, true}, fn member, acc -> case target1.member?(set1, member) do false -> {:cont, acc} _ -> {:halt, false} @@ -136,28 +57,10 @@ defmodule Set do end @doc false - @spec empty(t) :: t def empty(set) do target(set).empty(set) end - @doc """ - Check if two sets are equal using `===`. - - Notice this function is polymorphic as it compares sets of - any type. Each set implementation also provides an `equal?` - function, but they can only work with sets of the same type. - - ## Examples - - iex> Set.equal?(Enum.into([1, 2], set_impl.new), Enum.into([2, 1, 1], set_impl.new)) - true - - iex> Set.equal?(Enum.into([1, 2], set_impl.new), Enum.into([3, 4], set_impl.new)) - false - - """ - @spec equal?(t, t) :: boolean def equal?(set1, set2) do target1 = target(set1) target2 = target(set2) @@ -174,23 +77,7 @@ defmodule Set do end end - @doc """ - Returns a set containing only members in common between `set1` and `set2`. - - Notice this function is polymorphic as it calculates the intersection of - any type. Each set implementation also provides a `intersection` function, - but they can only work with sets of the same type. - ## Examples - - iex> Set.intersection(Enum.into([1,2], set_impl.new), Enum.into([2,3,4], set_impl.new)) |> Enum.sort - [2] - - iex> Set.intersection(Enum.into([1,2], set_impl.new), Enum.into([3,4], set_impl.new)) |> Enum.sort - [] - - """ - @spec intersection(t, t) :: t def intersection(set1, set2) do target1 = target(set1) target2 = target(set2) @@ -198,77 +85,26 @@ defmodule Set do if target1 == target2 do target1.intersection(set1, set2) else - target1.reduce(set1, {:cont, Collectable.empty(set1)}, fn v, acc -> + Enumerable.reduce(set1, {:cont, target1.new}, fn v, acc -> {:cont, if(target2.member?(set2, v), do: target1.put(acc, v), else: acc)} end) |> elem(1) end end - @doc """ - Checks if `set` contains `value`. - ## Examples - - iex> Set.member?(Enum.into([1, 2, 3], set_impl.new), 2) - true - - iex> Set.member?(Enum.into([1, 2, 3], set_impl.new), 4) - false - - """ - @spec member?(t, value) :: boolean def member?(set, value) do target(set).member?(set, value) end - @doc """ - Inserts `value` into `set` if it does not already contain it. - ## Examples - - iex> Set.put(Enum.into([1, 2, 3], set_impl.new), 3) |> Enum.sort - [1, 2, 3] - - iex> Set.put(Enum.into([1, 2, 3], set_impl.new), 4) |> Enum.sort - [1, 2, 3, 4] - - """ - @spec put(t, value) :: t def put(set, value) do target(set).put(set, value) end - @doc """ - Returns the number of elements in `set`. - - ## Examples - - iex> Set.size(Enum.into([1, 2, 3], set_impl.new)) - 3 - - """ - @spec size(t) :: non_neg_integer def size(set) do target(set).size(set) end - @doc """ - Checks if `set1`'s members are all contained in `set2`. - - Notice this function is polymorphic as it checks the subset for - any type. 
Each set implementation also provides a `subset?` function, - but they can only work with sets of the same type. - - ## Examples - - iex> Set.subset?(Enum.into([1, 2], set_impl.new), Enum.into([1, 2, 3], set_impl.new)) - true - - iex> Set.subset?(Enum.into([1, 2, 3], set_impl.new), Enum.into([1, 2], set_impl.new)) - false - - """ - @spec subset?(t, t) :: boolean def subset?(set1, set2) do target1 = target(set1) target2 = target(set2) @@ -280,34 +116,10 @@ defmodule Set do end end - @doc """ - Converts `set` to a list. - - ## Examples - - iex> set_impl.to_list(Enum.into([1, 2, 3], set_impl.new)) |> Enum.sort - [1,2,3] - - """ - @spec to_list(t) :: list def to_list(set) do target(set).to_list(set) end - @doc """ - Returns a set containing all members of `set1` and `set2`. - - Notice this function is polymorphic as it calculates the union of - any type. Each set implementation also provides a `union` function, - but they can only work with sets of the same type. - - ## Examples - - iex> Set.union(Enum.into([1,2], set_impl.new), Enum.into([2,3,4], set_impl.new)) |> Enum.sort - [1,2,3,4] - - """ - @spec union(t, t) :: t def union(set1, set2) do target1 = target(set1) target2 = target(set2) @@ -315,14 +127,14 @@ defmodule Set do if target1 == target2 do target1.union(set1, set2) else - target2.reduce(set2, {:cont, set1}, fn v, acc -> + Enumerable.reduce(set2, {:cont, set1}, fn v, acc -> {:cont, target1.put(acc, v)} end) |> elem(1) end end - defp do_subset?(target1, target2, set1, set2) do - target1.reduce(set1, {:cont, true}, fn member, acc -> + defp do_subset?(_target1, target2, set1, set2) do + Enumerable.reduce(set1, {:cont, true}, fn member, acc -> case target2.member?(set2, member) do true -> {:cont, acc} _ -> {:halt, false} diff --git a/lib/elixir/lib/stream.ex b/lib/elixir/lib/stream.ex index d61de10063b..ae7f22cc20c 100644 --- a/lib/elixir/lib/stream.ex +++ b/lib/elixir/lib/stream.ex @@ -9,7 +9,7 @@ defmodule Stream do iex> range = 1..5 1..5 iex> Enum.map range, &(&1 * 2) - [2,4,6,8,10] + [2, 4, 6, 8, 10] In the example above, as we mapped over the range, the elements being enumerated were created one by one, during enumeration. The `Stream` @@ -18,11 +18,11 @@ defmodule Stream do iex> range = 1..3 iex> stream = Stream.map(range, &(&1 * 2)) iex> Enum.map(stream, &(&1 + 1)) - [3,5,7] + [3, 5, 7] Notice we started with a range and then we created a stream that is meant to multiply each item in the range by 2. At this point, no - computation was done yet. Just when `Enum.map/2` is called we + computation was done. Only when `Enum.map/2` is called we actually enumerate over each item in the range, multiplying it by 2 and adding 1. We say the functions in `Stream` are *lazy* and the functions in `Enum` are *eager*. @@ -33,26 +33,26 @@ defmodule Stream do computations that are executed at a later moment. Let's see another example: - 1..3 |> - Enum.map(&IO.inspect(&1)) |> - Enum.map(&(&1 * 2)) |> - Enum.map(&IO.inspect(&1)) + 1..3 + |> Enum.map(&IO.inspect(&1)) + |> Enum.map(&(&1 * 2)) + |> Enum.map(&IO.inspect(&1)) 1 2 3 2 4 6 - #=> [2,4,6] + #=> [2, 4, 6] Notice that we first printed each item in the list, then multiplied each element by 2 and finally printed each new value. In this example, the list was enumerated three times. 
Let's see an example with streams: - stream = 1..3 |> - Stream.map(&IO.inspect(&1)) |> - Stream.map(&(&1 * 2)) |> - Stream.map(&IO.inspect(&1)) + stream = 1..3 + |> Stream.map(&IO.inspect(&1)) + |> Stream.map(&(&1 * 2)) + |> Stream.map(&IO.inspect(&1)) Enum.to_list(stream) 1 2 @@ -60,16 +60,16 @@ defmodule Stream do 4 3 6 - #=> [2,4,6] + #=> [2, 4, 6] Although the end result is the same, the order in which the items were printed changed! With streams, we print the first item and then print its double. In this example, the list was enumerated just once! - That's what we meant when we first said that streams are composable, + That's what we meant when we said earlier that streams are composable, lazy enumerables. Notice we could call `Stream.map/2` multiple times, - effectively composing the streams and they are lazy. The computations - are performed only when you call a function from the `Enum` module. + effectively composing the streams and keeping them lazy. The computations + are only performed when you call a function from the `Enum` module. ## Creating Streams @@ -85,32 +85,37 @@ defmodule Stream do Note the functions in this module are guaranteed to return enumerables. Since enumerables can have different shapes (structs, anonymous functions, and so on), the functions in this module may return any of those shapes - and that it may change at any time. For example, a function that today + and this may change at any time. For example, a function that today returns an anonymous function may return a struct in future releases. """ + @doc false defstruct enum: nil, funs: [], accs: [], done: nil - @type acc :: any + @type acc :: any @type element :: any - @type index :: non_neg_integer + @type index :: non_neg_integer @type default :: any # Require Stream.Reducers and its callbacks require Stream.Reducers, as: R - defmacrop cont(f, entry, acc) do - quote do: unquote(f).(unquote(entry), unquote(acc)) + defmacrop skip(acc) do + {:cont, acc} + end + + defmacrop next(fun, entry, acc) do + quote do: unquote(fun).(unquote(entry), unquote(acc)) end - defmacrop acc(h, n, t) do - quote do: [unquote(h),unquote(n)|unquote(t)] + defmacrop acc(head, state, tail) do + quote do: [unquote(head), unquote(state) | unquote(tail)] end - defmacrop cont_with_acc(f, entry, h, n, t) do + defmacrop next_with_acc(fun, entry, head, state, tail) do quote do - {reason, [h|t]} = unquote(f).(unquote(entry), [unquote(h)|unquote(t)]) - {reason, [h,unquote(n)|t]} + {reason, [head | tail]} = unquote(fun).(unquote(entry), [unquote(head) | unquote(tail)]) + {reason, [head, unquote(state) | tail]} end end @@ -119,21 +124,24 @@ defmodule Stream do @doc """ Shortcut to `chunk(enum, n, n)`. """ - @spec chunk(Enumerable.t, non_neg_integer) :: Enumerable.t + @spec chunk(Enumerable.t, pos_integer) :: Enumerable.t def chunk(enum, n), do: chunk(enum, n, n, nil) @doc """ Streams the enumerable in chunks, containing `n` items each, where each new chunk starts `step` elements into the enumerable. - `step` is optional and, if not passed, defaults to `n`, i.e. - chunks do not overlap. If the final chunk does not have `n` - elements to fill the chunk, elements are taken as necessary - from `pad` if it was passed. If `pad` is passed and does not - have enough elements to fill the chunk, then the chunk is - returned anyway with less than `n` elements. If `pad` is not - passed at all or is `nil`, then the partial chunk is discarded - from the result. + `step` is optional and, if not passed, defaults to `count`, i.e. + chunks do not overlap. 
+ + If the final chunk does not have `count` elements to fill the chunk, + the final chunk is dropped unless `leftover` is given. + + If `leftover` is given, elements are taken from `leftover` to fill in + the chunk. If `leftover` is passed and does not have enough elements + to fill the chunk, then a partial chunk is returned with fewer than + `count` elements. Therefore, an empty list can be given to `leftover` + when you simply want the last chunk not to be discarded. ## Examples @@ -150,28 +158,32 @@ defmodule Stream do [[1, 2, 3], [4, 5, 6]] """ - @spec chunk(Enumerable.t, non_neg_integer, non_neg_integer) :: Enumerable.t - @spec chunk(Enumerable.t, non_neg_integer, non_neg_integer, Enumerable.t | nil) :: Enumerable.t - def chunk(enum, n, step, pad \\ nil) when n > 0 and step > 0 do + @spec chunk(Enumerable.t, pos_integer, pos_integer, Enumerable.t | nil) :: Enumerable.t + def chunk(enum, n, step, leftover \\ nil) + when is_integer(n) and n > 0 and is_integer(step) and step > 0 do limit = :erlang.max(n, step) - lazy enum, {[], 0}, - fn(f1) -> R.chunk(n, step, limit, f1) end, - fn(f1) -> &do_chunk(&1, n, pad, f1) end + if is_nil(leftover) do + lazy enum, {[], 0}, fn(f1) -> R.chunk(n, step, limit, f1) end + else + lazy enum, {[], 0}, + fn(f1) -> R.chunk(n, step, limit, f1) end, + &do_chunk(&1, n, leftover, &2) + end end - defp do_chunk(acc(h, {buffer, count} = old, t) = acc, n, pad, f1) do - if nil?(pad) || count == 0 do - {:cont, acc} - else - buffer = :lists.reverse(buffer) ++ Enum.take(pad, n - count) - cont_with_acc(f1, buffer, h, old, t) - end + defp do_chunk(acc(_, {_, 0}, _) = acc, _, _, _) do + {:cont, acc} + end + + defp do_chunk(acc(h, {buffer, count} = old, t), n, leftover, f1) do + buffer = :lists.reverse(buffer, Enum.take(leftover, n - count)) + next_with_acc(f1, buffer, h, old, t) end @doc """ - Chunks the `enum` by buffering elements for which `fun` returns - the same value and only emit them when `fun` returns a new value - or the `enum` finishes. + Chunks the `enum` by buffering elements for which `fun` returns the same value. + + Elements are only emitted when `fun` returns a new value or the `enum` finishes. ## Examples @@ -182,17 +194,96 @@ defmodule Stream do """ @spec chunk_by(Enumerable.t, (element -> any)) :: Enumerable.t def chunk_by(enum, fun) do - lazy enum, nil, - fn(f1) -> R.chunk_by(fun, f1) end, - fn(f1) -> &do_chunk_by(&1, f1) end + chunk_by(enum, nil, fn + entry, nil -> + {:cont, {[entry], fun.(entry)}} + entry, {acc, value} -> + case fun.(entry) do + ^value -> {:cont, {[entry | acc], value}} + new_value -> {:cont, :lists.reverse(acc), {[entry], new_value}} + end + end, fn + nil -> {:cont, :done} + {acc, _value} -> {:cont, :lists.reverse(acc), :done} + end) end - defp do_chunk_by(acc(_, nil, _) = acc, _f1) do - {:cont, acc} + @doc """ + Chunks the `enum` with fine-grained control over when every chunk is emitted. + + `chunk_fun` receives the current element and the accumulator and + must return `{:cont, element, acc}` to emit the given chunk and + continue with the accumulator, or `{:cont, acc}` to emit no chunk + and continue with the returned accumulator. + + `after_fun` is invoked when iteration is done and must also return + `{:cont, element, acc}` or `{:cont, acc}`.
+ + ## Examples + + iex> chunk_fun = fn i, acc -> + ...> if rem(i, 2) == 0 do + ...> {:cont, Enum.reverse([i | acc]), []} + ...> else + ...> {:cont, [i | acc]} + ...> end + ...> end + iex> after_fun = fn + ...> [] -> {:cont, []} + ...> acc -> {:cont, Enum.reverse(acc), []} + ...> end + iex> stream = Stream.chunk_by(1..10, [], chunk_fun, after_fun) + iex> Enum.to_list(stream) + [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]] + + """ + @spec chunk_by(Enumerable.t, acc, + (element, acc -> {:cont, chunk, acc} | {:cont, acc}), + (acc -> {:cont, chunk, acc} | {:cont, acc})) :: Enumerable.t when chunk: any + def chunk_by(enum, acc, chunk_fun, after_fun) do + lazy enum, acc, + fn(f1) -> R.chunk_by(chunk_fun, f1) end, + &after_chunk_by(&1, &2, after_fun) + end + + defp after_chunk_by(acc(h, acc, t), f1, after_fun) do + case after_fun.(acc) do + {:cont, emit, acc} -> next_with_acc(f1, emit, h, acc, t) + {:cont, acc} -> {:cont, acc(h, acc, t)} + end end - defp do_chunk_by(acc(h, {buffer, _}, t), f1) do - cont_with_acc(f1, :lists.reverse(buffer), h, nil, t) + @doc """ + Creates a stream that only emits elements if they are different from the last emitted element. + + This function only ever needs to store the last emitted element. + + Elements are compared using `===`. + + ## Examples + + iex> Stream.dedup([1, 2, 3, 3, 2, 1]) |> Enum.to_list + [1, 2, 3, 2, 1] + + """ + @spec dedup(Enumerable.t) :: Enumerable.t + def dedup(enum) do + dedup_by(enum, fn x -> x end) + end + + @doc """ + Creates a stream that only emits elements if the result of calling `fun` on the element is + different from the (stored) result of calling `fun` on the last emitted element. + + ## Examples + + iex> Stream.dedup_by([{1, :x}, {2, :y}, {2, :z}, {1, :x}], fn {x, _} -> x end) |> Enum.to_list + [{1, :x}, {2, :y}, {1, :x}] + + """ + @spec dedup_by(Enumerable.t, (element -> term)) :: Enumerable.t + def dedup_by(enum, fun) do + lazy enum, nil, fn f1 -> R.dedup(fun, f1) end end @doc """ @@ -207,11 +298,11 @@ defmodule Stream do iex> stream = Stream.drop(1..10, 5) iex> Enum.to_list(stream) - [6,7,8,9,10] + [6, 7, 8, 9, 10] iex> stream = Stream.drop(1..10, -5) iex> Enum.to_list(stream) - [1,2,3,4,5] + [1, 2, 3, 4, 5] """ @spec drop(Enumerable.t, non_neg_integer) :: Enumerable.t @@ -226,23 +317,54 @@ defmodule Stream do fn entry, [h, {count, buf1, []} | t] -> do_drop(:cont, n, entry, h, count, buf1, [], t) - entry, [h, {count, buf1, [next|buf2]} | t] -> - {reason, [h|t]} = f1.(next, [h|t]) + entry, [h, {count, buf1, [next | buf2]} | t] -> + {reason, [h | t]} = f1.(next, [h | t]) do_drop(reason, n, entry, h, count, buf1, buf2, t) end end end defp do_drop(reason, n, entry, h, count, buf1, buf2, t) do - buf1 = [entry|buf1] + buf1 = [entry | buf1] count = count + 1 if count == n do - {reason, [h, {0, [], :lists.reverse(buf1)}|t]} + {reason, [h, {0, [], :lists.reverse(buf1)} | t]} else - {reason, [h, {count, buf1, buf2}|t]} + {reason, [h, {count, buf1, buf2} | t]} end end + @doc """ + Creates a stream that drops every `nth` item from the enumerable. + + The first item is always dropped, unless `nth` is 0. + + `nth` must be a non-negative integer. 
+ + ## Examples + + iex> stream = Stream.drop_every(1..10, 2) + iex> Enum.to_list(stream) + [2, 4, 6, 8, 10] + + iex> stream = Stream.drop_every(1..1000, 1) + iex> Enum.to_list(stream) + [] + + iex> stream = Stream.drop_every([1, 2, 3, 4, 5], 0) + iex> Enum.to_list(stream) + [1, 2, 3, 4, 5] + + """ + @spec drop_every(Enumerable.t, non_neg_integer) :: Enumerable.t + def drop_every(enum, nth) + def drop_every(enum, 0), do: %Stream{enum: enum} + def drop_every([], _nth), do: %Stream{enum: []} + + def drop_every(enum, nth) when is_integer(nth) and nth > 0 do + lazy enum, nth, fn(f1) -> R.drop_every(nth, f1) end + end + @doc """ Lazily drops elements of the enumerable while the given function returns `true`. @@ -251,7 +373,7 @@ defmodule Stream do iex> stream = Stream.drop_while(1..10, &(&1 <= 5)) iex> Enum.to_list(stream) - [6,7,8,9,10] + [6, 7, 8, 9, 10] """ @spec drop_while(Enumerable.t, (element -> as_boolean(term))) :: Enumerable.t @@ -260,13 +382,13 @@ defmodule Stream do end @doc """ - Execute the given function for each item. + Executes the given function for each item. Useful for adding side effects (like printing) to a stream. ## Examples - iex> stream = Stream.each([1, 2, 3], fn(x) -> send self, x end) + iex> stream = Stream.each([1, 2, 3], fn(x) -> send self(), x end) iex> Enum.to_list(stream) iex> receive do: (x when is_integer(x) -> x) 1 @@ -287,8 +409,10 @@ defmodule Stream do end @doc """ - Creates a stream that will apply the given function on enumeration and - flatten the result. + Maps the given `fun` over `enumerable` and flattens the result. + + This function returns a new stream built by appending the result of invoking `fun` + on each element of `enumerable` together. ## Examples @@ -296,6 +420,10 @@ defmodule Stream do iex> Enum.to_list(stream) [1, 2, 2, 4, 3, 6] + iex> stream = Stream.flat_map([1, 2, 3], fn(x) -> [[x]] end) + iex> Enum.to_list(stream) + [[1], [2], [3]] + """ @spec flat_map(Enumerable.t, (element -> Enumerable.t)) :: Enumerable.t def flat_map(enum, mapper) do @@ -318,22 +446,37 @@ defmodule Stream do lazy enum, fn(f1) -> R.filter(fun, f1) end end + @doc false + # TODO: Remove on 2.0 + # (hard-deprecated in elixir_dispatch) + def filter_map(enum, filter, mapper) do + lazy enum, fn(f1) -> R.filter_map(filter, mapper, f1) end + end + @doc """ - Creates a stream that filters and then maps elements according - to given functions. + Creates a stream that emits a value after the given period `n` + in milliseconds. - Exists for symmetry with `Enum.filter_map/3`. + The values emitted are an increasing counter starting at `0`. + This operation will block the caller by the given interval + every time a new item is streamed. + + Do not use this function to generate a sequence of numbers. + If blocking the caller process is not necessary, use + `Stream.iterate(0, & &1 + 1)` instead. 
## Examples - iex> stream = Stream.filter_map(1..6, fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) - iex> Enum.to_list(stream) - [4,8,12] + iex> Stream.interval(10) |> Enum.take(10) + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] """ - @spec filter_map(Enumerable.t, (element -> as_boolean(term)), (element -> any)) :: Enumerable.t - def filter_map(enum, filter, mapper) do - lazy enum, fn(f1) -> R.filter_map(filter, mapper, f1) end + @spec interval(non_neg_integer) :: Enumerable.t + def interval(n) do + unfold 0, fn(count) -> + Process.sleep(n) + {count, count + 1} + end end @doc """ @@ -342,33 +485,33 @@ defmodule Stream do This function is often used with `run/1` since any evaluation is delayed until the stream is executed. See `run/1` for an example. """ - @spec into(Enumerable.t, Collectable.t) :: Enumerable.t + @spec into(Enumerable.t, Collectable.t, (term -> term)) :: Enumerable.t def into(enum, collectable, transform \\ fn x -> x end) do &do_into(enum, collectable, transform, &1, &2) end defp do_into(enum, collectable, transform, acc, fun) do {initial, into} = Collectable.into(collectable) - composed = fn x, [acc|collectable] -> + composed = fn x, [acc | collectable] -> collectable = into.(collectable, {:cont, transform.(x)}) {reason, acc} = fun.(x, acc) - {reason, [acc|collectable]} + {reason, [acc | collectable]} end do_into(&Enumerable.reduce(enum, &1, composed), initial, into, acc) end defp do_into(reduce, collectable, into, {command, acc}) do try do - reduce.({command, [acc|collectable]}) + reduce.({command, [acc | collectable]}) catch kind, reason -> stacktrace = System.stacktrace into.(collectable, :halt) :erlang.raise(kind, reason, stacktrace) else - {:suspended, [acc|collectable], continuation} -> + {:suspended, [acc | collectable], continuation} -> {:suspended, acc, &do_into(continuation, collectable, into, &1)} - {reason, [acc|collectable]} -> + {reason, [acc | collectable]} -> into.(collectable, :done) {reason, acc} end @@ -382,7 +525,7 @@ defmodule Stream do iex> stream = Stream.map([1, 2, 3], fn(x) -> x * 2 end) iex> Enum.to_list(stream) - [2,4,6] + [2, 4, 6] """ @spec map(Enumerable.t, (element -> any)) :: Enumerable.t @@ -390,6 +533,41 @@ defmodule Stream do lazy enum, fn(f1) -> R.map(fun, f1) end end + @doc """ + Creates a stream that will apply the given function on + every `nth` item from the enumerable. + + The first item is always passed to the given function. + + `nth` must be a non-negative integer. + + ## Examples + + iex> stream = Stream.map_every(1..10, 2, fn(x) -> x * 2 end) + iex> Enum.to_list(stream) + [2, 2, 6, 4, 10, 6, 14, 8, 18, 10] + + iex> stream = Stream.map_every([1, 2, 3, 4, 5], 1, fn(x) -> x * 2 end) + iex> Enum.to_list(stream) + [2, 4, 6, 8, 10] + + iex> stream = Stream.map_every(1..5, 0, fn(x) -> x * 2 end) + iex> Enum.to_list(stream) + [1, 2, 3, 4, 5] + + """ + @spec map_every(Enumerable.t, non_neg_integer, (element -> any)) :: Enumerable.t + def map_every(enum, nth, fun) + + def map_every(enum, 1, fun), do: map(enum, fun) + def map_every(enum, 0, _fun), do: %Stream{enum: enum} + def map_every([], _nth, _fun), do: %Stream{enum: []} + + def map_every(enum, nth, fun) when is_integer(nth) and nth > 0 do + lazy enum, nth, fn(f1) -> R.map_every(nth, fun, f1) end + end + + @doc """ Creates a stream that will reject elements according to the given function on enumeration. 
@@ -398,7 +576,7 @@ defmodule Stream do iex> stream = Stream.reject([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) iex> Enum.to_list(stream) - [1,3] + [1, 3] """ @spec reject(Enumerable.t, (element -> as_boolean(term))) :: Enumerable.t @@ -427,25 +605,26 @@ defmodule Stream do """ @spec run(Enumerable.t) :: :ok def run(stream) do - Enumerable.reduce(stream, {:cont, nil}, fn(_, _) -> {:cont, nil} end) + _ = Enumerable.reduce(stream, {:cont, nil}, fn(_, _) -> {:cont, nil} end) :ok end @doc """ Creates a stream that applies the given function to each element, emits the result and uses the same result as the accumulator - for the next computation. + for the next computation. Uses the first element in the enumerable + as the starting value. ## Examples iex> stream = Stream.scan(1..5, &(&1 + &2)) iex> Enum.to_list(stream) - [1,3,6,10,15] + [1, 3, 6, 10, 15] """ @spec scan(Enumerable.t, (element, acc -> any)) :: Enumerable.t def scan(enum, fun) do - lazy enum, :first, fn(f1) -> R.scan_2(fun, f1) end + lazy enum, :first, fn(f1) -> R.scan2(fun, f1) end end @doc """ @@ -457,93 +636,81 @@ defmodule Stream do iex> stream = Stream.scan(1..5, 0, &(&1 + &2)) iex> Enum.to_list(stream) - [1,3,6,10,15] + [1, 3, 6, 10, 15] """ @spec scan(Enumerable.t, acc, (element, acc -> any)) :: Enumerable.t def scan(enum, acc, fun) do - lazy enum, acc, fn(f1) -> R.scan_3(fun, f1) end + lazy enum, acc, fn(f1) -> R.scan3(fun, f1) end end @doc """ - Lazily takes the next `n` items from the enumerable and stops + Lazily takes the next `count` items from the enumerable and stops enumeration. - If a negative `n` is given, the last `n` values will be taken. - For such, the collection is fully enumerated keeping up to `2 * n` + If a negative `count` is given, the last `count` values will be taken. + For such, the collection is fully enumerated keeping up to `2 * count` elements in memory. Once the end of the collection is reached, the last `count` elements will be executed. Therefore, using - a negative `n` on an infinite collection will never return. + a negative `count` on an infinite collection will never return. 
## Examples iex> stream = Stream.take(1..100, 5) iex> Enum.to_list(stream) - [1,2,3,4,5] + [1, 2, 3, 4, 5] iex> stream = Stream.take(1..100, -5) iex> Enum.to_list(stream) - [96,97,98,99,100] + [96, 97, 98, 99, 100] iex> stream = Stream.cycle([1, 2, 3]) |> Stream.take(5) iex> Enum.to_list(stream) - [1,2,3,1,2] + [1, 2, 3, 1, 2] """ - @spec take(Enumerable.t, non_neg_integer) :: Enumerable.t + @spec take(Enumerable.t, integer) :: Enumerable.t def take(_enum, 0), do: %Stream{enum: []} + def take([], _count), do: %Stream{enum: []} - def take(enum, n) when n > 0 do - lazy enum, n, fn(f1) -> R.take(f1) end - end - - def take(enum, n) when n < 0 do - &do_take(enum, abs(n), &1, &2) + def take(enum, count) when is_integer(count) and count > 0 do + lazy enum, count, fn(f1) -> R.take(f1) end end - defp do_take(enum, n, acc, f) do - {_, {_count, buf1, buf2}} = - Enumerable.reduce(enum, {:cont, {0, [], []}}, fn - entry, {count, buf1, buf2} -> - buf1 = [entry|buf1] - count = count + 1 - if count == n do - {:cont, {0, [], buf1}} - else - {:cont, {count, buf1, buf2}} - end - end) - - Enumerable.reduce(do_take_last(buf1, buf2, n, []), acc, f) + def take(enum, count) when is_integer(count) and count < 0 do + &Enumerable.reduce(Enum.take(enum, count), &1, &2) end - defp do_take_last(_buf1, _buf2, 0, acc), - do: acc - defp do_take_last([], [], _, acc), - do: acc - defp do_take_last([], [h|t], n, acc), - do: do_take_last([], t, n-1, [h|acc]) - defp do_take_last([h|t], buf2, n, acc), - do: do_take_last(t, buf2, n-1, [h|acc]) - @doc """ - Creates a stream that takes every `n` item from the enumerable. + Creates a stream that takes every `nth` item from the enumerable. + + The first item is always included, unless `nth` is 0. - The first item is always included, unless `n` is 0. + `nth` must be a non-negative integer. ## Examples iex> stream = Stream.take_every(1..10, 2) iex> Enum.to_list(stream) - [1,3,5,7,9] + [1, 3, 5, 7, 9] + + iex> stream = Stream.take_every([1, 2, 3, 4, 5], 1) + iex> Enum.to_list(stream) + [1, 2, 3, 4, 5] + + iex> stream = Stream.take_every(1..1000, 0) + iex> Enum.to_list(stream) + [] """ @spec take_every(Enumerable.t, non_neg_integer) :: Enumerable.t - def take_every(enum, n) when n > 0 do - lazy enum, n, fn(f1) -> R.take_every(n, f1) end - end - + def take_every(enum, nth) def take_every(_enum, 0), do: %Stream{enum: []} + def take_every([], _nth), do: %Stream{enum: []} + + def take_every(enum, nth) when is_integer(nth) and nth > 0 do + lazy enum, nth, fn(f1) -> R.take_every(nth, f1) end + end @doc """ Lazily takes elements of the enumerable while the given @@ -553,7 +720,7 @@ defmodule Stream do iex> stream = Stream.take_while(1..100, &(&1 <= 5)) iex> Enum.to_list(stream) - [1,2,3,4,5] + [1, 2, 3, 4, 5] """ @spec take_while(Enumerable.t, (element -> as_boolean(term))) :: Enumerable.t @@ -561,6 +728,23 @@ defmodule Stream do lazy enum, fn(f1) -> R.take_while(fun, f1) end end + @doc """ + Creates a stream that emits a single value after `n` milliseconds. + + The value emitted is `0`. This operation will block the caller by + the given time until the item is streamed. + + ## Examples + + iex> Stream.timer(10) |> Enum.to_list + [0] + + """ + @spec timer(non_neg_integer) :: Enumerable.t + def timer(n) do + take(interval(n), 1) + end + @doc """ Transforms an existing stream. 
@@ -575,7 +759,7 @@ defmodule Stream do ## Examples - `Stream.transform/3` is a useful as it can be used as basis to implement + `Stream.transform/3` is useful as it can be used as the basis to implement many of the functions defined in this module. For example, we can implement `Stream.take(enum, n)` as follows: @@ -585,95 +769,156 @@ defmodule Stream do ...> if acc < n, do: {[i], acc + 1}, else: {:halt, acc} ...> end) iex> Enum.to_list(stream) - [1,2,3] + [1, 2, 3] """ - @spec transform(Enumerable.t, acc, fun) :: Enumerable.t when - fun: (element, acc -> {Enumerable.t, acc} | {:halt, acc}), - acc: any + @spec transform(Enumerable.t, acc, fun) :: Enumerable.t + when fun: (element, acc -> {Enumerable.t, acc} | {:halt, acc}), + acc: any def transform(enum, acc, reducer) do - &do_transform(enum, acc, reducer, &1, &2) + &do_transform(enum, fn -> acc end, reducer, &1, &2, nil) + end + + @doc """ + Transforms an existing stream with function-based start and finish. + + The accumulator is only calculated when transformation starts. It also + allows an after function to be given which is invoked when the stream + halts or completes. + + This function can be seen as a combination of `Stream.resource/3` with + `Stream.transform/3`. + """ + @spec transform(Enumerable.t, (() -> acc), fun, (acc -> term)) :: Enumerable.t + when fun: (element, acc -> {Enumerable.t, acc} | {:halt, acc}), + acc: any + def transform(enum, start_fun, reducer, after_fun) do + &do_transform(enum, start_fun, reducer, &1, &2, after_fun) end - defp do_transform(enumerables, user_acc, user, inner_acc, fun) do + defp do_transform(enumerables, user_acc, user, inner_acc, fun, after_fun) do inner = &do_transform_each(&1, &2, fun) step = &do_transform_step(&1, &2) next = &Enumerable.reduce(enumerables, &1, step) - do_transform(user_acc, user, fun, [], next, inner_acc, inner) - end - - defp do_transform(user_acc, user, fun, next_acc, next, inner_acc, inner) do - case next.({:cont, next_acc}) do - {:suspended, [val|next_acc], next} -> - try do - user.(val, user_acc) - catch - kind, reason -> - stacktrace = System.stacktrace - next.({:halt, next_acc}) - :erlang.raise(kind, reason, stacktrace) - else - {[], user_acc} -> - do_transform(user_acc, user, fun, next_acc, next, inner_acc, inner) - {list, user_acc} when is_list(list) -> - do_list_transform(user_acc, user, fun, next_acc, next, inner_acc, inner, &Enumerable.List.reduce(list, &1, fun)) - {:halt, _user_acc} -> - next.({:halt, next_acc}) - {:halted, elem(inner_acc, 1)} - {other, user_acc} -> - do_other_transform(user_acc, user, fun, next_acc, next, inner_acc, inner, &Enumerable.reduce(other, &1, inner)) - end - {reason, _} -> - {reason, elem(inner_acc, 1)} + do_transform(user_acc.(), user, fun, :cont, next, inner_acc, inner, after_fun) + end + + defp do_transform(user_acc, _user, _fun, _next_op, next, {:halt, inner_acc}, _inner, after_fun) do + next.({:halt, []}) + do_after(after_fun, user_acc) + {:halted, inner_acc} + end + + defp do_transform(user_acc, user, fun, next_op, next, {:suspend, inner_acc}, inner, after_fun) do + {:suspended, inner_acc, &do_transform(user_acc, user, fun, next_op, next, &1, inner, after_fun)} + end + + defp do_transform(user_acc, _user, _fun, :halt, _next, {_, inner_acc}, _inner, after_fun) do + do_after(after_fun, user_acc) + {:halted, inner_acc} + end + + defp do_transform(user_acc, user, fun, :cont, next, inner_acc, inner, after_fun) do + try do + next.({:cont, []}) + catch + kind, reason -> + stacktrace = System.stacktrace + do_after(after_fun, 
user_acc) + :erlang.raise(kind, reason, stacktrace) + else + {:suspended, vals, next} -> + do_transform_user(:lists.reverse(vals), user_acc, user, fun, :cont, next, inner_acc, inner, after_fun) + {_, vals} -> + do_transform_user(:lists.reverse(vals), user_acc, user, fun, :halt, next, inner_acc, inner, after_fun) end end - defp do_list_transform(user_acc, user, fun, next_acc, next, inner_acc, inner, reduce) do + defp do_transform_user([], user_acc, user, fun, next_op, next, inner_acc, inner, after_fun) do + do_transform(user_acc, user, fun, next_op, next, inner_acc, inner, after_fun) + end + + defp do_transform_user([val | vals], user_acc, user, fun, next_op, next, inner_acc, inner, after_fun) do + user.(val, user_acc) + catch + kind, reason -> + stacktrace = System.stacktrace + next.({:halt, []}) + do_after(after_fun, user_acc) + :erlang.raise(kind, reason, stacktrace) + else + {[], user_acc} -> + do_transform_user(vals, user_acc, user, fun, next_op, next, inner_acc, inner, after_fun) + {list, user_acc} when is_list(list) -> + do_list_transform(vals, user_acc, user, fun, next_op, next, inner_acc, inner, + &Enumerable.List.reduce(list, &1, fun), after_fun) + {:halt, user_acc} -> + next.({:halt, []}) + do_after(after_fun, user_acc) + {:halted, elem(inner_acc, 1)} + {other, user_acc} -> + do_enum_transform(vals, user_acc, user, fun, next_op, next, inner_acc, inner, + &Enumerable.reduce(other, &1, inner), after_fun) + end + + defp do_list_transform(vals, user_acc, user, fun, next_op, next, inner_acc, inner, reduce, after_fun) do try do reduce.(inner_acc) catch kind, reason -> - next.({:halt, next_acc}) - :erlang.raise(kind, reason, :erlang.get_stacktrace) + stacktrace = System.stacktrace + next.({:halt, []}) + do_after(after_fun, user_acc) + :erlang.raise(kind, reason, stacktrace) else {:done, acc} -> - do_transform(user_acc, user, fun, next_acc, next, {:cont, acc}, inner) + do_transform_user(vals, user_acc, user, fun, next_op, next, {:cont, acc}, inner, after_fun) {:halted, acc} -> - next.({:halt, next_acc}) + next.({:halt, []}) + do_after(after_fun, user_acc) {:halted, acc} {:suspended, acc, c} -> - {:suspended, acc, &do_list_transform(user_acc, user, fun, next_acc, next, &1, inner, c)} + {:suspended, acc, &do_list_transform(vals, user_acc, user, fun, next_op, next, &1, inner, c, after_fun)} end end - defp do_other_transform(user_acc, user, fun, next_acc, next, inner_acc, inner, reduce) do + defp do_enum_transform(vals, user_acc, user, fun, next_op, next, {op, inner_acc}, inner, reduce, after_fun) do try do - reduce.(inner_acc) + reduce.({op, [:outer | inner_acc]}) catch - {:stream_transform, h} -> - next.({:halt, next_acc}) - {:halted, h} kind, reason -> stacktrace = System.stacktrace - next.({:halt, next_acc}) + next.({:halt, []}) + do_after(after_fun, user_acc) :erlang.raise(kind, reason, stacktrace) else - {_, acc} -> - do_transform(user_acc, user, fun, next_acc, next, {:cont, acc}, inner) - {:suspended, acc, c} -> - {:suspended, acc, &do_other_transform(user_acc, user, fun, next_acc, next, &1, inner, c)} + # Only take into account outer halts when the op is not halt itself. + # Otherwise, we were the ones wishing to halt, so we should just stop. 
+ {:halted, [:outer | acc]} when op != :halt -> + do_transform_user(vals, user_acc, user, fun, next_op, next, {:cont, acc}, inner, after_fun) + {:halted, [_ | acc]} -> + next.({:halt, []}) + do_after(after_fun, user_acc) + {:halted, acc} + {:done, [_ | acc]} -> + do_transform_user(vals, user_acc, user, fun, next_op, next, {:cont, acc}, inner, after_fun) + {:suspended, [_ | acc], c} -> + {:suspended, acc, &do_enum_transform(vals, user_acc, user, fun, next_op, next, &1, inner, c, after_fun)} end end - defp do_transform_each(x, acc, f) do + defp do_after(nil, _user_acc), do: :ok + defp do_after(fun, user_acc), do: fun.(user_acc) + + defp do_transform_each(x, [:outer | acc], f) do case f.(x, acc) do - {:halt, h} -> throw({:stream_transform, h}) - {_, _} = o -> o + {:halt, res} -> {:halt, [:inner | res]} + {op, res} -> {op, [:outer | res]} end end defp do_transform_step(x, acc) do - {:suspend, [x|acc]} + {:suspend, [x | acc]} end @doc """ @@ -686,33 +931,68 @@ defmodule Stream do ## Examples - iex> Stream.uniq([1, 2, 3, 2, 1]) |> Enum.to_list + iex> Stream.uniq([1, 2, 3, 3, 2, 1]) |> Enum.to_list [1, 2, 3] - iex> Stream.uniq([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end) |> Enum.to_list - [{1,:x}, {2,:y}] - """ @spec uniq(Enumerable.t) :: Enumerable.t - @spec uniq(Enumerable.t, (element -> term)) :: Enumerable.t - def uniq(enum, fun \\ fn x -> x end) do - lazy enum, [], fn f1 -> R.uniq(fun, f1) end + def uniq(enum) do + uniq_by(enum, fn x -> x end) + end + + @doc false + # TODO: Remove on 2.0 + # (hard-deprecated in elixir_dispatch) + def uniq(enum, fun) do + uniq_by(enum, fun) + end + + @doc """ + Creates a stream that only emits elements if they are unique, by removing the + elements for which function `fun` returned duplicate items. + + The function `fun` maps every element to a term which is used to + determine if two elements are duplicates. + + Keep in mind that, in order to know if an element is unique + or not, this function needs to store all unique values emitted + by the stream. Therefore, if the stream is infinite, the number + of items stored will grow infinitely, never being garbage collected. + + ## Example + + iex> Stream.uniq_by([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end) |> Enum.to_list + [{1, :x}, {2, :y}] + + iex> Stream.uniq_by([a: {:tea, 2}, b: {:tea, 2}, c: {:coffee, 1}], fn {_, y} -> y end) |> Enum.to_list + [a: {:tea, 2}, c: {:coffee, 1}] + + """ + @spec uniq_by(Enumerable.t, (element -> term)) :: Enumerable.t + def uniq_by(enum, fun) do + lazy enum, %{}, fn f1 -> R.uniq_by(fun, f1) end end @doc """ Creates a stream where each item in the enumerable will be wrapped in a tuple alongside its index. + If an `offset` is given, we will index from the given offset instead of from zero. 
+ ## Examples iex> stream = Stream.with_index([1, 2, 3]) iex> Enum.to_list(stream) - [{1,0},{2,1},{3,2}] + [{1, 0}, {2, 1}, {3, 2}] + + iex> stream = Stream.with_index([1, 2, 3], 3) + iex> Enum.to_list(stream) + [{1, 3}, {2, 4}, {3, 5}] """ - @spec with_index(Enumerable.t) :: Enumerable.t - def with_index(enum) do - lazy enum, 0, fn(f1) -> R.with_index(f1) end + @spec with_index(Enumerable.t, integer) :: Enumerable.t + def with_index(enum, offset \\ 0) do + lazy enum, offset, fn(f1) -> R.with_index(f1) end end ## Combiners @@ -724,7 +1004,7 @@ defmodule Stream do iex> stream = Stream.concat([1..3, 4..6, 7..9]) iex> Enum.to_list(stream) - [1,2,3,4,5,6,7,8,9] + [1, 2, 3, 4, 5, 6, 7, 8, 9] """ @spec concat(Enumerable.t) :: Enumerable.t @@ -739,13 +1019,13 @@ defmodule Stream do iex> stream = Stream.concat(1..3, 4..6) iex> Enum.to_list(stream) - [1,2,3,4,5,6] + [1, 2, 3, 4, 5, 6] iex> stream1 = Stream.cycle([1, 2, 3]) iex> stream2 = Stream.cycle([4, 5, 6]) iex> stream = Stream.concat(stream1, stream2) iex> Enum.take(stream, 6) - [1,2,3,1,2,3] + [1, 2, 3, 1, 2, 3] """ @spec concat(Enumerable.t, Enumerable.t) :: Enumerable.t @@ -763,19 +1043,39 @@ defmodule Stream do iex> concat = Stream.concat(1..3, 4..6) iex> cycle = Stream.cycle([:a, :b, :c]) iex> Stream.zip(concat, cycle) |> Enum.to_list - [{1,:a},{2,:b},{3,:c},{4,:a},{5,:b},{6,:c}] + [{1, :a}, {2, :b}, {3, :c}, {4, :a}, {5, :b}, {6, :c}] """ @spec zip(Enumerable.t, Enumerable.t) :: Enumerable.t - def zip(left, right) do + def zip(left, right), do: zip([left, right]) + + @doc """ + Zips corresponding elements from a collection of enumerables + into one stream of tuples. + + The zipping finishes as soon as any enumerable completes. + + ## Examples + + iex> concat = Stream.concat(1..3, 4..6) + iex> cycle = Stream.cycle(["foo", "bar", "baz"]) + iex> Stream.zip([concat, [:a, :b, :c], cycle]) |> Enum.to_list + [{1, :a, "foo"}, {2, :b, "bar"}, {3, :c, "baz"}] + + """ + @spec zip([Enumerable.t]) :: Enumerable.t + def zip(enumerables) do step = &do_zip_step(&1, &2) - left_fun = &Enumerable.reduce(left, &1, step) - right_fun = &Enumerable.reduce(right, &1, step) + enum_funs = Enum.map(enumerables, fn enum -> + {&Enumerable.reduce(enum, &1, step), :cont} + end) - # Return a function as a lazy enumerator. - &do_zip([{left_fun, []}, {right_fun, []}], &1, &2) + &do_zip(enum_funs, &1, &2) end + # This implementation of do_zip/3 works for any number of + # streams to zip, even if right now zip/2 only zips two streams. + defp do_zip(zips, {:halt, acc}, _fun) do do_zip_close(zips) {:halted, acc} @@ -787,7 +1087,7 @@ defmodule Stream do defp do_zip(zips, {:cont, acc}, callback) do try do - do_zip(zips, acc, callback, [], []) + do_zip_next_tuple(zips, acc, callback, [], []) catch kind, reason -> stacktrace = System.stacktrace @@ -796,34 +1096,47 @@ defmodule Stream do else {:next, buffer, acc} -> do_zip(buffer, acc, callback) - {:done, _} = o -> - o + {:done, _acc} = other -> + other end end - defp do_zip([{fun, fun_acc}|t], acc, callback, list, buffer) do - case fun.({:cont, fun_acc}) do - {:suspended, [i|fun_acc], fun} -> - do_zip(t, acc, callback, [i|list], [{fun, fun_acc}|buffer]) - {_, _} -> - do_zip_close(:lists.reverse(buffer) ++ t) + # do_zip_next_tuple/5 computes the next tuple formed by + # the next element of each zipped stream. 
+ + defp do_zip_next_tuple([{_, :halt} | zips], acc, _callback, _yielded_elems, buffer) do + do_zip_close(:lists.reverse(buffer, zips)) + {:done, acc} + end + + defp do_zip_next_tuple([{fun, :cont} | zips], acc, callback, yielded_elems, buffer) do + case fun.({:cont, []}) do + {:suspended, [elem], fun} -> + do_zip_next_tuple(zips, acc, callback, [elem | yielded_elems], [{fun, :cont} | buffer]) + {_, [elem]} -> + do_zip_next_tuple(zips, acc, callback, [elem | yielded_elems], [{fun, :halt} | buffer]) + {_, []} -> + # The current zipped stream terminated, so we close all the streams + # and return {:halted, acc} (which is returned as is by do_zip/3). + do_zip_close(:lists.reverse(buffer, zips)) {:done, acc} end end - defp do_zip([], acc, callback, list, buffer) do - zipped = List.to_tuple(:lists.reverse(list)) + defp do_zip_next_tuple([] = _zips, acc, callback, yielded_elems, buffer) do + # "yielded_elems" is a reversed list of results for the current iteration of + # zipping: it needs to be reversed and converted to a tuple to have the next + # tuple in the list resulting from zipping. + zipped = List.to_tuple(:lists.reverse(yielded_elems)) {:next, :lists.reverse(buffer), callback.(zipped, acc)} end - defp do_zip_close([]), do: :ok - defp do_zip_close([{fun, acc}|t]) do - fun.({:halt, acc}) - do_zip_close(t) + defp do_zip_close(zips) do + :lists.foreach(fn {fun, _} -> fun.({:halt, []}) end, zips) end - defp do_zip_step(x, acc) do - {:suspend, [x|acc]} + defp do_zip_step(x, []) do + {:suspend, [x]} end ## Sources @@ -834,12 +1147,21 @@ defmodule Stream do ## Examples - iex> stream = Stream.cycle([1,2,3]) + iex> stream = Stream.cycle([1, 2, 3]) iex> Enum.take(stream, 5) - [1,2,3,1,2] + [1, 2, 3, 1, 2] """ @spec cycle(Enumerable.t) :: Enumerable.t + def cycle(enumerable) + + def cycle(enumerable) when is_list(enumerable) do + unfold {enumerable, enumerable}, fn + {source, [h | t]} -> {h, {source, t}} + {source = [h | t], []} -> {h, {source, t}} + end + end + def cycle(enumerable) do fn acc, fun -> inner = &do_cycle_each(&1, &2, fun) @@ -878,13 +1200,13 @@ defmodule Stream do end @doc """ - Emit a sequence of values, starting with `start_value`. Successive + Emits a sequence of values, starting with `start_value`. Successive values are generated by calling `next_fun` on the previous value. ## Examples iex> Stream.iterate(0, &(&1+1)) |> Enum.take(5) - [0,1,2,3,4] + [0, 1, 2, 3, 4] """ @spec iterate(element, (element -> element)) :: Enumerable.t @@ -903,12 +1225,14 @@ defmodule Stream do ## Examples - iex> Stream.repeatedly(&:random.uniform/0) |> Enum.take(3) - [0.4435846174457203, 0.7230402056221108, 0.94581636451987] + # Although not necessary, let's seed the random algorithm + iex> :rand.seed(:exsplus, {1, 2, 3}) + iex> Stream.repeatedly(&:rand.uniform/0) |> Enum.take(3) + [0.40502929729990744, 0.45336720247823126, 0.04094511692041057] """ @spec repeatedly((() -> element)) :: Enumerable.t - def repeatedly(generator_fun) when is_function(generator_fun, 0) do + def repeatedly(generator_fun) do &do_repeatedly(generator_fun, &1, &2) end @@ -927,14 +1251,15 @@ defmodule Stream do @doc """ Emits a sequence of values for the given resource. - Similar to `unfold/2` but the initial value is computed lazily via - `start_fun` and executes an `after_fun` at the end of enumeration - (both in cases of success and failure). 
+ Similar to `transform/3` but the initial accumulated value is + computed lazily via `start_fun` and executes an `after_fun` at + the end of enumeration (both in cases of success and failure). Successive values are generated by calling `next_fun` with the previous accumulator (the initial value being the result returned - by `start_fun`) and it must return a tuple with the current and - next accumulator. The enumeration finishes if it returns `nil`. + by `start_fun`) and it must return a tuple containing a list + of items to be emitted and the next accumulator. The enumeration + finishes if it returns `{:halt, acc}`. As the name says, this function is useful to stream values from resources. @@ -944,14 +1269,14 @@ defmodule Stream do Stream.resource(fn -> File.open!("sample") end, fn file -> case IO.read(file, :line) do - data when is_binary(data) -> {data, file} - _ -> nil + data when is_binary(data) -> {[data], file} + _ -> {:halt, file} end end, fn file -> File.close(file) end) """ - @spec resource((() -> acc), (acc -> {element, acc} | nil), (acc -> term)) :: Enumerable.t + @spec resource((() -> acc), (acc -> {[element], acc} | {:halt, acc}), (acc -> term)) :: Enumerable.t def resource(start_fun, next_fun, after_fun) do &do_resource(start_fun.(), next_fun, &1, &2, after_fun) end @@ -967,9 +1292,11 @@ defmodule Stream do defp do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun) do try do + # Optimize the most common cases case next_fun.(next_acc) do - nil -> nil - {v, next_acc} -> {fun.(v, acc), next_acc} + {[], next_acc} -> {:opt, {:cont, acc}, next_acc} + {[v], next_acc} -> {:opt, fun.(v, acc), next_acc} + {_, _} = other -> other end catch kind, reason -> @@ -977,11 +1304,62 @@ defmodule Stream do after_fun.(next_acc) :erlang.raise(kind, reason, stacktrace) else - nil -> - after_fun.(next_acc) - {:done, acc} - {acc, next_acc} -> + {:opt, acc, next_acc} -> do_resource(next_acc, next_fun, acc, fun, after_fun) + {:halt, next_acc} -> + do_resource(next_acc, next_fun, {:halt, acc}, fun, after_fun) + {list, next_acc} when is_list(list) -> + do_list_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun, + &Enumerable.List.reduce(list, &1, fun)) + {enum, next_acc} -> + inner = &do_resource_each(&1, &2, fun) + do_enum_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun, + &Enumerable.reduce(enum, &1, inner)) + end + end + + defp do_list_resource(next_acc, next_fun, acc, fun, after_fun, reduce) do + try do + reduce.(acc) + catch + kind, reason -> + stacktrace = System.stacktrace + after_fun.(next_acc) + :erlang.raise(kind, reason, stacktrace) + else + {:done, acc} -> + do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun) + {:halted, acc} -> + do_resource(next_acc, next_fun, {:halt, acc}, fun, after_fun) + {:suspended, acc, c} -> + {:suspended, acc, &do_list_resource(next_acc, next_fun, &1, fun, after_fun, c)} + end + end + + defp do_enum_resource(next_acc, next_fun, {op, acc}, fun, after_fun, reduce) do + try do + reduce.({op, [:outer | acc]}) + catch + kind, reason -> + stacktrace = System.stacktrace + after_fun.(next_acc) + :erlang.raise(kind, reason, stacktrace) + else + {:halted, [:outer | acc]} -> + do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun) + {:halted, [:inner | acc]} -> + do_resource(next_acc, next_fun, {:halt, acc}, fun, after_fun) + {:done, [_ | acc]} -> + do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun) + {:suspended, [_ | acc], c} -> + {:suspended, acc, &do_enum_resource(next_acc, next_fun, &1, fun, after_fun, c)} + end + end 
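  # A minimal, self-contained sketch of the resource contract described above:
  # each call to next_fun returns a (possibly multi-element) list of elements to
  # emit together with the next accumulator, or {:halt, acc} to stop.
  #
  #     Stream.resource(
  #       fn -> 1 end,
  #       fn
  #         n when n > 3 -> {:halt, n}
  #         n -> {[n, n], n + 1}
  #       end,
  #       fn _ -> :ok end
  #     )
  #     |> Enum.to_list()
  #     #=> [1, 1, 2, 2, 3, 3]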
+ + defp do_resource_each(x, [:outer | acc], f) do + case f.(x, acc) do + {:halt, res} -> {:halt, [:inner | res]} + {op, res} -> {op, [:outer | res]} end end @@ -1022,18 +1400,18 @@ defmodule Stream do @compile {:inline, lazy: 2, lazy: 3, lazy: 4} - defp lazy(%Stream{funs: funs} = lazy, fun), - do: %{lazy | funs: [fun|funs] } + defp lazy(%Stream{done: nil, funs: funs} = lazy, fun), + do: %{lazy | funs: [fun | funs]} defp lazy(enum, fun), do: %Stream{enum: enum, funs: [fun]} - defp lazy(%Stream{funs: funs, accs: accs} = lazy, acc, fun), - do: %{lazy | funs: [fun|funs], accs: [acc|accs] } + defp lazy(%Stream{done: nil, funs: funs, accs: accs} = lazy, acc, fun), + do: %{lazy | funs: [fun | funs], accs: [acc | accs]} defp lazy(enum, acc, fun), do: %Stream{enum: enum, funs: [fun], accs: [acc]} defp lazy(%Stream{done: nil, funs: funs, accs: accs} = lazy, acc, fun, done), - do: %{lazy | funs: [fun|funs], accs: [acc|accs], done: done} + do: %{lazy | funs: [fun | funs], accs: [acc | accs], done: done} defp lazy(enum, acc, fun, done), do: %Stream{enum: enum, funs: [fun], accs: [acc], done: done} end @@ -1063,22 +1441,32 @@ defimpl Enumerable, for: Stream do end defp do_each(reduce, done, accs, {command, acc}) do - case reduce.({command, [acc|accs]}) do - {:suspended, [acc|accs], continuation} -> + case reduce.({command, [acc | accs]}) do + {:suspended, [acc | accs], continuation} -> {:suspended, acc, &do_each(continuation, done, accs, &1)} - {:halted, [acc|_]} -> - {:halted, acc} - {:done, [acc|_] = accs} -> - case done do - nil -> - {:done, acc} - {done, fun} -> - case done.(fun).(accs) do - {:cont, [acc|_]} -> {:done, acc} - {:halt, [acc|_]} -> {:halted, acc} - {:suspend, [acc|_]} -> {:suspended, acc, &({:done, elem(&1, 1)})} - end - end + {:halted, accs} -> + do_done {:halted, accs}, done + {:done, accs} -> + do_done {:done, accs}, done end end + + defp do_done({reason, [acc | _]}, nil), do: {reason, acc} + defp do_done({reason, [acc | t]}, {done, fun}) do + [h | _] = Enum.reverse(t) + case done.([acc, h], fun) do + {:cont, [acc | _]} -> {reason, acc} + {:halt, [acc | _]} -> {:halted, acc} + {:suspend, [acc | _]} -> {:suspended, acc, &({:done, elem(&1, 1)})} + end + end +end + +defimpl Inspect, for: Stream do + import Inspect.Algebra + + def inspect(%{enum: enum, funs: funs}, opts) do + inner = [enum: enum, funs: Enum.reverse(funs)] + concat ["#Stream<", to_doc(inner, opts), ">"] + end end diff --git a/lib/elixir/lib/stream/reducers.ex b/lib/elixir/lib/stream/reducers.ex index 18659600fd5..68c445842a0 100644 --- a/lib/elixir/lib/stream/reducers.ex +++ b/lib/elixir/lib/stream/reducers.ex @@ -2,13 +2,13 @@ defmodule Stream.Reducers do # Collection of reducers shared by Enum and Stream. 
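  # As an illustration of how Stream composes these reducers through its lazy
  # helpers (and of the Inspect implementation above), consider the sketch below;
  # the exact function ids in the inspected output vary.
  #
  #     stream = 1..3 |> Stream.map(&(&1 * 2)) |> Stream.filter(&(&1 > 2))
  #     inspect(stream)
  #     #=> "#Stream<[enum: 1..3, funs: [#Function<...>, #Function<...>]]>"
  #     Enum.to_list(stream)
  #     #=> [4, 6]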
@moduledoc false - defmacro chunk(n, step, limit, f \\ nil) do + defmacro chunk(amount, step, limit, fun \\ nil) do quote do - fn entry, acc(h, {buffer, count}, t) -> - buffer = [entry|buffer] - count = count + 1 + fn entry, acc(head, {buffer, count}, tail) -> + buffer = [entry | buffer] + count = count + 1 - new = + new_state = if count >= unquote(limit) do left = count - unquote(step) {Enum.take(buffer, left), left} @@ -16,148 +16,180 @@ defmodule Stream.Reducers do {buffer, count} end - if count == unquote(n) do - cont_with_acc(unquote(f), :lists.reverse(buffer), h, new, t) + if count == unquote(amount) do + next_with_acc(unquote(fun), :lists.reverse(buffer), head, new_state, tail) else - {:cont, acc(h, new, t)} + skip(acc(head, new_state, tail)) end end end end - defmacro chunk_by(callback, f \\ nil) do + defmacro chunk_by(callback, fun \\ nil) do + quote do + fn entry, acc(head, acc, tail) -> + case unquote(callback).(entry, acc) do + {:cont, emit, acc} -> next_with_acc(unquote(fun), emit, head, acc, tail) + {:cont, acc} -> skip(acc(head, acc, tail)) + end + end + end + end + + defmacro dedup(callback, fun \\ nil) do + quote do + fn(entry, acc(head, prev, tail) = acc) -> + value = unquote(callback).(entry) + case prev do + {:value, ^value} -> skip(acc) + _ -> next_with_acc(unquote(fun), entry, head, {:value, value}, tail) + end + end + end + end + + defmacro drop(fun \\ nil) do quote do fn - entry, acc(h, {buffer, value}, t) -> - new_value = unquote(callback).(entry) - if new_value == value do - {:cont, acc(h, {[entry|buffer], value}, t)} - else - cont_with_acc(unquote(f), :lists.reverse(buffer), h, {[entry], new_value}, t) - end - entry, acc(h, nil, t) -> - {:cont, acc(h, {[entry], unquote(callback).(entry)}, t)} + _entry, acc(head, amount, tail) when amount > 0 -> + skip(acc(head, amount - 1, tail)) + entry, acc(head, amount, tail) -> + next_with_acc(unquote(fun), entry, head, amount, tail) end end end - defmacro drop(f \\ nil) do + defmacro drop_every(nth, fun \\ nil) do quote do fn - _entry, acc(h, n, t) when n > 0 -> - {:cont, acc(h, n-1, t)} - entry, acc(h, n, t) -> - cont_with_acc(unquote(f), entry, h, n, t) + entry, acc(head, curr, tail) when curr in [unquote(nth), :first] -> + skip(acc(head, 1, tail)) + entry, acc(head, curr, tail) -> + next_with_acc(unquote(fun), entry, head, curr + 1, tail) end end end - defmacro drop_while(callback, f \\ nil) do + defmacro drop_while(callback, fun \\ nil) do quote do - fn entry, acc(h, bool, t) = orig -> + fn entry, acc(head, bool, tail) = original -> if bool and unquote(callback).(entry) do - {:cont, orig} + skip(original) else - cont_with_acc(unquote(f), entry, h, false, t) + next_with_acc(unquote(fun), entry, head, false, tail) end end end end - defmacro filter(callback, f \\ nil) do + defmacro filter(callback, fun \\ nil) do quote do fn(entry, acc) -> if unquote(callback).(entry) do - cont(unquote(f), entry, acc) + next(unquote(fun), entry, acc) else - {:cont, acc} + skip(acc) end end end end - defmacro filter_map(filter, mapper, f \\ nil) do + defmacro filter_map(filter, mapper, fun \\ nil) do quote do fn(entry, acc) -> if unquote(filter).(entry) do - cont(unquote(f), unquote(mapper).(entry), acc) + next(unquote(fun), unquote(mapper).(entry), acc) else - {:cont, acc} + skip(acc) end end end end - defmacro map(callback, f \\ nil) do + defmacro map(callback, fun \\ nil) do quote do fn(entry, acc) -> - cont(unquote(f), unquote(callback).(entry), acc) + next(unquote(fun), unquote(callback).(entry), acc) end end end - defmacro 
reject(callback, f \\ nil) do + defmacro map_every(nth, mapper, fun \\ nil) do + quote do + fn + entry, acc(head, curr, tail) when curr in [unquote(nth), :first] -> + next_with_acc(unquote(fun), unquote(mapper).(entry), head, 1, tail) + entry, acc(head, curr, tail) -> + next_with_acc(unquote(fun), entry, head, curr + 1, tail) + end + end + end + + defmacro reject(callback, fun \\ nil) do quote do fn(entry, acc) -> unless unquote(callback).(entry) do - cont(unquote(f), entry, acc) + next(unquote(fun), entry, acc) else - {:cont, acc} + skip(acc) end end end end - defmacro scan_2(callback, f \\ nil) do + defmacro scan2(callback, fun \\ nil) do quote do fn - entry, acc(h, :first, t) -> - cont_with_acc(unquote(f), entry, h, {:ok, entry}, t) - entry, acc(h, {:ok, acc}, t) -> + entry, acc(head, :first, tail) -> + next_with_acc(unquote(fun), entry, head, {:ok, entry}, tail) + entry, acc(head, {:ok, acc}, tail) -> value = unquote(callback).(entry, acc) - cont_with_acc(unquote(f), value, h, {:ok, value}, t) + next_with_acc(unquote(fun), value, head, {:ok, value}, tail) end end end - defmacro scan_3(callback, f \\ nil) do + defmacro scan3(callback, fun \\ nil) do quote do - fn(entry, acc(h, acc, t)) -> + fn(entry, acc(head, acc, tail)) -> value = unquote(callback).(entry, acc) - cont_with_acc(unquote(f), value, h, value, t) + next_with_acc(unquote(fun), value, head, value, tail) end end end - defmacro take(f \\ nil) do + defmacro take(fun \\ nil) do quote do - fn(entry, acc(h, n, t) = orig) -> - if n >= 1 do - cont_with_acc(unquote(f), entry, h, n-1, t) - else - {:halt, orig} + fn(entry, acc(head, curr, tail) = original) -> + case curr do + 0 -> + {:halt, original} + 1 -> + {_, acc} = next_with_acc(unquote(fun), entry, head, 0, tail) + {:halt, acc} + _ -> + next_with_acc(unquote(fun), entry, head, curr - 1, tail) end end end end - defmacro take_every(nth, f \\ nil) do + defmacro take_every(nth, fun \\ nil) do quote do fn - entry, acc(h, n, t) when n === :first - when n === unquote(nth) -> - cont_with_acc(unquote(f), entry, h, 1, t) - entry, acc(h, n, t) -> - {:cont, acc(h, n+1, t)} + entry, acc(head, curr, tail) when curr in [unquote(nth), :first] -> + next_with_acc(unquote(fun), entry, head, 1, tail) + entry, acc(head, curr, tail) -> + skip(acc(head, curr + 1, tail)) end end end - defmacro take_while(callback, f \\ nil) do + defmacro take_while(callback, fun \\ nil) do quote do fn(entry, acc) -> if unquote(callback).(entry) do - cont(unquote(f), entry, acc) + next(unquote(fun), entry, acc) else {:halt, acc} end @@ -165,23 +197,23 @@ defmodule Stream.Reducers do end end - defmacro uniq(callback, f \\ nil) do + defmacro uniq_by(callback, fun \\ nil) do quote do - fn(entry, acc(h, prev, t) = acc) -> + fn(entry, acc(head, prev, tail) = original) -> value = unquote(callback).(entry) - if :lists.member(value, prev) do - {:cont, acc} + if Map.has_key?(prev, value) do + skip(original) else - cont_with_acc(unquote(f), entry, h, [value|prev], t) + next_with_acc(unquote(fun), entry, head, Map.put(prev, value, true), tail) end end end end - defmacro with_index(f \\ nil) do + defmacro with_index(fun \\ nil) do quote do - fn(entry, acc(h, counter, t)) -> - cont_with_acc(unquote(f), {entry, counter}, h, counter + 1, t) + fn(entry, acc(head, counter, tail)) -> + next_with_acc(unquote(fun), {entry, counter}, head, counter + 1, tail) end end end diff --git a/lib/elixir/lib/string.ex b/lib/elixir/lib/string.ex index b0895253346..d37daeac09b 100644 --- a/lib/elixir/lib/string.ex +++ b/lib/elixir/lib/string.ex @@ -4,29 
+4,15 @@ defmodule String do @moduledoc ~S""" A String in Elixir is a UTF-8 encoded binary. - ## String and binary operations - - The functions in this module act according to the - Unicode Standard, version 6.3.0. For example, - `capitalize/1`, `downcase/1`, `strip/1` are provided by this - module. - - In addition to this module, Elixir provides more low-level - operations that work directly with binaries. Some - of those can be found in the `Kernel` module, as: + ## Codepoints and grapheme cluster - * `Kernel.binary_part/3` - retrieves part of the binary - * `Kernel.bit_size/1` and `Kernel.byte_size/1` - size related functions - * `Kernel.is_bitstring/1` and `Kernel.is_binary/1` - type checking function - * Plus a number of functions for working with binaries (bytes) - [in the `:binary` module](http://erlang.org/doc/man/binary.html) + The functions in this module act according to the Unicode + Standard, version 9.0.0. - ## Codepoints and graphemes + As per the standard, a codepoint is a single Unicode Character, + which may be represented by one or more bytes. - As per the Unicode Standard, a codepoint is an Unicode - Character, which may be represented by one or more bytes. - For example, the character "é" is represented with two - bytes: + For example, the codepoint "é" is two bytes: iex> byte_size("é") 2 @@ -36,64 +22,143 @@ defmodule String do iex> String.length("é") 1 - Furthermore, this module also presents the concept of - graphemes, which are multiple characters that may be - "perceived as a single character" by readers. For example, - the same "é" character written above could be represented - by the letter "e" followed by the accent ́: + Furthermore, this module also presents the concept of grapheme cluster + (from now on referenced as graphemes). Graphemes can consist of multiple + codepoints that may be perceived as a single character by readers. For + example, "é" can be represented either as a single "e with acute" codepoint + or as the letter "e" followed by a "combining acute accent" (two codepoints): - iex> string = "\x{0065}\x{0301}" + iex> string = "\u0065\u0301" iex> byte_size(string) 3 iex> String.length(string) 1 + iex> String.codepoints(string) + ["e", "́"] + iex> String.graphemes(string) + ["é"] Although the example above is made of two characters, it is perceived by users as one. Graphemes can also be two characters that are interpreted as one by some languages. For example, some languages may - consider "ch" as a grapheme. However, since this information - depends on the locale, it is not taken into account by this - module. + consider "ch" as a single character. However, since this + information depends on the locale, it is not taken into account + by this module. In general, the functions in this module rely on the Unicode - Standard, but does not contain any of the locale specific - behaviour. + Standard, but do not contain any of the locale specific behaviour. More information about graphemes can be found in the [Unicode Standard Annex #29](http://www.unicode.org/reports/tr29/). - This current Elixir version implements Extended Grapheme Cluster + The current Elixir version implements Extended Grapheme Cluster algorithm. + ## String and binary operations + + To act according to the Unicode Standard, many functions + in this module run in linear time, as they need to traverse + the whole string considering the proper Unicode codepoints. + + For example, `String.length/1` will take longer as + the input grows. 
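  For instance, computing the length of a ten-thousand-grapheme string has to
  walk all ten thousand graphemes (an illustrative sketch):

      iex> String.duplicate("é", 10_000) |> String.length()
      10000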
On the other hand, `Kernel.byte_size/1` always runs + in constant time (i.e. regardless of the input size). + + This means often there are performance costs in using the + functions in this module, compared to the more low-level + operations that work directly with binaries: + + * `Kernel.binary_part/3` - retrieves part of the binary + * `Kernel.bit_size/1` and `Kernel.byte_size/1` - size related functions + * `Kernel.is_bitstring/1` and `Kernel.is_binary/1` - type checking function + * Plus a number of functions for working with binaries (bytes) + in the [`:binary` module](http://www.erlang.org/doc/man/binary.html) + + There are many situations where using the `String` module can + be avoided in favor of binary functions or pattern matching. + For example, imagine you have a string `prefix` and you want to + remove this prefix from another string named `full`. + + One may be tempted to write: + + iex> take_prefix = fn full, prefix -> + ...> base = String.length(prefix) + ...> String.slice(full, base, String.length(full) - base) + ...> end + iex> take_prefix.("Mr. John", "Mr. ") + "John" + + Although the function above works, it performs poorly. To + calculate the length of the string, we need to traverse it + fully, so we traverse both `prefix` and `full` strings, then + slice the `full` one, traversing it again. + + A first attempt at improving it could be with ranges: + + iex> take_prefix = fn full, prefix -> + ...> base = String.length(prefix) + ...> String.slice(full, base..-1) + ...> end + iex> take_prefix.("Mr. John", "Mr. ") + "John" + + While this is much better (we don't traverse `full` twice), + it could still be improved. In this case, since we want to + extract a substring from a string, we can use `Kernel.byte_size/1` + and `Kernel.binary_part/3` as there is no chance we will slice in + the middle of a codepoint made of more than one byte: + + iex> take_prefix = fn full, prefix -> + ...> base = byte_size(prefix) + ...> binary_part(full, base, byte_size(full) - base) + ...> end + iex> take_prefix.("Mr. John", "Mr. ") + "John" + + Or simply use pattern matching: + + iex> take_prefix = fn full, prefix -> + ...> base = byte_size(prefix) + ...> <<_::binary-size(base), rest::binary>> = full + ...> rest + ...> end + iex> take_prefix.("Mr. John", "Mr. ") + "John" + + On the other hand, if you want to dynamically slice a string + based on an integer value, then using `String.slice/3` is the + best option as it guarantees we won't incorrectly split a valid + codepoint into multiple bytes. + ## Integer codepoints Although codepoints could be represented as integers, this module represents all codepoints as strings. For example: - iex> String.codepoints("josé") - ["j", "o", "s", "é"] + iex> String.codepoints("olá") + ["o", "l", "á"] There are a couple of ways to retrieve a character integer - codepoint. One may use the `?` special macro: + codepoint. One may use the `?` construct: - iex> ?j - 106 + iex> ?o + 111 - iex> ?é - 233 + iex> ?á + 225 Or also via pattern matching: - iex> << eacute :: utf8 >> = "é" - iex> eacute - 233 + iex> <> = "á" + iex> aacute + 225 As we have seen above, codepoints can be inserted into a string by their hexadecimal code: - "jos\x{0065}\x{0301}" #=> - "josé" + "ol\u0061\u0301" #=> + "olá" ## Self-synchronization @@ -103,55 +168,95 @@ defmodule String do codepoint needs to be rejected. This module relies on this behaviour to ignore such invalid - characters. For example, `length/1` is going to return + characters. 
For example, `length/1` will return a correct result even if an invalid codepoint is fed into it. In other words, this module expects invalid data to be detected - when retrieving data from the external source. For example, a - driver that reads strings from a database will be the one - responsible to check the validity of the encoding. + elsewhere, usually when retrieving data from the external source. + For example, a driver that reads strings from a database will be + responsible to check the validity of the encoding. `String.chunk/2` + can be used for breaking a string into valid and invalid parts. + + ## Patterns + + Many functions in this module work with patterns. For example, + `String.split/2` can split a string into multiple patterns given + a pattern. This pattern can be a string, a list of strings or + a compiled pattern: + + iex> String.split("foo bar", " ") + ["foo", "bar"] + + iex> String.split("foo bar!", [" ", "!"]) + ["foo", "bar", ""] + + iex> pattern = :binary.compile_pattern([" ", "!"]) + iex> String.split("foo bar!", pattern) + ["foo", "bar", ""] + + The compiled pattern is useful when the same match will + be done over and over again. Note though the compiled + pattern cannot be stored in a module attribute as the pattern + is generated at runtime and does not survive compile term. """ @type t :: binary @type codepoint :: t @type grapheme :: t + @type pattern :: t | [t] | :binary.cp @doc """ - Checks if a string is printable considering it is encoded - as UTF-8. Returns `true` if so, `false` otherwise. + Checks if a string contains only printable characters. + + Takes an optional `limit` as a second argument. `printable?/2` only + checks the printability of the string up to the `limit`. ## Examples iex> String.printable?("abc") true + iex> String.printable?("abc" <> <<0>>) + false + + iex> String.printable?("abc" <> <<0>>, 2) + true + """ @spec printable?(t) :: boolean + @spec printable?(t, non_neg_integer | :infinity) :: boolean + def printable?(string, counter \\ :infinity) + + def printable?(<<>>, _), do: true + def printable?(_, 0), do: true - def printable?(<< h :: utf8, t :: binary >>) - when h in ?\040..?\176 - when h in 0xA0..0xD7FF - when h in 0xE000..0xFFFD - when h in 0x10000..0x10FFFF do - printable?(t) + for char <- 0x20..0x7E do + def printable?(<>, counter) do + printable?(rest, decrement(counter)) + end + end + for char <- '\n\r\t\v\b\f\e\d\a' do + def printable?(<>, counter) do + printable?(rest, decrement(counter)) + end + end + def printable?(<>, counter) + when char in 0xA0..0xD7FF + when char in 0xE000..0xFFFD + when char in 0x10000..0x10FFFF do + printable?(rest, decrement(counter)) end - def printable?(<>), do: printable?(t) - def printable?(<>), do: printable?(t) - def printable?(<>), do: printable?(t) - def printable?(<>), do: printable?(t) - def printable?(<>), do: printable?(t) - def printable?(<>), do: printable?(t) - def printable?(<>), do: printable?(t) - def printable?(<>), do: printable?(t) - def printable?(<>), do: printable?(t) + def printable?(binary, _) when is_binary(binary), do: false - def printable?(<<>>), do: true - def printable?(b) when is_binary(b), do: false + defp decrement(:infinity), do: :infinity + defp decrement(counter), do: counter - 1 - @doc """ + @doc ~S""" Divides a string into substrings at each Unicode whitespace - occurrence with leading and trailing whitespace ignored. + occurrence with leading and trailing whitespace ignored. Groups + of whitespace are treated as a single occurrence. 
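  For instance, mixed spaces, tabs and newlines between two words count as a
  single divider (illustrative):

      iex> String.split("foo \t bar\n baz")
      ["foo", "bar", "baz"]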
Divisions do + not occur on non-breaking whitespace. ## Examples @@ -161,25 +266,43 @@ defmodule String do iex> String.split("foo" <> <<194, 133>> <> "bar") ["foo", "bar"] - iex> String.split(" foo bar ") + iex> String.split(" foo bar ") ["foo", "bar"] + iex> String.split("no\u00a0break") + ["no\u00a0break"] + """ @spec split(t) :: [t] - defdelegate split(binary), to: String.Unicode + defdelegate split(binary), to: String.Break @doc ~S""" Divides a string into substrings based on a pattern. Returns a list of these substrings. The pattern can - be a string, a list of strings or a regular expression. + be a string, a list of strings, or a regular expression. The string is split into as many parts as possible by - default, but can be controlled via the `parts: num` option. - If you pass `parts: :infinity`, it will return all possible parts. + default, but can be controlled via the `:parts` option. Empty strings are only removed from the result if the - `trim` option is set to `true`. + `:trim` option is set to `true`. + + When the pattern used is a regular expression, the string is + split using `Regex.split/3`. + + ## Options + + * `:parts` (positive integer or `:infinity`) - the string + is split into at most as many parts as this options specifies. + If `:infinity`, the string will be split into all possible + parts. Defaults to `:infinity`. + + * `:trim` (boolean) - if `true`, empty strings are removed from + the resulting list. + + This function also accepts all options accepted by `Regex.split/3` + if `pattern` is a regular expression. ## Examples @@ -210,7 +333,10 @@ defmodule String do iex> String.split(" a b c ", ~r{\s}, trim: true) ["a", "b", "c"] - Splitting on empty patterns returns codepoints: + iex> String.split("abc", ~r{b}, include_captures: true) + ["a", "b", "c"] + + Splitting on empty patterns returns graphemes: iex> String.split("abc", ~r{}) ["a", "b", "c", ""] @@ -224,53 +350,108 @@ defmodule String do iex> String.split("abc", "", parts: 2) ["a", "bc"] + A precompiled pattern can also be given: + + iex> pattern = :binary.compile_pattern([" ", ","]) + iex> String.split("1,2 3,4", pattern) + ["1", "2", "3", "4"] + """ - @spec split(t, t | [t] | Regex.t) :: [t] - @spec split(t, t | [t] | Regex.t, Keyword.t) :: [t] - def split(binary, pattern, options \\ []) + @spec split(t, pattern | Regex.t, Keyword.t) :: [t] + def split(string, pattern, options \\ []) - def split("", _pattern, _options), do: [""] + def split(string, %Regex{} = pattern, options) when is_binary(string) do + Regex.split(pattern, string, options) + end - def split(binary, "", options), do: split(binary, ~r""u, options) + def split(string, pattern, []) when is_binary(string) and pattern != "" do + :binary.split(string, pattern, [:global]) + end - def split(binary, pattern, options) do - if Regex.regex?(pattern) do - Regex.split(pattern, binary, options) - else - splits = - case Keyword.get(options, :parts, :infinity) do - num when is_number(num) and num > 0 -> - split_parts(binary, pattern, num - 1) - _ -> - :binary.split(binary, pattern, [:global]) - end - - if Keyword.get(options, :trim, false) do - for split <- splits, split != "", do: split - else - splits - end + def split(string, pattern, options) when is_binary(string) do + parts = Keyword.get(options, :parts, :infinity) + trim = Keyword.get(options, :trim, false) + pattern = maybe_compile_pattern(pattern) + split_each(string, pattern, trim, parts_to_index(parts)) + end + + defp parts_to_index(:infinity), do: 0 + defp parts_to_index(n) when is_integer(n) 
and n > 0, do: n + + defp split_each("", _pattern, true, 1), do: [] + defp split_each(string, _pattern, _trim, 1) when is_binary(string), do: [string] + defp split_each(string, pattern, trim, count) do + case do_splitter(string, pattern, trim) do + {h, t} -> [h | split_each(t, pattern, trim, count - 1)] + nil -> [] end end - defp split_parts("", _pattern, _num), do: [""] - defp split_parts(binary, pattern, num), do: split_parts(binary, pattern, num, []) - defp split_parts("", _pattern, _num, parts), do: Enum.reverse([""|parts]) - defp split_parts(binary, _pattern, 0, parts), do: Enum.reverse([binary|parts]) - defp split_parts(binary, pattern, num, parts) do - case :binary.split(binary, pattern) do - [head] -> Enum.reverse([head|parts]) - [head, rest] -> split_parts(rest, pattern, num - 1, [head|parts]) + @doc """ + Returns an enumerable that splits a string on demand. + + This is in contrast to `split/3` which splits all + the string upfront. + + Note splitter does not support regular expressions + (as it is often more efficient to have the regular + expressions traverse the string at once than in + multiple passes). + + ## Options + + * :trim - when `true`, does not emit empty patterns + + ## Examples + + iex> String.splitter("1,2 3,4 5,6 7,8,...,99999", [" ", ","]) |> Enum.take(4) + ["1", "2", "3", "4"] + + iex> String.splitter("abcd", "") |> Enum.take(10) + ["a", "b", "c", "d", ""] + + iex> String.splitter("abcd", "", trim: true) |> Enum.take(10) + ["a", "b", "c", "d"] + + """ + @spec splitter(t, pattern, Keyword.t) :: Enumerable.t + def splitter(string, pattern, options \\ []) do + pattern = maybe_compile_pattern(pattern) + trim = Keyword.get(options, :trim, false) + Stream.unfold(string, &do_splitter(&1, pattern, trim)) + end + + defp do_splitter(:nomatch, _pattern, _), do: nil + defp do_splitter("", _pattern, true), do: nil + defp do_splitter("", _pattern, false), do: {"", :nomatch} + + defp do_splitter(bin, "", _trim) do + next_grapheme(bin) + end + + defp do_splitter(bin, pattern, trim) do + case :binary.split(bin, pattern) do + ["", second] when trim -> do_splitter(second, pattern, trim) + [first, second] -> {first, second} + [first] -> {first, :nomatch} end end + defp maybe_compile_pattern(""), do: "" + defp maybe_compile_pattern(pattern) when is_tuple(pattern), do: pattern + defp maybe_compile_pattern(pattern), do: :binary.compile_pattern(pattern) + @doc """ Splits a string into two at the specified offset. When the offset given is negative, location is counted from the end of the string. - The offset is capped to the length of the string. + The offset is capped to the length of the string. Returns a tuple with + two elements. - Returns a tuple with two elements. + Note: keep in mind this function splits on graphemes and for such it + has to linearly traverse the string. If you want to split a string or + a binary based on the number of bytes, use `Kernel.binary_part/3` + instead. 
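  For instance, the offset counts graphemes rather than bytes (illustrative):

      iex> String.split_at("héllo", 2)
      {"hé", "llo"}
      iex> byte_size("héllo")
      6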
## Examples @@ -291,28 +472,87 @@ defmodule String do """ @spec split_at(t, integer) :: {t, t} - def split_at(string, offset) + def split_at(string, position) + + def split_at(string, position) when is_integer(position) and position >= 0 do + do_split_at(string, position) + end - def split_at(binary, index) when index == 0, do: - {"", binary} + def split_at(string, position) when is_integer(position) and position < 0 do + position = length(string) + position + case position >= 0 do + true -> do_split_at(string, position) + false -> {"", string} + end + end + + defp do_split_at(string, position) do + {byte_size, rest} = String.Unicode.split_at(string, position) + {binary_part(string, 0, byte_size), rest || ""} + end - def split_at(binary, index) when index > 0, do: - do_split_at(next_grapheme(binary), 0, index, "") + @doc ~S""" + Returns `true` if `string1` is canonically equivalent to 'string2'. - def split_at(binary, index) when index < 0, do: - do_split_at(next_grapheme(binary), 0, max(0, byte_size(binary)+index), "") + It performs Normalization Form Canonical Decomposition (NFD) on the + strings before comparing them. This function is equivalent to: - defp do_split_at(nil, _, _, acc), do: - {acc, ""} + String.normalize(string1, :nfd) == String.normalize(string2, :nfd) - defp do_split_at({grapheme, rest}, current_pos, target_pos, acc) when current_pos < target_pos, do: - do_split_at(next_grapheme(rest), current_pos+1, target_pos, acc <> grapheme) + Therefore, if you plan to compare multiple strings, multiple times + in a row, you may normalize them upfront and compare them directly + to avoid multiple normalization passes. - defp do_split_at({grapheme, rest}, pos, pos, acc), do: - {acc, grapheme <> rest} + ## Examples + + iex> String.equivalent?("abc", "abc") + true + + iex> String.equivalent?("man\u0303ana", "mañana") + true + + iex> String.equivalent?("abc", "ABC") + false + + iex> String.equivalent?("nø", "nó") + false + + """ + @spec equivalent?(t, t) :: boolean + def equivalent?(string1, string2) do + normalize(string1, :nfd) == normalize(string2, :nfd) + end @doc """ - Convert all characters on the given string to uppercase. + Converts all characters in `string` to Unicode normalization + form identified by `form`. + + ## Forms + + The supported forms are: + + * `:nfd` - Normalization Form Canonical Decomposition. + Characters are decomposed by canonical equivalence, and + multiple combining characters are arranged in a specific + order. + + * `:nfc` - Normalization Form Canonical Composition. + Characters are decomposed and then recomposed by canonical equivalence. + + ## Examples + + iex> String.normalize("yêṩ", :nfd) + "yêṩ" + + iex> String.normalize("leña", :nfc) + "leña" + + """ + @spec normalize(t, atom) :: t + defdelegate normalize(string, form), to: String.Normalizer + + @doc """ + Converts all characters in the given string to uppercase. ## Examples @@ -322,15 +562,15 @@ defmodule String do iex> String.upcase("ab 123 xpto") "AB 123 XPTO" - iex> String.upcase("josé") - "JOSÉ" + iex> String.upcase("olá") + "OLÁ" """ @spec upcase(t) :: t - defdelegate upcase(binary), to: String.Unicode + defdelegate upcase(binary), to: String.Casing @doc """ - Convert all characters on the given string to lowercase. + Converts all characters in the given string to lowercase. 
## Examples @@ -340,16 +580,16 @@ defmodule String do iex> String.downcase("AB 123 XPTO") "ab 123 xpto" - iex> String.downcase("JOSÉ") - "josé" + iex> String.downcase("OLÁ") + "olá" """ @spec downcase(t) :: t - defdelegate downcase(binary), to: String.Unicode + defdelegate downcase(binary), to: String.Casing @doc """ Converts the first character in the given string to - uppercase and the remaining to lowercase. + uppercase and the remainder to lowercase. This relies on the titlecase information provided by the Unicode Standard. Note this function makes @@ -364,210 +604,478 @@ defmodule String do iex> String.capitalize("fin") "Fin" - iex> String.capitalize("josé") - "José" + iex> String.capitalize("olá") + "Olá" """ @spec capitalize(t) :: t def capitalize(string) when is_binary(string) do - {char, rest} = String.Unicode.titlecase_once(string) + {char, rest} = String.Casing.titlecase_once(string) char <> downcase(rest) end - @doc """ - Returns a string where trailing Unicode whitespace - has been removed. + @doc false + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + defdelegate rstrip(binary), to: String.Break, as: :trim_trailing - ## Examples + @doc false + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + def rstrip(string, char) when is_integer(char) do + replace_trailing(string, <>, "") + end - iex> String.rstrip(" abc ") - " abc" + @doc """ + Replaces all leading occurrences of `match` by `replacement` of `match` in `string`. - """ - @spec rstrip(t) :: t - defdelegate rstrip(binary), to: String.Unicode + Returns the string untouched if there are no occurrences. - @doc """ - Returns a string where trailing `char` have been removed. + If `match` is `""`, this function raises an `ArgumentError` exception: this + happens because this function replaces **all** the occurrences of `match` at + the beginning of `string`, and it's impossible to replace "multiple" + occurrences of `""`. ## Examples - iex> String.rstrip(" abc _", ?_) - " abc " + iex> String.replace_leading("hello world", "hello ", "") + "world" + iex> String.replace_leading("hello hello world", "hello ", "") + "world" + + iex> String.replace_leading("hello world", "hello ", "ola ") + "ola world" + iex> String.replace_leading("hello hello world", "hello ", "ola ") + "ola ola world" """ - @spec rstrip(t, char) :: t + @spec replace_leading(t, t, t) :: t | no_return + def replace_leading(string, match, replacement) + when is_binary(string) and is_binary(match) and is_binary(replacement) do + if match == "" do + raise ArgumentError, "cannot use an empty string as the match to replace" + end - def rstrip("", _char), do: "" + prefix_size = byte_size(match) + suffix_size = byte_size(string) - prefix_size + replace_leading(string, match, replacement, prefix_size, suffix_size, 0) + end - # Do a quick check before we traverse the whole - # binary. :binary.last is a fast operation (it - # does not traverse the whole binary). 
- def rstrip(string, char) when char in 0..127 do - if :binary.last(string) == char do - do_rstrip(string, "", char) - else - string + defp replace_leading(string, match, replacement, prefix_size, suffix_size, acc) when suffix_size >= 0 do + case string do + <> when prefix == match -> + replace_leading(suffix, match, replacement, prefix_size, suffix_size - prefix_size, acc + 1) + _ -> + duplicate(replacement, acc) <> string end end - def rstrip(string, char) when is_integer(char) do - do_rstrip(string, "", char) + defp replace_leading(string, _match, replacement, _prefix_size, _suffix_size, acc) do + duplicate(replacement, acc) <> string end - defp do_rstrip(<>, buffer, char) do - <>, char) :: binary>> + @doc """ + Replaces all trailing occurrences of `match` by `replacement` in `string`. + + Returns the string untouched if there are no occurrences. + + If `match` is `""`, this function raises an `ArgumentError` exception: this + happens because this function replaces **all** the occurrences of `match` at + the end of `string`, and it's impossible to replace "multiple" occurrences of + `""`. + + ## Examples + + iex> String.replace_trailing("hello world", " world", "") + "hello" + iex> String.replace_trailing("hello world world", " world", "") + "hello" + + iex> String.replace_trailing("hello world", " world", " mundo") + "hello mundo" + iex> String.replace_trailing("hello world world", " world", " mundo") + "hello mundo mundo" + + """ + @spec replace_trailing(t, t, t) :: t | no_return + def replace_trailing(string, match, replacement) + when is_binary(string) and is_binary(match) and is_binary(replacement) do + if match == "" do + raise ArgumentError, "cannot use an empty string as the match to replace" + end + + suffix_size = byte_size(match) + prefix_size = byte_size(string) - suffix_size + replace_trailing(string, match, replacement, prefix_size, suffix_size, 0) end - defp do_rstrip(<>, buffer, another_char) do - <> + defp replace_trailing(string, match, replacement, prefix_size, suffix_size, acc) when prefix_size >= 0 do + case string do + <> when suffix == match -> + replace_trailing(prefix, match, replacement, prefix_size - suffix_size, suffix_size, acc + 1) + _ -> + string <> duplicate(replacement, acc) + end end - defp do_rstrip(<<>>, _, _) do - <<>> + defp replace_trailing(string, _match, replacement, _prefix_size, _suffix_size, acc) do + string <> duplicate(replacement, acc) end @doc """ - Returns a string where leading Unicode whitespace - has been removed. + Replaces prefix in `string` by `replacement` if it matches `match`. + + Returns the string untouched if there is no match. If `match` is an empty + string (`""`), `replacement` is just prepended to `string`. 
## Examples - iex> String.lstrip(" abc ") - "abc " + iex> String.replace_prefix("world", "hello ", "") + "world" + iex> String.replace_prefix("hello world", "hello ", "") + "world" + iex> String.replace_prefix("hello hello world", "hello ", "") + "hello world" + + iex> String.replace_prefix("world", "hello ", "ola ") + "world" + iex> String.replace_prefix("hello world", "hello ", "ola ") + "ola world" + iex> String.replace_prefix("hello hello world", "hello ", "ola ") + "ola hello world" + + iex> String.replace_prefix("world", "", "hello ") + "hello world" """ - defdelegate lstrip(binary), to: String.Unicode + @spec replace_prefix(t, t, t) :: t + def replace_prefix(string, match, replacement) + when is_binary(string) and is_binary(match) and is_binary(replacement) do + prefix_size = byte_size(match) + suffix_size = byte_size(string) - prefix_size + + case string do + <> when prefix == match -> + replacement <> suffix + _ -> + string + end + end @doc """ - Returns a string where leading `char` have been removed. + Replaces suffix in `string` by `replacement` if it matches `match`. + + Returns the string untouched if there is no match. If `match` is an empty + string (`""`), `replacement` is just appended to `string`. ## Examples - iex> String.lstrip("_ abc _", ?_) - " abc _" + iex> String.replace_suffix("hello", " world", "") + "hello" + iex> String.replace_suffix("hello world", " world", "") + "hello" + iex> String.replace_suffix("hello world world", " world", "") + "hello world" + + iex> String.replace_suffix("hello", " world", " mundo") + "hello" + iex> String.replace_suffix("hello world", " world", " mundo") + "hello mundo" + iex> String.replace_suffix("hello world world", " world", " mundo") + "hello world mundo" + + iex> String.replace_suffix("hello", "", " world") + "hello world" """ + @spec replace_suffix(t, t, t) :: t + def replace_suffix(string, match, replacement) + when is_binary(string) and is_binary(match) and is_binary(replacement) do + suffix_size = byte_size(match) + prefix_size = byte_size(string) - suffix_size + + case string do + <> when suffix == match -> + prefix <> replacement + _ -> + string + end + end + + @doc false + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + defdelegate lstrip(binary), to: String.Break, as: :trim_leading - @spec lstrip(t, char) :: t + @doc false + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + def lstrip(string, char) when is_integer(char) do + replace_leading(string, <>, "") + end - def lstrip(<>, char) when is_integer(char) do - <> + @doc false + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + def strip(string) do + trim(string) end - def lstrip(other, char) when is_integer(char) do - other + @doc false + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + def strip(string, char) do + trim(string, <>) end + @doc ~S""" + Returns a string where all leading Unicode whitespaces + have been removed. + + ## Examples + + iex> String.trim_leading("\n abc ") + "abc " + + """ + @spec trim_leading(t) :: t + defdelegate trim_leading(string), to: String.Break + @doc """ - Returns a string where leading/trailing Unicode whitespace + Returns a string where all leading `to_trim`s have been removed. 
+ + ## Examples + + iex> String.trim_leading("__ abc _", "_") + " abc _" + + iex> String.trim_leading("1 abc", "11") + "1 abc" + + """ + @spec trim_leading(t, t) :: t + def trim_leading(string, to_trim) do + replace_leading(string, to_trim, "") + end + + @doc ~S""" + Returns a string where all trailing Unicode whitespaces has been removed. ## Examples - iex> String.strip(" abc ") - "abc" + iex> String.trim_trailing(" abc\n ") + " abc" """ - @spec strip(t) :: t + @spec trim_trailing(t) :: t + defdelegate trim_trailing(string), to: String.Break - def strip(string) do - rstrip(lstrip(string)) + @doc """ + Returns a string where all trailing `to_trim`s have been removed. + + ## Examples + + iex> String.trim_trailing("_ abc __", "_") + "_ abc " + + iex> String.trim_trailing("abc 1", "11") + "abc 1" + + """ + @spec trim_trailing(t, t) :: t + def trim_trailing(string, to_trim) do + replace_trailing(string, to_trim, "") + end + + @doc ~S""" + Returns a string where all leading and trailing Unicode whitespaces + have been removed. + + ## Examples + + iex> String.trim("\n abc\n ") + "abc" + + """ + @spec trim(t) :: t + def trim(string) do + string + |> trim_leading() + |> trim_trailing() end @doc """ - Returns a string where leading/trailing `char` have been + Returns a string where all leading and trailing `to_trim`s have been removed. ## Examples - iex> String.strip("a abc a", ?a) + iex> String.trim("a abc a", "a") " abc " """ - @spec strip(t, char) :: t - - def strip(string, char) do - rstrip(lstrip(string, char), char) + @spec trim(t, t) :: t + def trim(string, to_trim) do + string + |> trim_leading(to_trim) + |> trim_trailing(to_trim) end @doc ~S""" - Returns a new string of length `len` with `subject` right justified and - padded with `padding`. If `padding` is not present, it defaults to - whitespace. When `len` is less than the length of `subject`, `subject` is - returned. + Returns a new string padded with a leading filler + which is made of elements from the `padding`. + + Passing a list of strings as `padding` will take one element of the list + for every missing entry. If the list is shorter than the number of inserts, + the filling will start again from the beginning of the list. + Passing a string `padding` is equivalent to passing the list of graphemes in it. + If no `padding` is given, it defaults to whitespace. + + When `count` is less than or equal to the length of `string`, + given `string` is returned. + + Raises `ArgumentError` if the given `padding` contains non-string element. 
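  For instance (illustrative):

      iex> String.pad_leading("abc", 5, ["1", 2])
      ** (ArgumentError) expected a string padding element, got: 2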
## Examples - iex> String.rjust("abc", 5) + iex> String.pad_leading("abc", 5) " abc" - iex> String.rjust("abc", 5, ?-) - "--abc" + iex> String.pad_leading("abc", 4, "12") + "1abc" + + iex> String.pad_leading("abc", 6, "12") + "121abc" + + iex> String.pad_leading("abc", 5, ["1", "23"]) + "123abc" """ - @spec rjust(t, pos_integer) :: t - @spec rjust(t, pos_integer, char) :: t + @spec pad_leading(t, non_neg_integer, t | [t]) :: t + def pad_leading(string, count, padding \\ [" "]) - def rjust(subject, len) do - rjust(subject, len, ?\s) + def pad_leading(string, count, padding) when is_binary(padding) do + pad_leading(string, count, graphemes(padding)) end - def rjust(subject, len, padding) when is_integer(padding) do - do_justify(subject, len, padding, :right) + def pad_leading(string, count, [_ | _] = padding) + when is_binary(string) and is_integer(count) and count >= 0 do + pad(:leading, string, count, padding) end @doc ~S""" - Returns a new string of length `len` with `subject` left justified and padded - with `padding`. If `padding` is not present, it defaults to whitespace. When - `len` is less than the length of `subject`, `subject` is returned. + Returns a new string padded with a trailing filler + which is made of elements from the `padding`. + + Passing a list of strings as `padding` will take one element of the list + for every missing entry. If the list is shorter than the number of inserts, + the filling will start again from the beginning of the list. + Passing a string `padding` is equivalent to passing the list of graphemes in it. + If no `padding` is given, it defaults to whitespace. + + When `count` is less than or equal to the length of `string`, + given `string` is returned. + + Raises `ArgumentError` if the given `padding` contains non-string element. 
## Examples - iex> String.ljust("abc", 5) + iex> String.pad_trailing("abc", 5) "abc " - iex> String.ljust("abc", 5, ?-) - "abc--" + iex> String.pad_trailing("abc", 4, "12") + "abc1" + + iex> String.pad_trailing("abc", 6, "12") + "abc121" + + iex> String.pad_trailing("abc", 5, ["1", "23"]) + "abc123" """ - @spec ljust(t, pos_integer) :: t - @spec ljust(t, pos_integer, char) :: t + @spec pad_trailing(t, non_neg_integer, t | [t]) :: t + def pad_trailing(string, count, padding \\ [" "]) - def ljust(subject, len) do - ljust(subject, len, ?\s) + def pad_trailing(string, count, padding) when is_binary(padding) do + pad_trailing(string, count, graphemes(padding)) end - def ljust(subject, len, padding) when is_integer(padding) do - do_justify(subject, len, padding, :left) + def pad_trailing(string, count, [_ | _] = padding) + when is_binary(string) and is_integer(count) and count >= 0 do + pad(:trailing, string, count, padding) end - defp do_justify(subject, 0, _padding, _type) do - subject + defp pad(kind, string, count, padding) do + string_len = length(string) + if string_len >= count do + string + else + filler = build_filler(count - string_len, padding, padding, 0, []) + case kind do + :leading -> [filler | string] + :trailing -> [string | filler] + end + |> IO.iodata_to_binary + end end - defp do_justify(subject, len, padding, type) when is_integer(padding) do - subject_len = length(subject) + defp build_filler(0, _source, _padding, _size, filler), do: filler + + defp build_filler(count, source, [], size, filler) do + rem_filler = + rem(count, size) + |> build_filler(source, source, 0, []) + filler = + filler + |> IO.iodata_to_binary + |> duplicate(div(count, size) + 1) + [filler | rem_filler] + end - cond do - subject_len >= len -> - subject - subject_len < len -> - fill = duplicate(<>, len - subject_len) + defp build_filler(count, source, [elem | rest], size, filler) + when is_binary(elem) do + build_filler(count - 1, source, rest, size + 1, [filler | elem]) + end - case type do - :left -> subject <> fill - :right -> fill <> subject - end - end + defp build_filler(_count, _source, [elem | _rest], _size, _filler) do + raise ArgumentError, "expected a string padding element, got: #{inspect(elem)}" + end + + @doc false + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + def rjust(subject, len, pad \\ ?\s) when is_integer(pad) and is_integer(len) and len >= 0 do + pad(:leading, subject, len, [<>]) + end + + @doc false + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + def ljust(subject, len, pad \\ ?\s) when is_integer(pad) and is_integer(len) and len >= 0 do + pad(:trailing, subject, len, [<>]) end @doc ~S""" - Returns a new binary based on `subject` by replacing the parts - matching `pattern` by `replacement`. By default, it replaces - all entries, except if the `global` option is set to `false`. + Returns a new string created by replacing occurrences of `pattern` in + `subject` with `replacement`. + + The `pattern` may be a string or a regular expression. - A `pattern` may be a string or a regex. + By default it replaces all occurrences but this behaviour can be controlled + through the `:global` option; see the "Options" section below. + + ## Options + + * `:global` - (boolean) if `true`, all occurrences of `pattern` are replaced + with `replacement`, otherwise only the first occurrence is + replaced. Defaults to `true` + + * `:insert_replaced` - (integer or list of integers) specifies the position + where to insert the replaced part inside the `replacement`. 
If any + position given in the `:insert_replaced` option is larger than the + replacement string, or is negative, an `ArgumentError` is raised. See the + examples below ## Examples @@ -577,17 +1085,22 @@ defmodule String do iex> String.replace("a,b,c", ",", "-", global: false) "a-b,c" - The pattern can also be a regex. In those cases, one can give `\N` - in the `replacement` string to access a specific capture in the regex: + When the pattern is a regular expression, one can give `\N` or + `\g{N}` in the `replacement` string to access a specific capture in the + regular expression: - iex> String.replace("a,b,c", ~r/,(.)/, ",\\1\\1") + iex> String.replace("a,b,c", ~r/,(.)/, ",\\1\\g{1}") "a,bb,cc" - Notice we had to escape the escape character `\`. By giving `&`, - one can inject the whole matched pattern in the replacement string. + Notice we had to escape the backslash escape character (i.e., we used `\\N` + instead of just `\N` to escape the backslash; same thing for `\\g{N}`). By + giving `\0`, one can inject the whole matched pattern in the replacement + string. - When strings are used as a pattern, a developer can also use the - replaced part inside the `replacement` via the `:insert_replaced` option: + When the pattern is a string, a developer can use the replaced part inside + the `replacement` by using the `:insert_replaced` option and specifying the + position(s) inside the `replacement` where the string pattern will be + inserted: iex> String.replace("a,b,c", "b", "[]", insert_replaced: 1) "a,[b],c" @@ -599,9 +1112,7 @@ defmodule String do "a[,,]b[,,]c" """ - @spec replace(t, t, t) :: t - @spec replace(t, t, t, Keyword.t) :: t - + @spec replace(t, pattern | Regex.t, t, Keyword.t) :: t def replace(subject, pattern, replacement, options \\ []) when is_binary(replacement) do if Regex.regex?(pattern) do Regex.replace(pattern, subject, replacement, global: options[:global]) @@ -612,17 +1123,21 @@ defmodule String do end defp translate_replace_options(options) do - opts = if Keyword.get(options, :global) != false, do: [:global], else: [] + global = + if Keyword.get(options, :global) != false, + do: [:global], + else: [] - if insert = Keyword.get(options, :insert_replaced) do - opts = [{:insert_replaced, insert}|opts] - end + insert = + if insert = Keyword.get(options, :insert_replaced), + do: [{:insert_replaced, insert}], + else: [] - opts + global ++ insert end - @doc """ - Reverses the given string. Works on graphemes. + @doc ~S""" + Reverses the graphemes in given string. ## Examples @@ -635,6 +1150,21 @@ defmodule String do iex> String.reverse("hello ∂og") "go∂ olleh" + Keep in mind reversing the same string twice does + not necessarily yield the original string: + + iex> "̀e" + "̀e" + iex> String.reverse("̀e") + "è" + iex> String.reverse String.reverse("̀e") + "è" + + In the first example the accent is before the vowel, so + it is considered two graphemes. However, when you reverse + it once, you have the vowel followed by the accent, which + becomes one grapheme. Reversing it again will keep it as + one single grapheme. """ @spec reverse(t) :: t def reverse(string) do @@ -642,13 +1172,13 @@ defmodule String do end defp do_reverse({grapheme, rest}, acc) do - do_reverse(next_grapheme(rest), [grapheme|acc]) + do_reverse(next_grapheme(rest), [grapheme | acc]) end defp do_reverse(nil, acc), do: IO.iodata_to_binary(acc) @doc """ - Returns a binary `subject` duplicated `n` times. + Returns a string `subject` duplicated `n` times. 
## Examples @@ -662,7 +1192,7 @@ defmodule String do "abcabc" """ - @spec duplicate(t, pos_integer) :: t + @spec duplicate(t, non_neg_integer) :: t def duplicate(subject, n) when is_integer(n) and n >= 0 do :binary.copy(subject, n) end @@ -670,26 +1200,34 @@ defmodule String do @doc """ Returns all codepoints in the string. + For details about codepoints and graphemes, see the `String` module documentation. + ## Examples - iex> String.codepoints("josé") - ["j", "o", "s", "é"] + iex> String.codepoints("olá") + ["o", "l", "á"] iex> String.codepoints("оптими зации") - ["о","п","т","и","м","и"," ","з","а","ц","и","и"] + ["о", "п", "т", "и", "м", "и", " ", "з", "а", "ц", "и", "и"] iex> String.codepoints("ἅἪῼ") - ["ἅ","Ἢ","ῼ"] + ["ἅ", "Ἢ", "ῼ"] + + iex> String.codepoints("\u00e9") + ["é"] + + iex> String.codepoints("\u0065\u0301") + ["e", "́"] """ @spec codepoints(t) :: [codepoint] defdelegate codepoints(string), to: String.Unicode @doc """ - Returns the next codepoint in a String. + Returns the next codepoint in a string. The result is a tuple with the codepoint and the - remaining of the string or `nil` in case + remainder of the string or `nil` in case the string reached its end. As with other functions in the String module, this @@ -699,8 +1237,8 @@ defmodule String do ## Examples - iex> String.next_codepoint("josé") - {"j", "osé"} + iex> String.next_codepoint("olá") + {"o", "lá"} """ @compile {:inline, next_codepoint: 1} @@ -708,7 +1246,7 @@ defmodule String do defdelegate next_codepoint(string), to: String.Unicode @doc ~S""" - Checks whether `str` contains only valid characters. + Checks whether `string` contains only valid characters. ## Examples @@ -718,64 +1256,42 @@ defmodule String do iex> String.valid?("ø") true - iex> String.valid?(<<0xffff :: 16>>) + iex> String.valid?(<<0xFFFF :: 16>>) false - iex> String.valid?("asd" <> <<0xffff :: 16>>) + iex> String.valid?(<<0xEF, 0xB7, 0x90>>) + true + + iex> String.valid?("asd" <> <<0xFFFF :: 16>>) false """ @spec valid?(t) :: boolean + def valid?(string) - noncharacters = Enum.to_list(?\x{FDD0}..?\x{FDEF}) ++ - [ ?\x{0FFFE}, ?\x{0FFFF}, ?\x{1FFFE}, ?\x{1FFFF}, ?\x{2FFFE}, ?\x{2FFFF}, - ?\x{3FFFE}, ?\x{3FFFF}, ?\x{4FFFE}, ?\x{4FFFF}, ?\x{5FFFE}, ?\x{5FFFF}, - ?\x{6FFFE}, ?\x{6FFFF}, ?\x{7FFFE}, ?\x{7FFFF}, ?\x{8FFFE}, ?\x{8FFFF}, - ?\x{9FFFE}, ?\x{9FFFF}, ?\x{10FFFE}, ?\x{10FFFF} ] - - for noncharacter <- noncharacters do - def valid?(<< unquote(noncharacter) :: utf8, _ :: binary >>), do: false - end - - def valid?(<<_ :: utf8, t :: binary>>), do: valid?(t) + def valid?(<<_::utf8, t::binary>>), do: valid?(t) def valid?(<<>>), do: true def valid?(_), do: false - @doc ~S""" - Checks whether `str` is a valid character. - - All characters are codepoints, but some codepoints - are not valid characters. They may be reserved, private, - or other. - - More info at: http://en.wikipedia.org/wiki/Mapping_of_Unicode_characters#Noncharacters - - ## Examples - - iex> String.valid_character?("a") - true - - iex> String.valid_character?("ø") - true - - iex> String.valid_character?("\x{ffff}") - false - - """ - @spec valid_character?(t) :: boolean - - def valid_character?(<<_ :: utf8>> = codepoint), do: valid?(codepoint) - def valid_character?(_), do: false + @doc false + # TODO: Remove on 2.0 + # (hard-deprecated in elixir_dispatch) + def valid_character?(string) do + case string do + <<_::utf8>> -> valid?(string) + _ -> false + end + end @doc ~S""" Splits the string into chunks of characters that share a common trait. 
The trait can be one of two options: - * `:valid` – the string is split into chunks of valid and invalid character - sequences + * `:valid` - the string is split into chunks of valid and invalid + character sequences - * `:printable` – the string is split into chunks of printable and + * `:printable` - the string is split into chunks of printable and non-printable character sequences Returns a list of binaries each of which contains only one kind of @@ -786,13 +1302,13 @@ defmodule String do ## Examples iex> String.chunk(<>, :valid) - ["abc\000"] + ["abc\0"] - iex> String.chunk(<>, :valid) - ["abc\000", <<0x0ffff::utf8>>] + iex> String.chunk(<>, :valid) + ["abc\0", <<0xFFFF::utf16>>] - iex> String.chunk(<>, :printable) - ["abc", <<0, 0x0ffff::utf8>>] + iex> String.chunk(<>, :printable) + ["abc", <<0, 0x0FFFF::utf8>>] """ @spec chunk(t, :valid | :printable) :: [t] @@ -801,23 +1317,22 @@ defmodule String do def chunk("", _), do: [] - def chunk(str, trait) when trait in [:valid, :printable] do - {cp, _} = next_codepoint(str) + def chunk(string, trait) when trait in [:valid, :printable] do + {cp, _} = next_codepoint(string) pred_fn = make_chunk_pred(trait) - do_chunk(str, pred_fn.(cp), pred_fn) + do_chunk(string, pred_fn.(cp), pred_fn) end - - defp do_chunk(str, flag, pred_fn), do: do_chunk(str, [], <<>>, flag, pred_fn) + defp do_chunk(string, flag, pred_fn), do: do_chunk(string, [], <<>>, flag, pred_fn) defp do_chunk(<<>>, acc, <<>>, _, _), do: Enum.reverse(acc) defp do_chunk(<<>>, acc, chunk, _, _), do: Enum.reverse(acc, [chunk]) - defp do_chunk(str, acc, chunk, flag, pred_fn) do - {cp, rest} = next_codepoint(str) + defp do_chunk(string, acc, chunk, flag, pred_fn) do + {cp, rest} = next_codepoint(string) if pred_fn.(cp) != flag do - do_chunk(rest, [chunk|acc], cp, not flag, pred_fn) + do_chunk(rest, [chunk | acc], cp, not flag, pred_fn) else do_chunk(rest, acc, chunk <> cp, flag, pred_fn) end @@ -827,39 +1342,71 @@ defmodule String do defp make_chunk_pred(:printable), do: &printable?/1 @doc """ - Returns unicode graphemes in the string as per Extended Grapheme - Cluster algorithm outlined in the [Unicode Standard Annex #29, + Returns Unicode graphemes in the string as per Extended Grapheme + Cluster algorithm. + + The algorithm is outlined in the [Unicode Standard Annex #29, Unicode Text Segmentation](http://www.unicode.org/reports/tr29/). + For details about codepoints and graphemes, see the `String` module documentation. + ## Examples - iex> String.graphemes("Ā̀stute") - ["Ā̀","s","t","u","t","e"] + iex> String.graphemes("Ńaïve") + ["Ń", "a", "ï", "v", "e"] + + iex> String.graphemes("\u00e9") + ["é"] + + iex> String.graphemes("\u0065\u0301") + ["é"] """ @spec graphemes(t) :: [grapheme] - defdelegate graphemes(string), to: String.Graphemes + defdelegate graphemes(string), to: String.Unicode + + @compile {:inline, next_grapheme: 1, next_grapheme_size: 1} @doc """ - Returns the next grapheme in a String. + Returns the next grapheme in a string. The result is a tuple with the grapheme and the - remaining of the string or `nil` in case + remainder of the string or `nil` in case the String reached its end. 
## Examples - iex> String.next_grapheme("josé") - {"j", "osé"} + iex> String.next_grapheme("olá") + {"o", "lá"} """ - @compile {:inline, next_grapheme: 1} @spec next_grapheme(t) :: {grapheme, t} | nil - defdelegate next_grapheme(string), to: String.Graphemes + def next_grapheme(binary) do + case next_grapheme_size(binary) do + {size, rest} -> {:binary.part(binary, 0, size), rest} + nil -> nil + end + end + + @doc """ + Returns the size of the next grapheme. + + The result is a tuple with the next grapheme size and + the remainder of the string or `nil` in case the string + reached its end. + + ## Examples + + iex> String.next_grapheme_size("olá") + {1, "lá"} + + """ + @spec next_grapheme_size(t) :: {pos_integer, t} | nil + defdelegate next_grapheme_size(string), to: String.Unicode @doc """ - Returns the first grapheme from an utf8 string, - nil if the string is empty. + Returns the first grapheme from a UTF-8 string, + `nil` if the string is empty. ## Examples @@ -879,7 +1426,7 @@ defmodule String do end @doc """ - Returns the last grapheme from an utf8 string, + Returns the last grapheme from a UTF-8 string, `nil` if the string is empty. ## Examples @@ -903,7 +1450,7 @@ defmodule String do defp do_last(nil, last_char), do: last_char @doc """ - Returns the number of unicode graphemes in an utf8 string. + Returns the number of Unicode graphemes in a UTF-8 string. ## Examples @@ -915,18 +1462,10 @@ defmodule String do """ @spec length(t) :: non_neg_integer - def length(string) do - do_length(next_grapheme(string)) - end - - defp do_length({_, rest}) do - 1 + do_length(next_grapheme(rest)) - end - - defp do_length(nil), do: 0 + defdelegate length(string), to: String.Unicode @doc """ - Returns the grapheme in the `position` of the given utf8 `string`. + Returns the grapheme at the `position` of the given UTF-8 `string`. If `position` is greater than `string` length, then it returns `nil`. ## Examples @@ -949,33 +1488,35 @@ defmodule String do """ @spec at(t, integer) :: grapheme | nil - def at(string, position) when position >= 0 do - do_at(next_grapheme(string), position, 0) + def at(string, position) when is_integer(position) and position >= 0 do + do_at(string, position) end - def at(string, position) when position < 0 do - real_pos = length(string) - abs(position) - case real_pos >= 0 do - true -> do_at(next_grapheme(string), real_pos, 0) + def at(string, position) when is_integer(position) and position < 0 do + position = length(string) + position + case position >= 0 do + true -> do_at(string, position) false -> nil end end - defp do_at({_ , rest}, desired_pos, current_pos) when desired_pos > current_pos do - do_at(next_grapheme(rest), desired_pos, current_pos + 1) - end - - defp do_at({char, _}, desired_pos, current_pos) when desired_pos == current_pos do - char + defp do_at(string, position) do + case String.Unicode.split_at(string, position) do + {_, nil} -> nil + {_, rest} -> first(rest) + end end - defp do_at(nil, _, _), do: nil - @doc """ - Returns a substring starting at the offset given by the first, and - a length given by the second. + Returns a substring starting at the offset `start`, and of + length `len`. + If the offset is greater than string length, then it returns `""`. + Remember this function works with Unicode graphemes and considers + the slices to represent grapheme offsets. If you want to split + on raw bytes, check `Kernel.binary_part/3` instead. 
+ ## Examples iex> String.slice("elixir", 1, 3) @@ -1010,13 +1551,18 @@ defmodule String do end def slice(string, start, len) when start >= 0 and len >= 0 do - do_slice(next_grapheme(string), start, start + len - 1, 0, "") + case String.Unicode.split_at(string, start) do + {_, nil} -> "" + {start_bytes, rest} -> + {len_bytes, _} = String.Unicode.split_at(rest, len) + binary_part(string, start_bytes, len_bytes) + end end def slice(string, start, len) when start < 0 and len >= 0 do - real_start_pos = length(string) - abs(start) - case real_start_pos >= 0 do - true -> do_slice(next_grapheme(string), real_start_pos, real_start_pos + len - 1, 0, "") + start = length(string) + start + case start >= 0 do + true -> slice(string, start, len) false -> "" end end @@ -1028,6 +1574,14 @@ defmodule String do If the start of the range is not a valid offset for the given string or if the range is in reverse order, returns `""`. + If the start or end of the range is negative, the whole string + is traversed first in order to convert the negative indices into + positive ones. + + Remember this function works with Unicode graphemes and considers + the slices to represent grapheme offsets. If you want to split + on raw bytes, check `Kernel.binary_part/3` instead. + ## Examples iex> String.slice("elixir", 1..3) @@ -1060,117 +1614,125 @@ defmodule String do iex> String.slice("a", 1..1500) "" - iex> String.slice("a", 2..1500) - "" - """ @spec slice(t, Range.t) :: t def slice(string, range) - def slice(string, first..last) when first >= 0 and last >= 0 do - do_slice(next_grapheme(string), first, last, 0, "") - end - - def slice(string, first..last) do - total = length(string) - - if first < 0 do - first = total + first - end + def slice("", _.._), do: "" - if last < 0 do - last = total + last + def slice(string, first..-1) when first >= 0 do + case String.Unicode.split_at(string, first) do + {_, nil} -> + "" + {start_bytes, _} -> + binary_part(string, start_bytes, byte_size(string) - start_bytes) end + end - if first >= 0 do - do_slice(next_grapheme(string), first, last, 0, "") + def slice(string, first..last) when first >= 0 and last >= 0 do + if last >= first do + slice(string, first, last - first + 1) else "" end end - defp do_slice(_, start_pos, last_pos, _, _) when start_pos > last_pos do - "" - end + def slice(string, first..last) do + {bytes, length} = do_acc_bytes(next_grapheme_size(string), [], 0) - defp do_slice({_, rest}, start_pos, last_pos, current_pos, acc) when current_pos < start_pos do - do_slice(next_grapheme(rest), start_pos, last_pos, current_pos + 1, acc) - end + first = add_if_negative(first, length) + last = add_if_negative(last, length) - defp do_slice({char, rest}, start_pos, last_pos, current_pos, acc) when current_pos >= start_pos and current_pos < last_pos do - do_slice(next_grapheme(rest), start_pos, last_pos, current_pos + 1, acc <> char) + if first < 0 or first > last or first > length do + "" + else + last = min(last + 1, length) + bytes = Enum.drop(bytes, length - last) + first = last - first + {length_bytes, start_bytes} = Enum.split(bytes, first) + binary_part(string, Enum.sum(start_bytes), Enum.sum(length_bytes)) + end end - defp do_slice({char, _}, start_pos, last_pos, current_pos, acc) when current_pos >= start_pos and current_pos == last_pos do - acc <> char + defp add_if_negative(value, to_add) when value < 0, do: value + to_add + defp add_if_negative(value, _to_add), do: value + + defp do_acc_bytes({size, rest}, bytes, length) do + do_acc_bytes(next_grapheme_size(rest), 
[size | bytes], length + 1) end - - defp do_slice(nil, _, _, _, acc) do - acc + + defp do_acc_bytes(nil, bytes, length) do + {bytes, length} end @doc """ - Returns `true` if `string` starts with any of the prefixes given, otherwise - `false`. `prefixes` can be either a single prefix or a list of prefixes. + Returns `true` if `string` starts with any of the prefixes given. + + `prefix` can be either a single prefix or a list of prefixes. ## Examples iex> String.starts_with? "elixir", "eli" true - iex> String.starts_with? "elixir", ["erlang", "elixir"] true - iex> String.starts_with? "elixir", ["erlang", "ruby"] false - """ - @spec starts_with?(t, t | [t]) :: boolean + An empty string will always match: - def starts_with?(string, prefixes) when is_list(prefixes) do - Enum.any?(prefixes, &do_starts_with(string, &1)) - end + iex> String.starts_with? "elixir", "" + true + iex> String.starts_with? "elixir", ["", "other"] + true - def starts_with?(string, prefix) do - do_starts_with(string, prefix) + """ + @spec starts_with?(t, t | [t]) :: boolean + def starts_with?(string, []) when is_binary(string) do + false end - defp do_starts_with(string, "") when is_binary(string) do - true + def starts_with?(string, prefix) when is_binary(string) and is_list(prefix) do + "" in prefix or Kernel.match?({0, _}, :binary.match(string, prefix)) end - defp do_starts_with(string, prefix) when is_binary(prefix) do - Kernel.match?({0, _}, :binary.match(string, prefix)) + def starts_with?(string, prefix) when is_binary(string) do + "" == prefix or Kernel.match?({0, _}, :binary.match(string, prefix)) end @doc """ - Returns `true` if `string` ends with any of the suffixes given, otherwise - `false`. `suffixes` can be either a single suffix or a list of suffixes. + Returns `true` if `string` ends with any of the suffixes given. + + `suffixes` can be either a single suffix or a list of suffixes. ## Examples iex> String.ends_with? "language", "age" true - iex> String.ends_with? "language", ["youth", "age"] true - iex> String.ends_with? "language", ["youth", "elixir"] false + An empty suffix will always match: + + iex> String.ends_with? "language", "" + true + iex> String.ends_with? "language", ["", "other"] + true + """ @spec ends_with?(t, t | [t]) :: boolean - - def ends_with?(string, suffixes) when is_list(suffixes) do + def ends_with?(string, suffixes) when is_binary(string) and is_list(suffixes) do Enum.any?(suffixes, &do_ends_with(string, &1)) end - def ends_with?(string, suffix) do + def ends_with?(string, suffix) when is_binary(string) do do_ends_with(string, suffix) end - defp do_ends_with(string, "") when is_binary(string) do + defp do_ends_with(_string, "") do true end @@ -1182,7 +1744,7 @@ defmodule String do end @doc """ - Check if `string` matches the given regular expression. + Checks if `string` matches the given regular expression. ## Examples @@ -1199,54 +1761,63 @@ defmodule String do end @doc """ - Check if `string` contains any of the given `contents`. + Checks if `string` contains any of the given `contents`. - `matches` can be either a single string or a list of strings. + `contents` can be either a single string or a list of strings. ## Examples iex> String.contains? "elixir of life", "of" true - iex> String.contains? "elixir of life", ["life", "death"] true - iex> String.contains? 
"elixir of life", ["death", "mercury"] false - """ - @spec contains?(t, t | [t]) :: boolean + An empty string will always match: - def contains?(string, contents) when is_list(contents) do - Enum.any?(contents, &do_contains(string, &1)) - end + iex> String.contains? "elixir of life", "" + true + iex> String.contains? "elixir of life", ["", "other"] + true + + The argument can also be a precompiled pattern: + + iex> pattern = :binary.compile_pattern(["life", "death"]) + iex> String.contains? "elixir of life", pattern + true - def contains?(string, content) do - do_contains(string, content) + """ + @spec contains?(t, pattern) :: boolean + def contains?(string, []) when is_binary(string) do + false end - defp do_contains(string, "") when is_binary(string) do - true + def contains?(string, contents) when is_binary(string) and is_list(contents) do + "" in contents or :binary.match(string, contents) != :nomatch end - defp do_contains(string, match) when is_binary(match) do - :nomatch != :binary.match(string, match) + def contains?(string, contents) when is_binary(string) do + "" == contents or :binary.match(string, contents) != :nomatch end @doc """ - Converts a string into a char list. + Converts a string into a charlist. + + Specifically, this functions takes a UTF-8 encoded binary and returns a list of its integer + codepoints. It is similar to `codepoints/1` except that the latter returns a list of codepoints as + strings. + + In case you need to work with bytes, take a look at the + [`:binary` module](http://www.erlang.org/doc/man/binary.html). ## Examples - iex> String.to_char_list("æß") + iex> String.to_charlist("æß") 'æß' - - Notice that this function expect a list of integer representing - UTF-8 codepoints. If you have a raw binary, you must instead use - [the `:binary` module](http://erlang.org/doc/man/binary.html). """ - @spec to_char_list(t) :: char_list - def to_char_list(string) when is_binary(string) do + @spec to_charlist(t) :: charlist + def to_charlist(string) when is_binary(string) do case :unicode.characters_to_list(string) do result when is_list(result) -> result @@ -1262,8 +1833,16 @@ defmodule String do @doc """ Converts a string to an atom. - Currently Elixir does not support conversions from strings - which contains Unicode codepoints greater than 0xFF. + Warning: this function creates atoms dynamically and atoms are + not garbage collected. Therefore, `string` should not be an + untrusted value, such as input received from a socket or during + a web request. Consider using `to_existing_atom/1` instead. + + By default, the maximum number of atoms is `1_048_576`. This limit + can be raised or lowered using the VM option `+t`. + + The maximum atom size is of 255 characters. Prior to OTP 20, + only latin1 characters are allowed. Inlined by the compiler. @@ -1281,14 +1860,14 @@ defmodule String do @doc """ Converts a string to an existing atom. - Currently Elixir does not support conversions from strings - which contains Unicode codepoints greater than 0xFF. + The maximum atom size is of 255 characters. Prior to OTP 20, + only latin1 characters are allowed. Inlined by the compiler. ## Examples - iex> :my_atom + iex> _ = :my_atom iex> String.to_existing_atom("my_atom") :my_atom @@ -1302,7 +1881,7 @@ defmodule String do end @doc """ - Returns a integer whose text representation is `string`. + Returns an integer whose text representation is `string`. Inlined by the compiler. 
@@ -1328,7 +1907,7 @@ defmodule String do 1023 """ - @spec to_integer(String.t, pos_integer) :: integer + @spec to_integer(String.t, 2..36) :: integer def to_integer(string, base) do :erlang.binary_to_integer(string, base) end @@ -1336,6 +1915,10 @@ defmodule String do @doc """ Returns a float whose text representation is `string`. + `string` must be the string representation of a float including a decimal point. + In order to parse a string without decimal point as a float then `Float.parse/1` + should be used. Otherwise, an `ArgumentError` will be raised. + Inlined by the compiler. ## Examples @@ -1343,9 +1926,135 @@ defmodule String do iex> String.to_float("2.2017764e+0") 2.2017764 + iex> String.to_float("3.0") + 3.0 + + String.to_float("3") + #=> ** (ArgumentError) argument error + """ @spec to_float(String.t) :: float def to_float(string) do :erlang.binary_to_float(string) end + + @doc """ + Returns a float value between 0 (equates to no similarity) and 1 (is an exact match) + representing [Jaro](https://en.wikipedia.org/wiki/Jaro–Winkler_distance) + distance between `string1` and `string2`. + + The Jaro distance metric is designed and best suited for short strings such as person names. + + ## Examples + + iex> String.jaro_distance("dwayne", "duane") + 0.8222222222222223 + iex> String.jaro_distance("even", "odd") + 0.0 + + """ + @spec jaro_distance(t, t) :: float + def jaro_distance(string1, string2) + + def jaro_distance(string, string), do: 1.0 + def jaro_distance(_string, ""), do: 0.0 + def jaro_distance("", _string), do: 0.0 + + def jaro_distance(string1, string2) do + {chars1, len1} = chars_and_length(string1) + {chars2, len2} = chars_and_length(string2) + + case match(chars1, len1, chars2, len2) do + {0, _trans} -> 0.0 + {comm, trans} -> + ((comm / len1) + + (comm / len2) + + ((comm - trans) / comm)) / 3 + end + end + + @compile {:inline, chars_and_length: 1} + defp chars_and_length(string) do + chars = graphemes(string) + {chars, Kernel.length(chars)} + end + + defp match(chars1, len1, chars2, len2) do + if len1 < len2 do + match(chars1, chars2, div(len2, 2) - 1) + else + match(chars2, chars1, div(len1, 2) - 1) + end + end + + defp match(chars1, chars2, lim) do + match(chars1, chars2, {0, lim}, {0, 0, -1}, 0) + end + + defp match([char | rest], chars, range, state, idx) do + {chars, state} = submatch(char, chars, range, state, idx) + + case range do + {lim, lim} -> match(rest, tl(chars), range, state, idx + 1) + {pre, lim} -> match(rest, chars, {pre + 1, lim}, state, idx + 1) + end + end + + defp match([], _, _, {comm, trans, _}, _), do: {comm, trans} + + defp submatch(char, chars, {pre, _} = range, state, idx) do + case detect(char, chars, range) do + nil -> {chars, state} + {subidx, chars} -> + {chars, proceed(state, idx - pre + subidx)} + end + end + + defp detect(char, chars, {pre, lim}) do + detect(char, chars, pre + 1 + lim, 0, []) + end + + defp detect(_char, _chars, 0, _idx, _acc), do: nil + defp detect(_char, [], _lim, _idx, _acc), do: nil + + defp detect(char, [char | rest], _lim, idx, acc), + do: {idx, Enum.reverse(acc, [nil | rest])} + + defp detect(char, [other | rest], lim, idx, acc), + do: detect(char, rest, lim - 1, idx + 1, [other | acc]) + + defp proceed({comm, trans, former}, current) do + if current < former do + {comm + 1, trans + 1, current} + else + {comm + 1, trans, current} + end + end + + @doc """ + Returns a keyword list that represents an edit script. + + Check `List.myers_difference/2` for more information. 
+ + ## Examples + + iex> string1 = "fox hops over the dog" + iex> string2 = "fox jumps over the lazy cat" + iex> String.myers_difference(string1, string2) + [eq: "fox ", del: "ho", ins: "jum", eq: "ps over the ", del: "dog", ins: "lazy cat"] + + """ + @spec myers_difference(t, t) :: [{:eq | :ins | :del, t}] | nil + def myers_difference(string1, string2) do + List.myers_difference(graphemes(string1), graphemes(string2)) + |> Enum.map(fn {kind, chars} -> + {kind, IO.iodata_to_binary(chars)} + end) + end + + # TODO: Remove by 2.0 + # (hard-deprecated in elixir_dispatch) + @doc false + @spec to_char_list(t) :: charlist + def to_char_list(string), do: String.to_charlist(string) end diff --git a/lib/elixir/lib/string/chars.ex b/lib/elixir/lib/string/chars.ex index 0d02f457602..0caf3729828 100644 --- a/lib/elixir/lib/string/chars.ex +++ b/lib/elixir/lib/string/chars.ex @@ -2,19 +2,24 @@ import Kernel, except: [to_string: 1] defprotocol String.Chars do @moduledoc ~S""" - The String.Chars protocol is responsible for - converting a structure to a Binary (only if applicable). + The `String.Chars` protocol is responsible for + converting a structure to a binary (only if applicable). + The only function required to be implemented is - `to_string` which does the conversion. + `to_string/1`, which does the conversion. - The `to_string` function automatically imported - by Kernel invokes this protocol. String - interpolation also invokes to_string in its + The `to_string/1` function automatically imported + by `Kernel` invokes this protocol. String + interpolation also invokes `to_string/1` in its arguments. For example, `"foo#{bar}"` is the same as `"foo" <> to_string(bar)`. """ - def to_string(thing) + @doc """ + Converts `term` to a string. + """ + @spec to_string(t) :: String.t + def to_string(term) end defimpl String.Chars, for: Atom do @@ -28,30 +33,30 @@ defimpl String.Chars, for: Atom do end defimpl String.Chars, for: BitString do - def to_string(thing) when is_binary(thing) do - thing + def to_string(term) when is_binary(term) do + term end - def to_string(thing) do + def to_string(term) do raise Protocol.UndefinedError, protocol: @protocol, - value: thing, + value: term, description: "cannot convert a bitstring to a string" end end defimpl String.Chars, for: List do - def to_string(char_list), do: List.to_string(char_list) + def to_string(charlist), do: List.to_string(charlist) end defimpl String.Chars, for: Integer do - def to_string(thing) do - Integer.to_string(thing) + def to_string(term) do + Integer.to_string(term) end end defimpl String.Chars, for: Float do - def to_string(thing) do - IO.iodata_to_binary(:io_lib_format.fwrite_g(thing)) + def to_string(term) do + IO.iodata_to_binary(:io_lib_format.fwrite_g(term)) end end diff --git a/lib/elixir/lib/string_io.ex b/lib/elixir/lib/string_io.ex index 8728e75d248..ca4e253d9dc 100644 --- a/lib/elixir/lib/string_io.ex +++ b/lib/elixir/lib/string_io.ex @@ -1,6 +1,9 @@ defmodule StringIO do @moduledoc """ - This module provides an IO device that wraps a string. + Controls an IO device process that wraps a string. + + A `StringIO` IO device can be passed as a "device" to + most of the functions in the `IO` module. ## Examples @@ -15,6 +18,9 @@ defmodule StringIO do @doc """ Creates an IO device. + `string` will be the initial input of the newly created + device. + If the `:capture_prompt` option is set to `true`, prompts (specified as arguments to `IO.get*` functions) are captured. 
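The `StringIO.open/2` documentation above mentions the `:capture_prompt` option. A small sketch of the behaviour as documented, building on the doctest examples already in this file (the input string here is made up):

```elixir
# With :capture_prompt, the prompt given to IO.gets/2 is appended
# to the output buffer instead of being discarded.
{:ok, pid} = StringIO.open("input\n", capture_prompt: true)

IO.gets(pid, "> ")
#=> "input\n"

StringIO.contents(pid)
#=> {"", "> "}   # input consumed, prompt captured as output
```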
@@ -36,11 +42,12 @@ defmodule StringIO do """ @spec open(binary, Keyword.t) :: {:ok, pid} def open(string, options \\ []) when is_binary(string) do - :gen_server.start_link(__MODULE__, {string, options}, []) + GenServer.start_link(__MODULE__, {string, options}, []) end @doc """ - Returns current buffers. + Returns the current input/output buffers for the given IO + device. ## Examples @@ -52,11 +59,30 @@ defmodule StringIO do """ @spec contents(pid) :: {binary, binary} def contents(pid) when is_pid(pid) do - :gen_server.call(pid, :contents) + GenServer.call(pid, :contents) + end + + @doc """ + Flushes the output buffer and returns its current contents. + + ## Examples + + iex> {:ok, pid} = StringIO.open("in") + iex> IO.write(pid, "out") + iex> StringIO.flush(pid) + "out" + iex> StringIO.contents(pid) + {"in", ""} + + """ + @spec flush(pid) :: binary + def flush(pid) when is_pid(pid) do + GenServer.call(pid, :flush) end @doc """ - Stops the IO device and returns remaining buffers. + Stops the IO device and returns the remaining input/output + buffers. ## Examples @@ -68,7 +94,7 @@ defmodule StringIO do """ @spec close(pid) :: {:ok, {binary, binary}} def close(pid) when is_pid(pid) do - :gen_server.call(pid, :close) + GenServer.call(pid, :close) end ## callbacks @@ -91,6 +117,10 @@ defmodule StringIO do {:reply, {input, output}, s} end + def handle_call(:flush, _from, %{output: output} = s) do + {:reply, output, %{s | output: ""}} + end + def handle_call(:close, _from, %{input: input, output: output} = s) do {:stop, :normal, {:ok, {input, output}}, s} end @@ -105,21 +135,20 @@ defmodule StringIO do s end - defp io_request({:put_chars, chars}, %{output: output} = s) do - {:ok, %{s | output: << output :: binary, IO.chardata_to_string(chars) :: binary >>}} + defp io_request({:put_chars, chars} = req, s) do + put_chars(:latin1, chars, req, s) end - defp io_request({:put_chars, m, f, as}, %{output: output} = s) do - chars = apply(m, f, as) - {:ok, %{s | output: << output :: binary, IO.chardata_to_string(chars) :: binary >>}} + defp io_request({:put_chars, m, f, as} = req, s) do + put_chars(:latin1, apply(m, f, as), req, s) end - defp io_request({:put_chars, _encoding, chars}, s) do - io_request({:put_chars, chars}, s) + defp io_request({:put_chars, encoding, chars} = req, s) do + put_chars(encoding, chars, req, s) end - defp io_request({:put_chars, _encoding, mod, func, args}, s) do - io_request({:put_chars, mod, func, args}, s) + defp io_request({:put_chars, encoding, mod, func, args} = req, s) do + put_chars(encoding, apply(mod, func, args), req, s) end defp io_request({:get_chars, prompt, n}, s) when n >= 0 do @@ -174,19 +203,25 @@ defmodule StringIO do {{:error, :request}, s} end + ## put_chars + + defp put_chars(encoding, chars, req, %{output: output} = s) do + case :unicode.characters_to_binary(chars, encoding, :unicode) do + string when is_binary(string) -> + {:ok, %{s | output: output <> string}} + {_, _, _} -> + {{:error, req}, s} + end + end + ## get_chars - defp get_chars(encoding, prompt, n, - %{input: input, output: output, capture_prompt: capture_prompt} = s) do + defp get_chars(encoding, prompt, n, %{input: input} = s) do case do_get_chars(input, encoding, n) do {:error, _} = error -> {error, s} {result, input} -> - if capture_prompt do - output = << output :: binary, IO.chardata_to_string(prompt) :: binary >> - end - - {result, %{s | input: input, output: output}} + {result, state_after_read(s, input, prompt)} end end @@ -199,7 +234,7 @@ defmodule StringIO do end defp 
do_get_chars(input, :latin1, n) do - <> = input + <> = input {chars, rest} end @@ -209,7 +244,7 @@ defmodule StringIO do {buf_count, split_pos} when buf_count < n or split_pos == :none -> {input, ""} {_buf_count, split_pos} -> - <> = input + <> = input {chars, rest} end catch @@ -220,34 +255,23 @@ defmodule StringIO do ## get_line - defp get_line(encoding, prompt, - %{input: input, output: output, capture_prompt: capture_prompt} = s) do - case :unicode.characters_to_list(input, encoding) do - {:error, _, _} -> - {{:error, :collect_line}, s} - {:incomplete, _, _} -> - {{:error, :collect_line}, s} - chars -> - {result, input} = do_get_line(chars, encoding) + defp get_line(encoding, prompt, %{input: input} = s) do + case bytes_until_eol(input, encoding, 0) do + {:split, 0} -> + {:eof, state_after_read(s, "", prompt)} + {:split, count} -> + {result, remainder} = :erlang.split_binary(input, count) - if capture_prompt do - output = << output :: binary, IO.chardata_to_string(prompt) :: binary >> - end + {result, state_after_read(s, remainder, prompt)} + {:replace_split, count} -> + {result, remainder} = :erlang.split_binary(input, count) - {result, %{s | input: input, output: output}} + {binary_part(result, 0, byte_size(result) - 2) <> "\n", state_after_read(s, remainder, prompt)} + :error + -> {{:error, :collect_line}, s} end end - defp do_get_line('', _encoding) do - {:eof, ""} - end - - defp do_get_line(chars, encoding) do - {line, rest} = collect_line(chars) - {:unicode.characters_to_binary(line, encoding), - :unicode.characters_to_binary(rest, encoding)} - end - ## get_until defp get_until(encoding, prompt, mod, fun, args, @@ -260,17 +284,20 @@ defmodule StringIO do chars -> {result, input, count} = do_get_until(chars, encoding, mod, fun, args) - if capture_prompt do - output = << output :: binary, :binary.copy(IO.chardata_to_string(prompt), count) :: binary >> - end - input = case input do :eof -> "" _ -> :unicode.characters_to_binary(input, encoding) end - {result, %{s | input: input, output: output}} + s = + if capture_prompt do + %{s | output: <>} + else + s + end + + {result, %{s | input: input}} end end @@ -289,11 +316,10 @@ defmodule StringIO do {line, rest} = collect_line(chars) case apply(mod, fun, [continuation, line | args]) do - {:done, result, rest1} -> - unless rest1 == :eof do - rest = rest1 ++ rest - end + {:done, result, :eof} -> {result, rest, count + 1} + {:done, result, extra} -> + {result, extra ++ rest, count + 1} {:more, next_continuation} -> do_get_until(rest, encoding, mod, fun, args, next_continuation, count + 1) end @@ -301,7 +327,7 @@ defmodule StringIO do ## io_requests - defp io_requests([r|rs], {:ok, s}) do + defp io_requests([r | rs], {:ok, s}) do io_requests(rs, io_request(r, s)) end @@ -311,6 +337,28 @@ defmodule StringIO do ## helpers + defp state_after_read(%{capture_prompt: false} = s, remainder, _prompt) do + %{s | input: remainder} + end + + defp state_after_read(%{capture_prompt: true, output: output} = s, remainder, prompt) do + %{s | input: remainder, output: <>} + end + + defp bytes_until_eol("", _, count), do: {:split, count} + defp bytes_until_eol(<<"\r\n"::binary, _::binary>>, _, count), do: {:replace_split, count + 2} + defp bytes_until_eol(<<"\n"::binary, _::binary>>, _, count), do: {:split, count + 1} + + defp bytes_until_eol(<>, :unicode, count) do + bytes_until_eol(tail, :unicode, count + byte_size(<>)) + end + + defp bytes_until_eol(<<_, tail::binary>>, :latin1, count) do + bytes_until_eol(tail, :latin1, count + 1) + end + + defp 
bytes_until_eol(<<_::binary>>, _, _), do: :error + defp collect_line(chars) do collect_line(chars, []) end @@ -320,15 +368,15 @@ defmodule StringIO do end defp collect_line([?\r, ?\n | rest], stack) do - {:lists.reverse([?\n|stack]), rest} + {:lists.reverse([?\n | stack]), rest} end defp collect_line([?\n | rest], stack) do - {:lists.reverse([?\n|stack]), rest} + {:lists.reverse([?\n | stack]), rest} end - defp collect_line([h|t], stack) do - collect_line(t, [h|stack]) + defp collect_line([h | t], stack) do + collect_line(t, [h | stack]) end defp io_reply(from, reply_as, reply) do diff --git a/lib/elixir/lib/supervisor.ex b/lib/elixir/lib/supervisor.ex index 9945e2ce5c9..c1d8355a7b6 100644 --- a/lib/elixir/lib/supervisor.ex +++ b/lib/elixir/lib/supervisor.ex @@ -1,17 +1,17 @@ defmodule Supervisor do - @moduledoc """ + @moduledoc ~S""" A behaviour module for implementing supervision functionality. - A supervisor is a process which supervises other processes called - child processes. Supervisors are used to build an hierarchical process - structure called a supervision tree, a nice way to structure fault-tolerant - applications. + A supervisor is a process which supervises other processes, which we refer + to as *child processes*. Supervisors are used to build a hierarchical process + structure called a *supervision tree*. Supervision trees are a nice way to + structure fault-tolerant applications. - A supervisor implemented using this module will have a standard set - of interface functions and include functionality for tracing and error - reporting. It will also fit into an supervision tree. + A supervisor implemented using this module has a standard set + of interface functions and includes functionality for tracing and error + reporting. It also fits into a supervision tree. - ## Example + ## Examples In order to define a supervisor, we need to first define a child process that is going to be supervised. In order to do so, we will define a GenServer @@ -20,16 +20,16 @@ defmodule Supervisor do defmodule Stack do use GenServer - def start_link(state) do - GenServer.start_link(__MODULE__, state, [name: :sup_stack]) + def start_link(state, opts \\ []) do + GenServer.start_link(__MODULE__, state, opts) end - def handle_call(:pop, _from, [h|t]) do + def handle_call(:pop, _from, [h | t]) do {:reply, h, t} end - def handle_cast({:push, h}, _from, t) do - {:noreply, [h|t]} + def handle_cast({:push, h}, t) do + {:noreply, [h | t]} end end @@ -38,57 +38,64 @@ defmodule Supervisor do # Import helpers for defining supervisors import Supervisor.Spec - # We are going to supervise the Stack server which will - # be started with a single argument [:hello] + # Supervise the Stack server which will be started with + # two arguments. The initial stack, [:hello], and a + # keyword list containing the GenServer options that + # set the registered name of the server to MyStack. 
children = [ - worker(Stack, [[:hello]]) + worker(Stack, [[:hello], [name: MyStack]]) ] - # Start the supervisor with our one child + # Start the supervisor with our child {:ok, pid} = Supervisor.start_link(children, strategy: :one_for_one) - Notice that when starting the GenServer, we have registered it - with name `:sup_stack`, which allows us to call it directly and + # There is one child worker started + Supervisor.count_children(pid) + #=> %{active: 1, specs: 1, supervisors: 0, workers: 1} + + Notice that when starting the GenServer, we are registering it + with name `MyStack`, which allows us to call it directly and get what is on the stack: - GenServer.call(:sup_stack, :pop) + GenServer.call(MyStack, :pop) #=> :hello - GenServer.cast(:sup_stack, {:push, :world}) + GenServer.cast(MyStack, {:push, :world}) #=> :ok - GenServer.call(:sup_stack, :pop) + GenServer.call(MyStack, :pop) #=> :world However, there is a bug in our stack server. If we call `:pop` and - the stack is empty, it is going to crash because no clause matches. - Let's try it: + the stack is empty, it is going to crash because no clause matches: - GenServer.call(:sup_stack, :pop) - =ERROR REPORT==== + GenServer.call(MyStack, :pop) + ** (exit) exited in: GenServer.call(MyStack, :pop, 5000) Luckily, since the server is being supervised by a supervisor, the - supervisor will automatically start a new one, with the default stack - of `[:hello]` like before: + supervisor will automatically start a new one, with the initial stack + of `[:hello]`: - GenServer.call(:sup_stack, :pop) == :hello + GenServer.call(MyStack, :pop) + #=> :hello Supervisors support different strategies; in the example above, we have chosen `:one_for_one`. Furthermore, each supervisor can have many workers and supervisors as children, each of them with their specific configuration, shutdown values, and restart strategies. - Continue reading this moduledoc to learn more about supervision strategies - and then follow to the `Supervisor.Spec` module documentation to learn - about the specification for workers and supervisors. + The rest of this documentation will cover supervision strategies; also read + the documentation for the `Supervisor.Spec` module to learn about the + specification for workers and supervisors. ## Module-based supervisors - In the example above, a supervisor was dynamically created by passing - the supervision structure to `start_link/2`. However, supervisors - can also be created by explicitly defining a supervision module: + In the example above, a supervisor was started by passing the supervision + structure to `start_link/2`. However, supervisors can also be created by + explicitly defining a supervision module: defmodule MyApp.Supervisor do + # Automatically imports Supervisor.Spec use Supervisor def start_link do @@ -100,24 +107,28 @@ defmodule Supervisor do worker(Stack, [[:hello]]) ] + # supervise/2 is imported from Supervisor.Spec supervise(children, strategy: :one_for_one) end end You may want to use a module-based supervisor if: - * You need to do some particular action on supervisor - initialization, like setting up a ETS table. + * You need to perform some particular action on supervisor + initialization, like setting up an ETS table. * You want to perform partial hot-code swapping of the - tree. For example, if you add or remove a children, + tree. 
For example, if you add or remove children, the module-based supervision will add and remove the - new children directly, while the dynamic supervision + new children directly, while dynamic supervision requires the whole tree to be restarted in order to perform such swaps. ## Strategies + Supervisors support different supervision strategies (through the `:strategy` + option, as seen above): + * `:one_for_one` - if a child process terminates, only that process is restarted. @@ -126,30 +137,112 @@ defmodule Supervisor do the terminated one) are restarted. * `:rest_for_one` - if a child process terminates, the "rest" of - the child processes, i.e. the child processes after the terminated + the child processes, i.e., the child processes after the terminated one in start order, are terminated. Then the terminated child process and the rest of the child processes are restarted. * `:simple_one_for_one` - similar to `:one_for_one` but suits better when dynamically attaching children. This strategy requires the - supervisor specification to contain only one children. Many functions + supervisor specification to contain only one child. Many functions in this module behave slightly differently when this strategy is used. - ## Name Registration + ## Simple one for one + + The `:simple_one_for_one` supervisor is useful when you want to dynamically + start and stop supervised children. For example, imagine you want to + dynamically create multiple stacks. We can do so by defining a `:simple_one_for_one` + supervisor: + + # Import helpers for defining supervisors + import Supervisor.Spec + + # This time, we don't pass any argument because + # the argument will be given when we start the child + children = [ + worker(Stack, [], restart: :transient) + ] + + # Start the supervisor with our one child as a template + {:ok, sup_pid} = Supervisor.start_link(children, strategy: :simple_one_for_one) + + # No child worker is active yet until start_child is called + Supervisor.count_children(sup_pid) + #=> %{active: 0, specs: 1, supervisors: 0, workers: 0} + + There are a couple differences here: + + * the simple one for one specification can define only one child which + works as a template for when we call `start_child/2` + + * we have defined the child to have a restart strategy of `:transient`. This + means that, if the child process exits due to a `:normal`, `:shutdown`, + or `{:shutdown, term}` reason, it won't be restarted. This is useful + as it allows our workers to politely shutdown and be removed from the + `:simple_one_for_one` supervisor, without being restarted. You can find + more information about restart strategies in the documentation for the + `Supervisor.Spec` module + + With the supervisor defined, let's dynamically start stacks: + + {:ok, pid} = Supervisor.start_child(sup_pid, [[:hello, :world], []]) + GenServer.call(pid, :pop) #=> :hello + GenServer.call(pid, :pop) #=> :world + + {:ok, pid} = Supervisor.start_child(sup_pid, [[:something, :else], []]) + GenServer.call(pid, :pop) #=> :something + GenServer.call(pid, :pop) #=> :else + + Supervisor.count_children(sup_pid) + #=> %{active: 2, specs: 1, supervisors: 0, workers: 2} + + ## Exit reasons + + From the example above, you may have noticed that the `:transient` restart + strategy for the worker does not restart the child in case it exits with + reason `:normal`, `:shutdown` or `{:shutdown, term}`. + + So one may ask: which exit reason should I choose when exiting my worker? 
+ There are three options: + + * `:normal` - in such cases, the exit won't be logged, there is no restart + in transient mode, and linked processes do not exit + + * `:shutdown` or `{:shutdown, term}` - in such cases, the exit won't be + logged, there is no restart in transient mode, and linked processes exit + with the same reason unless they're trapping exits + + * any other term - in such cases, the exit will be logged, there are + restarts in transient mode, and linked processes exit with the same reason + unless they're trapping exits + + ## Name registration A supervisor is bound to the same name registration rules as a `GenServer`. - Read more about it in the `GenServer` docs. + Read more about these rules in the documentation for `GenServer`. + """ @doc false defmacro __using__(_) do quote location: :keep do - @behaviour :supervisor + @behaviour Supervisor import Supervisor.Spec + + @doc false + def init(arg) end end + @doc """ + Callback invoked to start the supervisor and during hot code upgrades. + """ + # TODO: Support {:ok, [child_spec], Keyword.t} + # TODO: Document options here and update Supervisor.Spec + @callback init(args :: term) :: + {:ok, {:supervisor.sup_flags, [Supervisor.Spec.spec]}} | + :ignore + @typedoc "Return values of `start_link` functions" @type on_start :: {:ok, pid} | :ignore | {:error, {:already_started, pid} | {:shutdown, term} | term} @@ -163,11 +256,13 @@ defmodule Supervisor do @typedoc "The Supervisor name" @type name :: atom | {:global, term} | {:via, module, term} + @typedoc "Option values used by the `start*` functions" + @type option :: {:name, name} | {:strategy, Supervisor.Spec.strategy} | + {:max_restarts, non_neg_integer} | + {:max_seconds, pos_integer} + @typedoc "Options used by the `start*` functions" - @type options :: [name: name, - strategy: Supervisor.Spec.strategy, - max_restarts: non_neg_integer, - max_seconds: non_neg_integer] + @type options :: [option, ...] @typedoc "The supervisor reference" @type supervisor :: pid | name | {atom, node} @@ -175,138 +270,159 @@ defmodule Supervisor do @doc """ Starts a supervisor with the given children. - A strategy is required to be given as an option. Furthermore, - the `:max_restarts` and `:max_seconds` value can be configured - as described in `Supervisor.Spec.supervise/2` docs. + A strategy is required to be provided through the `:strategy` option. + Furthermore, the `:max_restarts` and `:max_seconds` options can be + configured as described in the documentation for `Supervisor.Spec.supervise/2`. The options can also be used to register a supervisor name. - the supported values are described under the `Name Registration` + The supported values are described under the "Name registration" section in the `GenServer` module docs. If the supervisor and its child processes are successfully created - (i.e. if the start function of all child processes returns `{:ok, child}`, - `{:ok, child, info}`, or `:ignore`) the function returns - `{:ok, pid}`, where `pid` is the pid of the supervisor. If there - already exists a process with the specified name, the function returns - `{:error, {:already_started, pid}}`, where pid is the pid of that - process. - - If any of the child process start functions fail or return an error tuple or - an erroneous value, the supervisor will first terminate all already - started child processes with reason `:shutdown` and then terminate - itself and return `{:error, {:shutdown, reason}}`. 
- - Note that the `Supervisor` is linked to the parent process - and will exit not only on crashes but also if the parent process - exits with `:normal` reason. + (i.e., if the start function of each child process returns `{:ok, child}`, + `{:ok, child, info}`, or `:ignore`) this function returns + `{:ok, pid}`, where `pid` is the PID of the supervisor. If a process with the + specified name already exists, the function returns `{:error, + {:already_started, pid}}`, where `pid` is the PID of that process. + + If the start function of any of the child processes fails or returns an error + tuple or an erroneous value, the supervisor first terminates with reason + `:shutdown` all the child processes that have already been started, and then + terminates itself and returns `{:error, {:shutdown, reason}}`. + + Note that a supervisor started with this function is linked to the parent + process and exits not only on crashes but also if the parent process exits + with `:normal` reason. """ - @spec start_link([tuple], options) :: on_start + @spec start_link([Supervisor.Spec.spec], options) :: on_start def start_link(children, options) when is_list(children) do - spec = Supervisor.Spec.supervise(children, options) - start_link(Supervisor.Default, spec, options) + sup_keys = [:strategy, :max_seconds, :max_restarts] + {sup_opts, start_opts} = Keyword.split(options, sup_keys) + spec = Supervisor.Spec.supervise(children, sup_opts) + start_link(Supervisor.Default, spec, start_opts) end @doc """ - Starts a supervisor module with the given `arg`. + Starts a supervisor process with the given `module` and `arg`. - To start the supervisor, the `init/1` callback will be invoked - in the given module. The `init/1` callback must return a - supervision specification which can be created with the help - of `Supervisor.Spec` module. + To start the supervisor, the `c:init/1` callback will be invoked in the given + `module`, with `arg` as its argument. The `c:init/1` callback must return a + supervisor specification which can be created with the help of the functions + in the `Supervisor.Spec` module (especially `Supervisor.Spec.supervise/2`). - If the `init/1` callback returns `:ignore`, this function returns + If the `c:init/1` callback returns `:ignore`, this function returns `:ignore` as well and the supervisor terminates with reason `:normal`. If it fails or returns an incorrect value, this function returns `{:error, term}` where `term` is a term with information about the error, and the supervisor terminates with reason `term`. The `:name` option can also be given in order to register a supervisor - name, the supported values are described under the `Name Registration` + name, the supported values are described in the "Name registration" section in the `GenServer` module docs. - - Other failure conditions are specified in `start_link/2` docs. 
""" - @spec start_link(module, term, options) :: on_start + @spec start_link(module, term) :: on_start + @spec start_link(module, term, GenServer.options) :: on_start def start_link(module, arg, options \\ []) when is_list(options) do case Keyword.get(options, :name) do nil -> :supervisor.start_link(module, arg) atom when is_atom(atom) -> :supervisor.start_link({:local, atom}, module, arg) - other when is_tuple(other) -> - :supervisor.start_link(other, module, arg) + {:global, _term} = tuple -> + :supervisor.start_link(tuple, module, arg) + {:via, via_module, _term} = tuple when is_atom(via_module) -> + :supervisor.start_link(tuple, module, arg) + other -> + raise ArgumentError, """ + expected :name option to be one of: + + * nil + * atom + * {:global, term} + * {:via, module, term} + + Got: #{inspect(other)} + """ end end @doc """ - Dynamically adds and starts a child specification to the supervisor. + Dynamically adds a child specification to `supervisor` and starts that child. `child_spec` should be a valid child specification (unless the supervisor is a `:simple_one_for_one` supervisor, see below). The child process will be started as defined in the child specification. In the case of `:simple_one_for_one`, the child specification defined in - the supervisor will be used and instead of a `child_spec`, an arbitrary list + the supervisor is used and instead of a `child_spec`, an arbitrary list of terms is expected. The child process will then be started by appending the given list to the existing function arguments in the child specification. - If there already exists a child specification with the specified id, - `child_spec` is discarded and the function returns an error with `:already_started` - or `:already_present` if the corresponding child process is running or not. + If a child specification with the specified id already exists, `child_spec` is + discarded and this function returns an error with `:already_started` or + `:already_present` if the corresponding child process is running or not, + respectively. - If the child process start function returns `{:ok, child}` or `{:ok, child, info}`, - the child specification and pid is added to the supervisor and the function returns - the same value. + If the child process start function returns `{:ok, child}` or `{:ok, child, + info}`, then child specification and PID are added to the supervisor and + this function returns the same value. - If the child process start function returns `:ignore, the child specification is - added to the supervisor, the pid is set to undefined and the function returns - `{:ok, :undefined}`. + If the child process start function returns `:ignore`, the child specification + is added to the supervisor, the PID is set to `:undefined` and this function + returns `{:ok, :undefined}`. - If the child process start function returns an error tuple or an erroneous value, - or if it fails, the child specification is discarded and the function returns - `{:error, error}` where `error` is a term containing information about the error - and child specification. + If the child process start function returns an error tuple or an erroneous + value, or if it fails, the child specification is discarded and this function + returns `{:error, error}` where `error` is a term containing information about + the error and child specification. 
""" @spec start_child(supervisor, Supervisor.Spec.spec | [term]) :: on_start_child - defdelegate start_child(supervisor, child_spec_or_args), to: :supervisor + def start_child(supervisor, child_spec_or_args) do + call(supervisor, {:start_child, child_spec_or_args}) + end @doc """ - Terminates the given pid or child id. + Terminates the given children, identified by PID or child id. - If the supervisor is not a `simple_one_for_one`, the child id is expected - and the process, if there is one, is terminated; the child specification is + If the supervisor is not a `:simple_one_for_one`, the child id is expected + and the process, if there's one, is terminated; the child specification is kept unless the child is temporary. - In case of a `simple_one_for_one` supervisor, a pid is expected. If the child - specification identifier is given instead of a `pid`, the function will - return `{:error, :simple_one_for_one}`. + In case of a `:simple_one_for_one` supervisor, a PID is expected. If the child + specification identifier is given instead of a `pid`, this function returns + `{:error, :simple_one_for_one}`. A non-temporary child process may later be restarted by the supervisor. The child process can also be restarted explicitly by calling `restart_child/2`. Use `delete_child/2` to remove the child specification. - If successful, the function returns `:ok`. If there is no child specification or - pid, the function returns `{:error, :not_found}`. + If successful, this function returns `:ok`. If there is no child specification + for the given child id or there is no process with the given PID, this + function returns `{:error, :not_found}`. """ @spec terminate_child(supervisor, pid | Supervisor.Spec.child_id) :: :ok | {:error, error} when error: :not_found | :simple_one_for_one - defdelegate terminate_child(supervisor, pid_or_child_id), to: :supervisor + def terminate_child(supervisor, pid_or_child_id) do + call(supervisor, {:terminate_child, pid_or_child_id}) + end @doc """ Deletes the child specification identified by `child_id`. - The corresponding child process must not be running, use `terminate_child/2` - to terminate it. + The corresponding child process must not be running; use `terminate_child/2` + to terminate it if it's running. - If successful, the function returns `:ok`. This function may error with an - appropriate error tuple if the `child_id` is not found, or if the current - process is running or being restarted. + If successful, this function returns `:ok`. This function may return an error + with an appropriate error tuple if the `child_id` is not found, or if the + current process is running or being restarted. - This operation is not supported by `simple_one_for_one` supervisors. + This operation is not supported by `:simple_one_for_one` supervisors. """ @spec delete_child(supervisor, Supervisor.Spec.child_id) :: :ok | {:error, error} when error: :not_found | :simple_one_for_one | :running | :restarting - defdelegate delete_child(supervisor, child_id), to: :supervisor + def delete_child(supervisor, child_id) do + call(supervisor, {:delete_child, child_id}) + end @doc """ Restarts a child process identified by `child_id`. @@ -317,54 +433,59 @@ defmodule Supervisor do Note that for temporary children, the child specification is automatically deleted when the child terminates, and thus it is not possible to restart such children. 
- If the child process start function returns `{:ok, child}` or - `{:ok, child, info}`, the pid is added to the supervisor and the function returns - the same value. + If the child process start function returns `{:ok, child}` or `{:ok, child, info}`, + the PID is added to the supervisor and this function returns the same value. - If the child process start function returns `:ignore`, the pid remains set to - `:undefined` and the function returns `{:ok, :undefined}`. + If the child process start function returns `:ignore`, the PID remains set to + `:undefined` and this function returns `{:ok, :undefined}`. - This function may error with an appropriate error tuple if the `child_id` is not - found, or if the current process is running or being restarted. + This function may return an error with an appropriate error tuple if the + `child_id` is not found, or if the current process is running or being + restarted. If the child process start function returns an error tuple or an erroneous value, - or if it fails, the function returns `{:error, error}`. + or if it fails, this function returns `{:error, error}`. - This operation is not supported by `simple_one_for_one` supervisors. + This operation is not supported by `:simple_one_for_one` supervisors. """ @spec restart_child(supervisor, Supervisor.Spec.child_id) :: {:ok, child} | {:ok, child, term} | {:error, error} when error: :not_found | :simple_one_for_one | :running | :restarting | term - defdelegate restart_child(supervisor, child_id), to: :supervisor + def restart_child(supervisor, child_id) do + call(supervisor, {:restart_child, child_id}) + end @doc """ - Returns a list with information about all children. + Returns a list with information about all children of the given supervisor. Note that calling this function when supervising a large number of children under low memory conditions can cause an out of memory exception. - This function returns a list of tuples containing: + This function returns a list of `{id, child, type, modules}` tuples, where: * `id` - as defined in the child specification or `:undefined` in the case of a `simple_one_for_one` supervisor - * `child` - the pid of the corresponding child process, the atom - `:restarting` if the process is about to be restarted, or `:undefined` if - there is no such process + * `child` - the PID of the corresponding child process, `:restarting` if the + process is about to be restarted, or `:undefined` if there is no such + process + + * `type` - `:worker` or `:supervisor`, as specified by the child specification - * `type` - `:worker` or `:supervisor` as defined in the child specification + * `modules` - as specified by the child specification - * `modules` – as defined in the child specification """ @spec which_children(supervisor) :: [{Supervisor.Spec.child_id | :undefined, child | :restarting, Supervisor.Spec.worker, Supervisor.Spec.modules}] - defdelegate which_children(supervisor), to: :supervisor + def which_children(supervisor) do + call(supervisor, :which_children) + end @doc """ - Returns a map containing count values for the supervisor. + Returns a map containing count values for the given supervisor. 
The map contains the following keys: @@ -373,17 +494,38 @@ defmodule Supervisor do * `:active` - the count of all actively running child processes managed by this supervisor - * `:supervisors` - the count of all supervisors whether or not the child - process is still alive + * `:supervisors` - the count of all supervisors whether or not these + child supervisors are still alive - * `:workers` - the count of all workers, whether or not the child process - is still alive + * `:workers` - the count of all workers, whether or not these child workers + are still alive """ @spec count_children(supervisor) :: - [specs: non_neg_integer, active: non_neg_integer, - supervisors: non_neg_integer, workers: non_neg_integer] + %{specs: non_neg_integer, active: non_neg_integer, + supervisors: non_neg_integer, workers: non_neg_integer} def count_children(supervisor) do - :supervisor.count_children(supervisor) |> :maps.from_list + call(supervisor, :count_children) |> :maps.from_list + end + + @doc """ + Synchronously stops the given supervisor with the given `reason`. + + It returns `:ok` if the supervisor terminates with the given + reason. If it terminates with another reason, the call exits. + + This function keeps OTP semantics regarding error reporting. + If the reason is any other than `:normal`, `:shutdown` or + `{:shutdown, _}`, an error report is logged. + """ + @spec stop(supervisor, reason :: term, timeout) :: :ok + def stop(supervisor, reason \\ :normal, timeout \\ :infinity) do + :gen.stop(supervisor, reason, timeout) + end + + @compile {:inline, call: 2} + + defp call(supervisor, req) do + GenServer.call(supervisor, req, :infinity) end end diff --git a/lib/elixir/lib/supervisor/default.ex b/lib/elixir/lib/supervisor/default.ex index 863bce65907..6e6903db63b 100644 --- a/lib/elixir/lib/supervisor/default.ex +++ b/lib/elixir/lib/supervisor/default.ex @@ -1,13 +1,13 @@ defmodule Supervisor.Default do @moduledoc false - @behaviour :supervisor @doc """ - Supevisor callback that simply returns the given args. + Supervisor callback that simply returns the given args. - This is the supervisor used by `Supervisor.start_link/2`. + This is the supervisor used by `Supervisor.start_link/2` + and others. """ def init(args) do args end -end \ No newline at end of file +end diff --git a/lib/elixir/lib/supervisor/spec.ex b/lib/elixir/lib/supervisor/spec.ex index df9f2625b22..cf57392f2c6 100644 --- a/lib/elixir/lib/supervisor/spec.ex +++ b/lib/elixir/lib/supervisor/spec.ex @@ -1,11 +1,11 @@ defmodule Supervisor.Spec do @moduledoc """ - Convenience functions for defining a supervision specification. + Convenience functions for defining supervisor specifications. ## Example - By using the functions in this module one can define a supervisor - and start it with `Supervisor.start_link/2`: + By using the functions in this module one can specify the children + to be used under a supervisor, started with `Supervisor.start_link/2`: import Supervisor.Spec @@ -16,7 +16,7 @@ defmodule Supervisor.Spec do Supervisor.start_link(children, strategy: :one_for_one) - In many situations, it may be handy to define supervisors backed + Sometimes, it may be handy to define supervisors backed by a module: defmodule MySupervisor do @@ -37,42 +37,35 @@ defmodule Supervisor.Spec do Notice in this case we don't have to explicitly import `Supervisor.Spec` as `use Supervisor` automatically does so. - - Explicit supervisors as above are required when there is a need to: - - 1. Partialy change the supervision tree during hot-code swaps. 
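For illustration, a minimal sketch of the introspection and shutdown calls documented above, run against a hypothetical supervisor pid `sup`:

    # Each entry is a {id, child, type, modules} tuple
    for {id, child, type, _modules} <- Supervisor.which_children(sup) do
      IO.inspect {id, child, type}
    end

    # count_children/1 now returns a map rather than a keyword list
    %{specs: _, active: _, supervisors: _, workers: _} = Supervisor.count_children(sup)

    # Synchronously stop the supervisor; :normal produces no error report
    :ok = Supervisor.stop(sup, :normal, 5_000)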
-
-    2. Define supervisors inside other supervisors.
-
-    3. Perform actions inside the supervision `init/1` callback.
-
-  For example, you may want to start an ETS table that is linked to
-  the supervisor (i.e. if the supervision tree needs to be restarted,
-  the ETS table must be restarted too).
+  Defining a module-based supervisor can be useful, for example,
+  to perform initialization tasks in the `c:init/1` callback.

   ## Supervisor and worker options

-  In the example above, we have defined workers and supervisors
-  and each accepts the following options:
+  In the example above, we defined specs for workers and supervisors.
+  These specs (both for workers as well as supervisors) accept the
+  following options:

     * `:id` - a name used to identify the child specification
       internally by the supervisor; defaults to the given module
-      name
+      name for the child worker/supervisor

     * `:function` - the function to invoke on the child to start it

-    * `:restart` - defines when the child process should restart
+    * `:restart` - an atom that defines when a terminated child process should
+      be restarted (see the "Restart values" section below)

-    * `:shutdown` - defines how a child process should be terminated
+    * `:shutdown` - an atom that defines how a child process should be
+      terminated (see the "Shutdown values" section below)

     * `:modules` - it should be a list with one element `[module]`,
       where module is the name of the callback module only if the
       child process is a `Supervisor` or `GenServer`; if the child
-      process is a `GenEvent`, modules should be `:dynamic`
+      process is a `GenEvent`, `:modules` should be `:dynamic`

-  ### Restart values
+  ### Restart values (:restart)

-  The following restart values are supported:
+  The following restart values are supported in the `:restart` option:

     * `:permanent` - the child process is always restarted

@@ -80,27 +73,35 @@ defmodule Supervisor.Spec do
       when the supervisor's strategy is `:rest_for_one` or `:one_for_all`)

     * `:transient` - the child process is restarted only if it
-      terminates abnormally, i.e. with another exit reason than
+      terminates abnormally, i.e., with an exit reason other than
       `:normal`, `:shutdown` or `{:shutdown, term}`

-  ### Shutdown values
+  Notice that a supervisor that reached its maximum restart intensity will exit
+  with the `:shutdown` reason. In this case the supervisor will only be restarted
+  if its child specification was defined with the `:restart` option set to
+  `:permanent` (the default).
+
+  ### Shutdown values (:shutdown)

-  The following shutdown values are supported:
+  The following shutdown values are supported in the `:shutdown` option:

     * `:brutal_kill` - the child process is unconditionally terminated
-      using `exit(child, :kill)`.
+      using `Process.exit(child, :kill)`

-    * `:infinity` - if the child process is a supervisor, it is a mechanism
-      to give the subtree enough time to shutdown. It can also be used with
-      workers with care.
+    * `:infinity` - if the child process is a supervisor, this is a mechanism
+      to give the subtree enough time to shutdown; it can also be used with
+      workers with care
+
+    * any integer - the value of `:shutdown` can also be any integer meaning
+      that the supervisor tells the child process to terminate by calling
+      `Process.exit(child, :shutdown)` and then waits for an exit signal back.
+ If no exit signal is received within the specified time (the value of this + option, in milliseconds), the child process is unconditionally terminated + using `Process.exit(child, :kill)` - * Finally, it can also be any integer meaning that the supervisor tells - the child process to terminate by calling `Process.exit(child, :shutdown)` - and then waits for an exit signal back. If no exit signal is received - within the specified time (in miliseconds), the child process is - unconditionally terminated using `Process.exit(child, :kill)`. """ + # TODO: Update and provide a digest of strategies once we include DynamicSupervisor. + @typedoc "Supported strategies" @type strategy :: :simple_one_for_one | :one_for_one | :one_for_all | :rest_for_one @@ -108,7 +109,7 @@ defmodule Supervisor.Spec do @type restart :: :permanent | :transient | :temporary @typedoc "Supported shutdown values" - @type shutdown :: :brutal_kill | :infinity | non_neg_integer + @type shutdown :: timeout | :brutal_kill @typedoc "Supported worker values" @type worker :: :worker | :supervisor @@ -131,11 +132,13 @@ defmodule Supervisor.Spec do Receives a list of children (workers or supervisors) to supervise and a set of options. - Returns a tuple containing the supervisor specification. + Returns a tuple containing the supervisor specification. This tuple can be + used as the return value of the `c:init/1` callback when implementing a + module-based supervisor. ## Examples - supervise children, strategy: :one_for_one + supervise(children, strategy: :one_for_one) ## Options @@ -145,31 +148,33 @@ defmodule Supervisor.Spec do in the `Supervisor` module docs. * `:max_restarts` - the maximum amount of restarts allowed in - a time frame. Defaults to 5. + a time frame. Defaults to `3`. * `:max_seconds` - the time frame in which `:max_restarts` applies. - Defaults to 5. + Defaults to `5`. - The `:strategy` option is required and by default maximum 5 restarts - are allowed within 5 seconds. Please check the `Supervisor` module for - a complete description of the available strategies. + The `:strategy` option is required and by default a maximum of 3 restarts is + allowed within 5 seconds. Check the `Supervisor` module for a detailed + description of the available strategies. """ @spec supervise([spec], strategy: strategy, max_restarts: non_neg_integer, - max_seconds: non_neg_integer) :: {:ok, tuple} + max_seconds: pos_integer) :: {:ok, tuple} + # TODO: Make it return a tuple of format {:ok, children, opts} + # TODO: Deprecate once the new tuple format has been established def supervise(children, options) do unless strategy = options[:strategy] do raise ArgumentError, "expected :strategy option to be given" end - maxR = Keyword.get(options, :max_restarts, 5) + maxR = Keyword.get(options, :max_restarts, 3) maxS = Keyword.get(options, :max_seconds, 5) assert_unique_ids(Enum.map(children, &elem(&1, 0))) {:ok, {{strategy, maxR, maxS}, children}} end - defp assert_unique_ids([id|rest]) do + defp assert_unique_ids([id | rest]) do if id in rest do raise ArgumentError, "duplicated id #{inspect id} found in the supervisor specification, " <> @@ -187,7 +192,7 @@ defmodule Supervisor.Spec do Defines the given `module` as a worker which will be started with the given arguments. - worker ExUnit.Runner, [], restart: :permanent + worker(ExUnit.Runner, [], restart: :permanent) By default, the function `start_link` is invoked on the given module. 
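A small sketch tying together the spec options and `supervise/2` defaults described above, inside a hypothetical module-based supervisor (MyApp.Supervisor and MyWorker are placeholder names):

    defmodule MyApp.Supervisor do
      use Supervisor

      def start_link do
        Supervisor.start_link(__MODULE__, [], name: __MODULE__)
      end

      def init([]) do
        children = [
          # restarted only on abnormal exits; given 5000ms to shut down
          worker(MyWorker, [], restart: :transient, shutdown: 5_000)
        ]

        # defaults: at most 3 restarts within 5 seconds
        supervise(children, strategy: :one_for_one, max_restarts: 3, max_seconds: 5)
      end
    end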
Overall, the default values for the options are: @@ -198,8 +203,8 @@ defmodule Supervisor.Spec do shutdown: 5000, modules: [module]] - Check `Supervisor.Spec` module docs for more information on - the options. + Check the documentation for the `Supervisor.Spec` module for more + information on the options. """ @spec worker(module, [term], [restart: restart, shutdown: shutdown, id: term, function: atom, modules: modules]) :: spec @@ -211,7 +216,7 @@ defmodule Supervisor.Spec do Defines the given `module` as a supervisor which will be started with the given arguments. - supervisor ExUnit.Runner, [], restart: :permanent + supervisor(ExUnit.Runner, [], restart: :permanent) By default, the function `start_link` is invoked on the given module. Overall, the default values for the options are: @@ -222,8 +227,8 @@ defmodule Supervisor.Spec do shutdown: :infinity, modules: [module]] - Check `Supervisor.Spec` module docs for more information on - the options. + Check the documentation for the `Supervisor.Spec` module for more + information on the options. """ @spec supervisor(module, [term], [restart: restart, shutdown: shutdown, id: term, function: atom, modules: modules]) :: spec @@ -232,6 +237,7 @@ defmodule Supervisor.Spec do child(:supervisor, module, args, options) end + # TODO: Do and expose proper child validation defp child(type, module, args, options) do id = Keyword.get(options, :id, module) modules = Keyword.get(options, :modules, modules(module)) @@ -243,6 +249,7 @@ defmodule Supervisor.Spec do restart, shutdown, type, modules} end + # TODO: Remove GenEvent when there is no more GenEvent v2.0 defp modules(GenEvent), do: :dynamic defp modules(module), do: [module] end diff --git a/lib/elixir/lib/system.ex b/lib/elixir/lib/system.ex index 6b35d7abf32..d42abb18149 100644 --- a/lib/elixir/lib/system.ex +++ b/lib/elixir/lib/system.ex @@ -1,89 +1,208 @@ defmodule System do @moduledoc """ - The System module provides access to variables used or - maintained by the VM and to functions that interact directly + The `System` module provides functions that interact directly with the VM or the host system. + + ## Time + + The `System` module also provides functions that work with time, + returning different times kept by the system with support for + different time units. + + One of the complexities in relying on system times is that they + may be adjusted. For example, when you enter and leave daylight + saving time, the system clock will be adjusted, often adding + or removing one hour. We call such changes "time warps". In + order to understand how such changes may be harmful, imagine + the following code: + + ## DO NOT DO THIS + prev = System.os_time() + # ... execute some code ... + next = System.os_time() + diff = next - prev + + If, while the code is executing, the system clock changes, + some code that executed in 1 second may be reported as taking + over 1 hour! To address such concerns, the VM provides a + monotonic time via `System.monotonic_time/0` which never + decreases and does not leap: + + ## DO THIS + prev = System.monotonic_time() + # ... execute some code ... + next = System.monotonic_time() + diff = next - prev + + Generally speaking, the VM provides three time measurements: + + * `os_time/0` - the time reported by the OS. This time may be + adjusted forwards or backwards in time with no limitation; + + * `system_time/0` - the VM view of the `os_time/0`. The system time and OS + time may not match in case of time warps although the VM works towards + aligning them. 
This time is not monotonic (i.e., it may decrease) + as its behaviour is configured [by the VM time warp + mode](http://www.erlang.org/doc/apps/erts/time_correction.html#Time_Warp_Modes); + + * `monotonic_time/0` - a monotonically increasing time provided + by the Erlang VM. + + The time functions in this module work in the `:native` unit + (unless specified otherwise), which is OS dependent. Most of + the time, all calculations are done in the `:native` unit, to + avoid loss of precision, with `convert_time_unit/3` being + invoked at the end to convert to a specific time unit like + `:millisecond` or `:microsecond`. See the `t:time_unit/0` type for + more information. + + For a more complete rundown on the VM support for different + times, see the [chapter on time and time + correction](http://www.erlang.org/doc/apps/erts/time_correction.html) + in the Erlang docs. """ - defp strip_re(iodata, pattern) do - :re.replace(iodata, pattern, "", [return: :binary]) + @typedoc """ + The time unit to be passed to functions like `monotonic_time/1` and others. + + The `:second`, `:millisecond`, `:microsecond` and `:nanosecond` time + units controls the return value of the functions that accept a time unit. + + A time unit can also be a strictly positive integer. In this case, it + represents the "parts per second": the time will be returned in `1 / + parts_per_second` seconds. For example, using the `:millisecond` time unit + is equivalent to using `1000` as the time unit (as the time will be returned + in 1/1000 seconds - milliseconds). + + Keep in mind the Erlang API prior to version 19.1 will use `:milli_seconds`, + `:micro_seconds` and `:nano_seconds` as time units although Elixir normalizes + their spelling to match the SI convention. + """ + @type time_unit :: + :second + | :millisecond + | :microsecond + | :nanosecond + | pos_integer + # TODO: Deprecate these in Elixir 2.0 + | :seconds + | :milliseconds + | :microseconds + | :nanoseconds + + @base_dir :filename.join(__DIR__, "../../..") + @version_file :filename.join(@base_dir, "VERSION") + + defp strip(iodata) do + :re.replace(iodata, "^[\s\r\n\t]+|[\s\r\n\t]+$", "", [:global, return: :binary]) end defp read_stripped(path) do case :file.read_file(path) do {:ok, binary} -> - strip_re(binary, "^\s+|\s+$") - _ -> "" + strip(binary) + _ -> + "" end end - # Read and strip the version from the `VERSION` file. + # Read and strip the version from the VERSION file. defmacrop get_version do - case read_stripped(:filename.join(__DIR__, "../../../VERSION")) do + case read_stripped(@version_file) do "" -> raise RuntimeError, message: "could not read the version number from VERSION" data -> data end end - # Tries to run `git describe --always --tags`. In the case of success returns - # the most recent tag. If that is not available, tries to read the commit hash - # from .git/HEAD. If that fails, returns an empty string. - defmacrop get_describe do - dirpath = :filename.join(__DIR__, "../../../.git") - case :file.read_file_info(dirpath) do - {:ok, _} -> - if :os.find_executable('git') do - data = :os.cmd('git describe --always --tags') - strip_re(data, "\n") - else - read_stripped(:filename.join(".git", "HEAD")) - end - _ -> "" - end + # Tries to run "git rev-parse --short HEAD". In the case of success returns + # the short revision hash. If that fails, returns an empty string. 
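Following the guidance above about doing arithmetic in the `:native` unit and converting only at the end, a minimal timing sketch (`do_work/0` is a placeholder):

    prev = System.monotonic_time()
    do_work()
    diff = System.monotonic_time() - prev

    # Convert once, at the end; :millisecond and 1000 are equivalent time units
    elapsed_ms = System.convert_time_unit(diff, :native, :millisecond)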
+ defmacrop get_revision do + null = + case :os.type do + {:win32, _} -> 'NUL' + _ -> '/dev/null' + end + + 'git rev-parse --short HEAD 2> ' + |> Kernel.++(null) + |> :os.cmd() + |> strip end + defp revision, do: get_revision() + # Get the date at compilation time. defmacrop get_date do IO.iodata_to_binary :httpd_util.rfc1123_date end + @doc """ + Returns the endianness. + """ + def endianness do + :erlang.system_info(:endian) + end + + @doc """ + Returns the endianness the system was compiled with. + """ + @endianness :erlang.system_info(:endian) + def compiled_endianness do + @endianness + end + @doc """ Elixir version information. Returns Elixir's version as binary. """ @spec version() :: String.t - def version, do: get_version + def version, do: get_version() @doc """ Elixir build information. - Returns a keyword list with Elixir version, git tag info and compilation date. + Returns a keyword list with Elixir version, Git short revision hash and compilation date. """ @spec build_info() :: map def build_info do - %{version: version, tag: get_describe, date: get_date} + %{build: build(), + date: get_date(), + revision: revision(), + version: version()} + end + + # Returns a string of the build info + defp build do + {:ok, v} = Version.parse(version()) + + cond do + ([] == v.pre) or ("" == revision()) -> + version() + true -> + "#{version()} (#{revision()})" + end end @doc """ - List command line arguments. + Lists command line arguments. Returns the list of command line arguments passed to the program. """ @spec argv() :: [String.t] def argv do - :elixir_code_server.call :argv + :elixir_config.get(:argv) end @doc """ - Modify command line arguments. + Modifies command line arguments. Changes the list of command line arguments. Use it with caution, as it destroys any previous argv information. """ @spec argv([String.t]) :: :ok def argv(args) do - :elixir_code_server.cast({:argv, args}) + :elixir_config.put(:argv, args) end @doc """ @@ -94,18 +213,27 @@ defmodule System do """ def cwd do case :file.get_cwd do - {:ok, base} -> IO.chardata_to_string(base) + {:ok, base} -> IO.chardata_to_string(fix_drive_letter(base)) _ -> nil end end + defp fix_drive_letter([l, ?:, ?/ | rest] = original) when l in ?A..?Z do + case :os.type() do + {:win32, _} -> [l + ?a - ?A, ?:, ?/ | rest] + _ -> original + end + end + + defp fix_drive_letter(original), do: original + @doc """ Current working directory, exception on error. Returns the current working directory or raises `RuntimeError`. """ def cwd! do - cwd || + cwd() || raise RuntimeError, message: "could not get a current working directory, the current location is not accessible" end @@ -113,13 +241,9 @@ defmodule System do User home directory. Returns the user home directory (platform independent). - Returns `nil` if no user home is set. """ def user_home do - case :os.type() do - {:win32, _} -> get_windows_home - _ -> get_unix_home - end + :elixir_config.get(:home) end @doc """ @@ -129,24 +253,10 @@ defmodule System do instead of returning `nil` if no user home is set. """ def user_home! do - user_home || + user_home() || raise RuntimeError, message: "could not find the user home, please set the HOME environment variable" end - defp get_unix_home do - get_env("HOME") - end - - defp get_windows_home do - :filename.absname( - get_env("USERPROFILE") || ( - hd = get_env("HOMEDRIVE") - hp = get_env("HOMEPATH") - hd && hp && hd <> hp - ) - ) - end - @doc ~S""" Writable temporary directory. 
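For illustration, the shape of the values returned by the version and build functions above; the exact strings are, of course, build dependent:

    System.version()
    #=> "1.5.0-dev"

    System.build_info()
    #=> %{build: "1.5.0-dev (abc1234)",
    #     date: "Mon, 01 May 2017 00:00:00 GMT",
    #     revision: "abc1234",
    #     version: "1.5.0-dev"}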
@@ -176,7 +286,7 @@ defmodule System do instead of returning `nil` if no temp dir is set. """ def tmp_dir! do - tmp_dir || + tmp_dir() || raise RuntimeError, message: "could not get a writable temporary directory, " <> "please set the TMPDIR environment variable" end @@ -202,73 +312,46 @@ defmodule System do end @doc """ - Register a program exit handler function. - - Registers a function that will be invoked - at the end of program execution. Useful for - invoking a hook in "script" mode. + Registers a program exit handler function. - The function must receive the exit status code - as an argument. - """ - def at_exit(fun) when is_function(fun, 1) do - :elixir_code_server.cast {:at_exit, fun} - end + Registers a function that will be invoked at the end of program execution. + Useful for invoking a hook in "script" mode. - @doc """ - Execute a system command. + The handler always executes in a different process from the one it was + registered in. As a consequence, any resources managed by the calling process + (ETS tables, open files, etc.) won't be available by the time the handler + function is invoked. - Executes `command` in a command shell of the target OS, - captures the standard output of the command and returns - the result as a binary. - - If `command` is a char list, a char list is returned. - Otherwise a string, correctly encoded in UTF-8, is expected. + The function must receive the exit status code as an argument. """ - @spec cmd(String.t) :: String.t - @spec cmd(char_list) :: char_list - - def cmd(command) when is_list(command) do - :os.cmd(command) - end - - def cmd(command) when is_binary(command) do - List.to_string :os.cmd(String.to_char_list(command)) + def at_exit(fun) when is_function(fun, 1) do + :elixir_config.update :at_exit, &[fun | &1] end @doc """ - Locate an executable on the system. + Locates an executable on the system. This function looks up an executable program given its name using the environment variable PATH on Unix and Windows. It also considers the proper executable extension for each OS, so for Windows it will try to lookup files with `.com`, `.cmd` or similar extensions. - - If `program` is a char list, a char list is returned. - Returns a binary otherwise. """ @spec find_executable(binary) :: binary | nil - @spec find_executable(char_list) :: char_list | nil - - def find_executable(program) when is_list(program) do - :os.find_executable(program) || nil - end - def find_executable(program) when is_binary(program) do - case :os.find_executable(String.to_char_list(program)) do + case :os.find_executable(String.to_charlist(program)) do false -> nil other -> List.to_string(other) end end @doc """ - System environment variables. + Returns all system environment variables. - Returns a list of all environment variables. Each variable is given as a - `{name, value}` tuple where both `name` and `value` are strings. + The returned value is a map containing name-value pairs. + Variable names and their values are strings. """ - @spec get_env() :: %{String.t => String.t} + @spec get_env() :: %{optional(String.t) => String.t} def get_env do Enum.into(:os.getenv, %{}, fn var -> var = IO.chardata_to_string var @@ -278,15 +361,15 @@ defmodule System do end @doc """ - Environment variable value. + Returns the value of the given environment variable. 
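A brief sketch of the exit handler and executable lookup documented above; note that the `at_exit/1` handler runs in a separate process and receives the exit status as its argument:

    System.at_exit(fn status ->
      IO.puts("VM exiting with status #{status}")
    end)

    # Returns the full path as a binary, or nil if not found in PATH
    System.find_executable("elixir")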
- Returns the value of the environment variable - `varname` as a binary, or `nil` if the environment + The returned value of the environment variable + `varname` is a string, or `nil` if the environment variable is undefined. """ - @spec get_env(binary) :: binary | nil + @spec get_env(String.t) :: String.t | nil def get_env(varname) when is_binary(varname) do - case :os.getenv(String.to_char_list(varname)) do + case :os.getenv(String.to_charlist(varname)) do false -> nil other -> List.to_string(other) end @@ -298,31 +381,31 @@ defmodule System do Returns the process identifier of the current Erlang emulator in the format most commonly used by the operating system environment. - See http://www.erlang.org/doc/man/os.html#getpid-0 for more info. + For more information, see [`:os.getpid/0`](http://www.erlang.org/doc/man/os.html#getpid-0). """ @spec get_pid() :: binary def get_pid, do: IO.iodata_to_binary(:os.getpid) @doc """ - Set an environment variable value. + Sets an environment variable value. Sets a new `value` for the environment variable `varname`. """ @spec put_env(binary, binary) :: :ok def put_env(varname, value) when is_binary(varname) and is_binary(value) do - :os.putenv String.to_char_list(varname), String.to_char_list(value) + :os.putenv String.to_charlist(varname), String.to_charlist(value) :ok end @doc """ - Set multiple environment variables. + Sets multiple environment variables. Sets a new value for each environment variable corresponding to each key in `dict`. """ - @spec put_env(Dict.t) :: :ok - def put_env(dict) do - Enum.each dict, fn {key, val} -> put_env key, val end + @spec put_env(Enumerable.t) :: :ok + def put_env(enum) do + Enum.each enum, fn {key, val} -> put_env key, val end end @doc """ @@ -332,7 +415,7 @@ defmodule System do """ @spec delete_env(String.t) :: :ok def delete_env(varname) do - :os.unsetenv(String.to_char_list(varname)) + :os.unsetenv(String.to_charlist(varname)) :ok end @@ -350,10 +433,13 @@ defmodule System do end @doc """ - Halt the Erlang runtime system. + Immediately halts the Erlang runtime system. - Halts the Erlang runtime system where the argument `status` must be a - non-negative integer, the atom `:abort` or a binary. + Terminates the Erlang runtime system without properly shutting down + applications and ports. Please see `stop/1` for a careful shutdown of the + system. + + `status` must be a non-negative integer, the atom `:abort` or a binary. * If an integer, the runtime system exits with the integer value which is returned to the operating system. @@ -361,13 +447,13 @@ defmodule System do * If `:abort`, the runtime system aborts producing a core dump, if that is enabled in the operating system. - * If a string, an erlang crash dump is produced with status as slogan, + * If a string, an Erlang crash dump is produced with status as slogan, and then the runtime system exits with status code 1. Note that on many platforms, only the status codes 0-255 are supported by the operating system. - For more information, check: http://www.erlang.org/doc/man/erlang.html#halt-1 + For more information, see [`:erlang.halt/1`](http://www.erlang.org/doc/man/erlang.html#halt-1). 
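For completeness, the environment-variable round trip described above (the variable names are chosen only for the example):

    :ok = System.put_env("MY_APP_MODE", "test")
    "test" = System.get_env("MY_APP_MODE")

    # put_env/1 accepts any enumerable of key-value pairs
    :ok = System.put_env(%{"A" => "1", "B" => "2"})

    :ok = System.delete_env("MY_APP_MODE")
    nil = System.get_env("MY_APP_MODE")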
## Examples @@ -376,7 +462,6 @@ defmodule System do System.halt(:abort) """ - @spec halt() :: no_return @spec halt(non_neg_integer | binary | :abort) :: no_return def halt(status \\ 0) @@ -385,6 +470,405 @@ defmodule System do end def halt(status) when is_binary(status) do - :erlang.halt(String.to_char_list(status)) + :erlang.halt(String.to_charlist(status)) + end + + @doc """ + Carefully stops the Erlang runtime system. + + All applications are taken down smoothly, all code is unloaded, and all ports + are closed before the system terminates by calling `halt/1`. + + `status` must be a non-negative integer value which is returned by the + runtime system to the operating system. + + Note that on many platforms, only the status codes 0-255 are supported + by the operating system. + + For more information, see [`:init.stop/1`](http://erlang.org/doc/man/init.html#stop-1). + + ## Examples + + System.stop(0) + System.stop(1) + + """ + @spec stop(non_neg_integer | binary) :: no_return + def stop(status \\ 0) + + def stop(status) when is_integer(status) do + :init.stop(status) + end + + def stop(status) when is_binary(status) do + :init.stop(String.to_charlist(status)) + end + + @doc ~S""" + Executes the given `command` with `args`. + + `command` is expected to be an executable available in PATH + unless an absolute path is given. + + `args` must be a list of binaries which the executable will receive + as its arguments as is. This means that: + + * environment variables will not be interpolated + * wildcard expansion will not happen (unless `Path.wildcard/2` is used + explicitly) + * arguments do not need to be escaped or quoted for shell safety + + This function returns a tuple containing the collected result + and the command exit status. + + Internally, this function uses a `Port` for interacting with the + outside world. However, if you plan to run a long-running program, + ports guarantee stdin/stdout devices will be closed but it does not + automatically terminate the program. The documentation for the + `Port` module describes this problem and possible solutions under + the "Zombie processes" section. + + ## Examples + + iex> System.cmd "echo", ["hello"] + {"hello\n", 0} + + iex> System.cmd "echo", ["hello"], env: [{"MIX_ENV", "test"}] + {"hello\n", 0} + + iex> System.cmd "echo", ["hello"], into: IO.stream(:stdio, :line) + hello + {%IO.Stream{}, 0} + + ## Options + + * `:into` - injects the result into the given collectable, defaults to `""` + * `:cd` - the directory to run the command in + * `:env` - an enumerable of tuples containing environment key-value as binary + * `:arg0` - sets the command arg0 + * `:stderr_to_stdout` - redirects stderr to stdout when `true` + * `:parallelism` - when `true`, the VM will schedule port tasks to improve + parallelism in the system. If set to `false`, the VM will try to perform + commands immediately, improving latency at the expense of parallelism. + The default can be set on system startup by passing the "+spp" argument + to `--erl`. + + ## Error reasons + + If invalid arguments are given, `ArgumentError` is raised by + `System.cmd/3`. `System.cmd/3` also expects a strict set of + options and will raise if unknown or invalid options are given. 
+ + Furthermore, `System.cmd/3` may fail with one of the POSIX reasons + detailed below: + + * `:system_limit` - all available ports in the Erlang emulator are in use + + * `:enomem` - there was not enough memory to create the port + + * `:eagain` - there are no more available operating system processes + + * `:enametoolong` - the external command given was too long + + * `:emfile` - there are no more available file descriptors + (for the operating system process that the Erlang emulator runs in) + + * `:enfile` - the file table is full (for the entire operating system) + + * `:eacces` - the command does not point to an executable file + + * `:enoent` - the command does not point to an existing file + + ## Shell commands + + If you desire to execute a trusted command inside a shell, with pipes, + redirecting and so on, please check + [`:os.cmd/1`](http://www.erlang.org/doc/man/os.html#cmd-1). + """ + @spec cmd(binary, [binary], Keyword.t) :: + {Collectable.t, exit_status :: non_neg_integer} + def cmd(command, args, opts \\ []) when is_binary(command) and is_list(args) do + cmd = String.to_charlist(command) + + cmd = + if Path.type(cmd) == :absolute do + cmd + else + :os.find_executable(cmd) || :erlang.error(:enoent, [command, args, opts]) + end + + {into, opts} = cmd_opts(opts, [:use_stdio, :exit_status, :binary, :hide, args: args], "") + {initial, fun} = Collectable.into(into) + try do + do_cmd Port.open({:spawn_executable, cmd}, opts), initial, fun + catch + kind, reason -> + stacktrace = System.stacktrace + fun.(initial, :halt) + :erlang.raise(kind, reason, stacktrace) + else + {acc, status} -> {fun.(acc, :done), status} + end + end + + defp do_cmd(port, acc, fun) do + receive do + {^port, {:data, data}} -> + do_cmd(port, fun.(acc, {:cont, data}), fun) + {^port, {:exit_status, status}} -> + {acc, status} + end + end + + defp cmd_opts([{:into, any} | t], opts, _into), + do: cmd_opts(t, opts, any) + + defp cmd_opts([{:cd, bin} | t], opts, into) when is_binary(bin), + do: cmd_opts(t, [{:cd, bin} | opts], into) + + defp cmd_opts([{:arg0, bin} | t], opts, into) when is_binary(bin), + do: cmd_opts(t, [{:arg0, bin} | opts], into) + + defp cmd_opts([{:stderr_to_stdout, true} | t], opts, into), + do: cmd_opts(t, [:stderr_to_stdout | opts], into) + + defp cmd_opts([{:stderr_to_stdout, false} | t], opts, into), + do: cmd_opts(t, opts, into) + + defp cmd_opts([{:parallelism, bool} | t], opts, into) when is_boolean(bool), + do: cmd_opts(t, [{:parallelism, bool} | opts], into) + + defp cmd_opts([{:env, enum} | t], opts, into), + do: cmd_opts(t, [{:env, validate_env(enum)} | opts], into) + + defp cmd_opts([{key, val} | _], _opts, _into), + do: raise(ArgumentError, "invalid option #{inspect key} with value #{inspect val}") + + defp cmd_opts([], opts, into), + do: {into, opts} + + defp validate_env(enum) do + Enum.map enum, fn + {k, nil} -> + {String.to_charlist(k), false} + {k, v} -> + {String.to_charlist(k), String.to_charlist(v)} + other -> + raise ArgumentError, "invalid environment key-value #{inspect other}" + end + end + + @doc """ + Returns the current monotonic time in the `:native` time unit. + + This time is monotonically increasing and starts in an unspecified + point in time. + + Inlined by the compiler into `:erlang.monotonic_time/0`. + """ + @spec monotonic_time() :: integer + def monotonic_time do + :erlang.monotonic_time() + end + + @doc """ + Returns the current monotonic time in the given time unit. 
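Building on the options listed above, a sketch that runs a command in a different directory, with extra environment and stderr folded into stdout; the command and path are placeholders:

    {output, exit_status} =
      System.cmd("mix", ["test"],
                 cd: "/path/to/project",
                 env: [{"MIX_ENV", "test"}],
                 stderr_to_stdout: true)

    if exit_status != 0 do
      IO.puts("command failed:\n" <> output)
    end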
+ + This time is monotonically increasing and starts in an unspecified + point in time. + """ + @spec monotonic_time(time_unit) :: integer + def monotonic_time(unit) do + :erlang.monotonic_time(normalize_time_unit(unit)) + end + + @doc """ + Returns the current system time in the `:native` time unit. + + It is the VM view of the `os_time/0`. They may not match in + case of time warps although the VM works towards aligning + them. This time is not monotonic. + + Inlined by the compiler into `:erlang.system_time/0`. + """ + @spec system_time() :: integer + def system_time do + :erlang.system_time() + end + + @doc """ + Returns the current system time in the given time unit. + + It is the VM view of the `os_time/0`. They may not match in + case of time warps although the VM works towards aligning + them. This time is not monotonic. + """ + @spec system_time(time_unit) :: integer + def system_time(unit) do + :erlang.system_time(normalize_time_unit(unit)) + end + + @doc """ + Converts `time` from time unit `from_unit` to time unit `to_unit`. + + The result is rounded via the floor function. + + `convert_time_unit/3` accepts an additional time unit (other than the + ones in the `t:time_unit/0` type) called `:native`. `:native` is the time + unit used by the Erlang runtime system. It's determined when the runtime + starts and stays the same until the runtime is stopped. To determine what + the `:native` unit amounts to in a system, you can call this function to + convert 1 second to the `:native` time unit (i.e., + `System.convert_time_unit(1, :second, :native)`). + """ + @spec convert_time_unit(integer, time_unit | :native, time_unit | :native) :: integer + def convert_time_unit(time, from_unit, to_unit) do + :erlang.convert_time_unit(time, normalize_time_unit(from_unit), normalize_time_unit(to_unit)) + end + + @doc """ + Returns the current time offset between the Erlang VM monotonic + time and the Erlang VM system time. + + The result is returned in the `:native` time unit. + + See `time_offset/1` for more information. + + Inlined by the compiler into `:erlang.time_offset/0`. + """ + @spec time_offset() :: integer + def time_offset do + :erlang.time_offset() + end + + @doc """ + Returns the current time offset between the Erlang VM monotonic + time and the Erlang VM system time. + + The result is returned in the given time unit `unit`. The returned + offset, added to an Erlang monotonic time (e.g., obtained with + `monotonic_time/1`), gives the Erlang system time that corresponds + to that monotonic time. + """ + @spec time_offset(time_unit) :: integer + def time_offset(unit) do + :erlang.time_offset(normalize_time_unit(unit)) + end + + @doc """ + Returns the current OS time. + + The result is returned in the `:native` time unit. + + This time may be adjusted forwards or backwards in time + with no limitation and is not monotonic. + + Inlined by the compiler into `:os.system_time/0`. + """ + @spec os_time() :: integer + def os_time do + :os.system_time() + end + + @doc """ + Returns the current OS time in the given time `unit`. + + This time may be adjusted forwards or backwards in time + with no limitation and is not monotonic. + """ + @spec os_time(time_unit) :: integer + def os_time(unit) do + :os.system_time(normalize_time_unit(unit)) + end + + @doc """ + Returns the OTP release number. + """ + @spec otp_release :: String.t + def otp_release do + :erlang.list_to_binary :erlang.system_info(:otp_release) + end + + @doc """ + Returns the number of schedulers in the VM. 
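To make the relationship between the clocks above concrete, a sketch showing that the time offset added to a monotonic time gives the corresponding system time (the values may drift slightly between the two calls):

    mono   = System.monotonic_time()
    offset = System.time_offset()

    # Approximately equal to System.system_time() read at the same instant
    _system = mono + offset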
+ """ + @spec schedulers :: pos_integer + def schedulers do + :erlang.system_info(:schedulers) + end + + @doc """ + Returns the number of schedulers online in the VM. + """ + @spec schedulers_online :: pos_integer + def schedulers_online do + :erlang.system_info(:schedulers_online) + end + + @doc """ + Generates and returns an integer that is unique in the current runtime + instance. + + "Unique" means that this function, called with the same list of `modifiers`, + will never return the same integer more than once on the current runtime + instance. + + If `modifiers` is `[]`, then a unique integer (that can be positive or negative) is returned. + Other modifiers can be passed to change the properties of the returned integer: + + * `:positive` - the returned integer is guaranteed to be positive. + * `:monotonic` - the returned integer is monotonically increasing. This + means that, on the same runtime instance (but even on different + processes), integers returned using the `:monotonic` modifier will always + be strictly less than integers returned by successive calls with the + `:monotonic` modifier. + + All modifiers listed above can be combined; repeated modifiers in `modifiers` + will be ignored. + + Inlined by the compiler into `:erlang.unique_integer/1`. + """ + @spec unique_integer([:positive | :monotonic]) :: integer + def unique_integer(modifiers \\ []) do + :erlang.unique_integer(modifiers) + end + + defp normalize_time_unit(:native), + do: :native + + # TODO: Remove these mappings once Elixir requires Erlang/OTP 19.1 + defp normalize_time_unit(:second), + do: :seconds + defp normalize_time_unit(:millisecond), + do: :milli_seconds + defp normalize_time_unit(:microsecond), + do: :micro_seconds + defp normalize_time_unit(:nanosecond), + do: :nano_seconds + + # TODO: Warn on Elixir 1.5 + defp normalize_time_unit(:seconds), + do: :seconds + defp normalize_time_unit(:milliseconds), + do: :milli_seconds + defp normalize_time_unit(:microseconds), + do: :micro_seconds + defp normalize_time_unit(:nanoseconds), + do: :nano_seconds + + defp normalize_time_unit(unit) when is_integer(unit) and unit > 0, + do: unit + + # TODO: Warn on Elixir 1.5 + defp normalize_time_unit(erlang_unit) + when erlang_unit in [:milli_seconds, :micro_seconds, :nano_seconds] do + erlang_unit + end + + defp normalize_time_unit(other) do + raise ArgumentError, + "unsupported time unit. Expected :second, :millisecond, " <> + ":microsecond, :nanosecond, or a positive integer, " <> + "got #{inspect other}" end end diff --git a/lib/elixir/lib/task.ex b/lib/elixir/lib/task.ex index 0ba6df0cf89..798917c6d5c 100644 --- a/lib/elixir/lib/task.ex +++ b/lib/elixir/lib/task.ex @@ -1,102 +1,160 @@ defmodule Task do @moduledoc """ - Conveniences for spawning and awaiting for tasks. + Conveniences for spawning and awaiting tasks. Tasks are processes meant to execute one particular - action throughout their life-cycle, often with little or no + action throughout their lifetime, often with little or no communication with other processes. The most common use case - for tasks is to compute a value asynchronously: + for tasks is to convert sequential code into concurrent code + by computing a value asynchronously: task = Task.async(fn -> do_some_work() end) res = do_some_other_work() res + Task.await(task) - Tasks spawned with `async` can be awaited on by its caller - process (and only its caller) as shown in the example above. 
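A short illustration of the `unique_integer/1` modifiers described above:

    # May be negative, but never repeats within this runtime instance
    System.unique_integer()

    # Strictly increasing positive integers, across all processes
    id1 = System.unique_integer([:positive, :monotonic])
    id2 = System.unique_integer([:positive, :monotonic])
    true = id2 > id1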
+ Tasks spawned with `async` can be awaited on by their caller + process (and only their caller) as shown in the example above. They are implemented by spawning a process that sends a message to the caller once the given computation is performed. - Besides `async/1` and `await/2`, tasks can also be - started as part of supervision trees and dynamically spawned - in remote nodes. We will explore all three scenarios next. + Besides `async/1` and `await/2`, tasks can also be + started as part of a supervision tree and dynamically spawned + on remote nodes. We will explore all three scenarios next. ## async and await - The most common way to spawn a task is with `Task.async/1`. A new - process will be created, linked and monitored by the caller. Once - the task action finishes, a message will be sent to the caller - with the result. + One of the common uses of tasks is to convert sequential code + into concurrent code with `Task.async/1` while keeping its semantics. + When invoked, a new process will be created, linked and monitored + by the caller. Once the task action finishes, a message will be sent + to the caller with the result. - `Task.await/2` is used to read the message sent by the task. On - `await`, Elixir will also setup a monitor to verify if the process - exited for any abnormal reason (or in case exits are being - trapped by the caller). + `Task.await/2` is used to read the message sent by the task. - ## Supervised tasks + There are two important things to consider when using `async`: + + 1. If you are using async tasks, you **must await** a reply + as they are *always* sent. If you are not expecting a reply, + consider using `Task.start_link/1` detailed below. + + 2. async tasks link the caller and the spawned process. This + means that, if the caller crashes, the task will crash + too and vice-versa. This is on purpose: if the process + meant to receive the result no longer exists, there is + no purpose in completing the computation. - It is also possible to spawn a task inside a supervision tree - with `start_link/1` and `start_link/3`: + If this is not desired, use `Task.start/1` or consider starting + the task under a `Task.Supervisor` using `async_nolink` or + `start_child`. - Task.start_link(fn -> IO.puts "ok" end) + `Task.yield/2` is an alternative to `await/2` where the caller will + temporarily block, waiting until the task replies or crashes. If the + result does not arrive within the timeout, it can be called again at a + later moment. This allows checking for the result of a task multiple + times. If a reply does not arrive within the desired time, + `Task.shutdown/2` can be used to stop the task. - Such tasks can be mounted in your supervision tree as: + ## Supervised tasks + + It is also possible to spawn a task under a supervisor: import Supervisor.Spec children = [ + # worker(Task, [fn -> IO.puts "ok" end]) ] + Internally the supervisor will invoke `Task.start_link/1`. + Since these tasks are supervised and not directly linked to the caller, they cannot be awaited on. Note `start_link/1`, unlike `async/1`, returns `{:ok, pid}` (which is the result expected by supervision trees). - ## Supervision trees + By default, most supervision strategies will try to restart + a worker after it exits regardless of the reason. If you design + the task to terminate normally (as in the example with `IO.puts/2` + above), consider passing `restart: :transient` in the options + to `Supervisor.Spec.worker/3`. 
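To make the supervised-task advice above concrete, a sketch of mounting a one-off task under a supervisor with `restart: :transient` so its normal exit is not treated as a failure:

    import Supervisor.Spec

    children = [
      # The task runs IO.puts/1 once and terminates normally;
      # :transient prevents the supervisor from restarting it on normal exit
      worker(Task, [fn -> IO.puts "ok" end], restart: :transient)
    ]

    Supervisor.start_link(children, strategy: :one_for_one)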
- The `Task.Supervisor` module allows developers to start supervisors - that dynamically supervise tasks: + ## Dynamically supervised tasks - {:ok, pid} = Task.Supervisor.start_link() - Task.Supervisor.async(pid, fn -> do_work() end) - - `Task.Supervisor` also makes it possible to spawn tasks in remote nodes as - long as the supervisor is registered locally or globally: + The `Task.Supervisor` module allows developers to dynamically + create multiple supervised tasks. - # In the remote node - Task.Supervisor.start_link(name: :tasks_sup) + A short example is: - # In the client - Task.Supervisor.async({:tasks_sup, :remote@local}, fn -> do_work() end) + {:ok, pid} = Task.Supervisor.start_link() + task = Task.Supervisor.async(pid, fn -> + # Do something + end) + Task.await(task) - `Task.Supervisor` is more often started in your supervision tree as: + However, in the majority of cases, you want to add the task supervisor + to your supervision tree: import Supervisor.Spec children = [ - supervisor(Task.Supervisor, [[name: :tasks_sup]]) + supervisor(Task.Supervisor, [[name: MyApp.TaskSupervisor]]) ] - Check `Task.Supervisor` for other operations supported by the Task supervisor. + Now you can dynamically start supervised tasks: + + Task.Supervisor.start_child(MyApp.TaskSupervisor, fn -> + # Do something + end) + + Or even use the async/await pattern: + + Task.Supervisor.async(MyApp.TaskSupervisor, fn -> + # Do something + end) |> Task.await() + + Finally, check `Task.Supervisor` for other supported operations. + + ## Distributed tasks + + Since Elixir provides a Task supervisor, it is easy to use one + to dynamically spawn tasks across nodes: + + # On the remote node + Task.Supervisor.start_link(name: MyApp.DistSupervisor) + + # On the client + Task.Supervisor.async({MyApp.DistSupervisor, :remote@local}, + MyMod, :my_fun, [arg1, arg2, arg3]) + + Note that, when working with distributed tasks, one should use the `Task.Supervisor.async/4` function + that expects explicit module, function and arguments, instead of `Task.Supervisor.async/2` that + works with anonymous functions. That's because anonymous functions expect + the same module version to exist on all involved nodes. Check the `Agent` module + documentation for more information on distributed processes as the limitations + described there apply to the whole ecosystem. """ @doc """ The Task struct. - It contains two fields: + It contains these fields: - * `:pid` - the process reference of the task process; it may be a pid - or a tuple containing the process and node names + * `:pid` - the PID of the task process; `nil` if the task does + not use a task process * `:ref` - the task monitor reference + * `:owner` - the PID of the process that started the task + """ - defstruct pid: nil, ref: nil + defstruct pid: nil, ref: nil, owner: nil + + @type t :: %__MODULE__{} @doc """ Starts a task as part of a supervision tree. """ - @spec start_link(fun) :: {:ok, pid} + @spec start_link((() -> any)) :: {:ok, pid} def start_link(fun) do start_link(:erlang, :apply, [fun, []]) end @@ -106,67 +164,270 @@ defmodule Task do """ @spec start_link(module, atom, [term]) :: {:ok, pid} def start_link(mod, fun, args) do - Task.Supervised.start_link(get_info(self), {mod, fun, args}) + Task.Supervised.start_link(get_info(self()), {mod, fun, args}) + end + + @doc """ + Starts a task. + + This is only used when the task is used for side-effects + (i.e. no interest in the returned result) and it should not + be linked to the current process. 
+ """ + @spec start((() -> any)) :: {:ok, pid} + def start(fun) do + start(:erlang, :apply, [fun, []]) end @doc """ - Starts a task that can be awaited on. + Starts a task. + + This is only used when the task is used for side-effects + (i.e. no interest in the returned result) and it should not + be linked to the current process. + """ + @spec start(module, atom, [term]) :: {:ok, pid} + def start(mod, fun, args) do + Task.Supervised.start(get_info(self()), {mod, fun, args}) + end + + @doc """ + Starts a task that must be awaited on. This function spawns a process that is linked to and monitored by the caller process. A `Task` struct is returned containing the relevant information. - ## Task's message format + Read the `Task` module documentation for more info on general + usage of `async/1` and `async/3`. - The reply sent by the task will be in the format `{ref, msg}`, - where `ref` is the monitoring reference held by the task. + See also `async/3`. """ - @spec async(fun) :: t + @spec async((() -> any)) :: t def async(fun) do async(:erlang, :apply, [fun, []]) end @doc """ - Starts a task that can be awaited on. + Starts a task that must be awaited on. + + A `Task` struct is returned containing the relevant information. + Developers must eventually call `Task.await/2` or `Task.yield/2` + followed by `Task.shutdown/2` on the returned task. + + Read the `Task` module documentation for more info on general + usage of `async/1` and `async/3`. - Similar to `async/1`, but the task is specified by the given - module, function and arguments. + ## Linking + + This function spawns a process that is linked to and monitored + by the caller process. The linking part is important because it + aborts the task if the parent process dies. It also guarantees + the code before async/await has the same properties after you + add the async call. For example, imagine you have this: + + x = heavy_fun() + y = some_fun() + x + y + + Now you want to make the `heavy_fun()` async: + + x = Task.async(&heavy_fun/0) + y = some_fun() + Task.await(x) + y + + As before, if `heavy_fun/0` fails, the whole computation will + fail, including the parent process. If you don't want the task + to fail then you must change the `heavy_fun/0` code in the + same way you would achieve it if you didn't have the async call. + For example, to either return `{:ok, val} | :error` results or, + in more extreme cases, by using `try/rescue`. In other words, + an asynchronous task should be thought of as an extension of a + process rather than a mechanism to isolate it from all errors. + + If you don't want to link the caller to the task, then you + must use a supervised task with `Task.Supervisor` and call + `Task.Supervisor.async_nolink/2`. + + In any case, avoid any of the following: + + * Setting `:trap_exit` to `true` - trapping exits should be + used only in special circumstances as it would make your + process immune to not only exits from the task but from + any other processes. + + Moreover, even when trapping exits, calling `await` will + still exit if the task has terminated without sending its + result back. + + * Unlinking the task process started with `async`/`await`. + If you unlink the processes and the task does not belong + to any supervisor, you may leave dangling tasks in case + the parent dies. + + ## Message format + + The reply sent by the task will be in the format `{ref, result}`, + where `ref` is the monitor reference held by the task struct + and `result` is the return value of the task function. 
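Given the message format described above, a sketch of receiving the reply by hand instead of calling `await/2` (`compute/0` is a placeholder):

    %Task{ref: ref} = Task.async(fn -> compute() end)

    receive do
      {^ref, result} ->
        # Drop the monitor and any pending :DOWN message
        Process.demonitor(ref, [:flush])
        {:ok, result}

      {:DOWN, ^ref, :process, _pid, reason} ->
        {:error, reason}
    end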
""" @spec async(module, atom, [term]) :: t def async(mod, fun, args) do mfa = {mod, fun, args} - pid = :proc_lib.spawn_link(Task.Supervised, :async, [self, get_info(self), mfa]) + owner = self() + pid = Task.Supervised.spawn_link(owner, get_info(owner), mfa) ref = Process.monitor(pid) - send(pid, {self(), ref}) - %Task{pid: pid, ref: ref} + send(pid, {owner, ref}) + %Task{pid: pid, ref: ref, owner: owner} + end + + @doc """ + Returns a stream that runs the given `module`, `function`, and `args` + concurrently on each item in `enumerable`. + + Each item will be prepended to the given `args` and processed by its + own task. The tasks will be linked to an intermediate process that is + then linked to the current process. This means a failure in a task + terminates the current process and a failure in the current process + terminates all tasks. + + When streamed, each task will emit `{:ok, val}` upon successful + completion or `{:exit, val}` if the caller is trapping exits. Results + are emitted in the same order as the original `enumerable`. + + The level of concurrency can be controlled via the `:max_concurrency` + option and defaults to `System.schedulers_online/0`. A timeout + can also be given as an option representing the maximum amount of + time to wait without a task reply. + + Finally, consider using `Task.Supervisor.async_stream/6` to start tasks + under a supervisor. If you find yourself trapping exits to handle exits + inside the async stream, consider using `Task.Supervisor.async_stream_nolink/6` + to start tasks that are not linked to the current process. + + ## Options + + * `:max_concurrency` - sets the maximum number of tasks to run + at the same time. Defaults to `System.schedulers_online/0`. + * `:timeout` - the maximum amount of time (in milliseconds) each + task is allowed to execute for. Defaults to `5000`. + * `:on_timeout` - what do to when a task times out. The possible + values are: + * `:exit` (default) - the process that spawned the tasks exits. + * `:kill_task` - the task that timed out is killed. The value + emitted for that task is `{:exit, :timeout}`. + + ## Example + + Let's build a stream and then enumerate it: + + stream = Task.async_stream(collection, Mod, :expensive_fun, []) + Enum.to_list(stream) + + The concurrency can be increased or decreased using the `:max_concurrency` + option. For example, if the tasks are IO heavy, the value can be increased: + + max_concurrency = System.schedulers_online * 2 + stream = Task.async_stream(collection, Mod, :expensive_fun, [], max_concurrency: max_concurrency) + Enum.to_list(stream) + + """ + @spec async_stream(Enumerable.t, module, atom, [term], Keyword.t) :: Enumerable.t + def async_stream(enumerable, module, function, args, options \\ []) + when is_atom(module) and is_atom(function) and is_list(args) do + build_stream(enumerable, {module, function, args}, options) end - defp get_info(self) do - {node(), - case Process.info(self, :registered_name) do - {:registered_name, []} -> self() - {:registered_name, name} -> name - end} + @doc """ + Returns a stream that runs the given function `fun` concurrently + on each item in `enumerable`. + + Each `enumerable` item is passed as argument to the given function `fun` and + processed by its own task. The tasks will be linked to the current process, + similarly to `async/1`. + + ## Example + + Count the codepoints in each string asynchronously, then add the counts together using reduce. 
+ + iex> strings = ["long string", "longer string", "there are many of these"] + iex> stream = Task.async_stream(strings, fn text -> text |> String.codepoints |> Enum.count end) + iex> Enum.reduce(stream, 0, fn {:ok, num}, acc -> num + acc end) + 47 + + See `async_stream/5` for discussion, options, and more examples. + """ + @spec async_stream(Enumerable.t, (term -> term), Keyword.t) :: Enumerable.t + def async_stream(enumerable, fun, options \\ []) when is_function(fun, 1) do + build_stream(enumerable, fun, options) + end + + defp build_stream(enumerable, fun, options) do + &Task.Supervised.stream(enumerable, &1, &2, fun, options, fn owner, mfa -> + {:link, Task.Supervised.spawn_link(owner, get_info(owner), mfa)} + end) + end + + # Returns a tuple with the node where this is executed and either the + # registered name of the given pid or the pid of where this is executed. Used + # when exiting from tasks to print out from where the task was started. + defp get_info(pid) do + self_or_name = + case Process.info(pid, :registered_name) do + {:registered_name, []} -> self() + {:registered_name, name} -> name + end + + {node(), self_or_name} end @doc """ - Awaits for a task reply. + Awaits a task reply and returns it. A timeout, in milliseconds, can be given with default value of `5000`. In case the task process dies, this function will exit with the same reason as the task. + + If the timeout is exceeded, `await` will exit; however, + the task will continue to run. When the calling process exits, its + exit signal will terminate the task if it is not trapping exits. + + This function assumes the task's monitor is still active or the monitor's + `:DOWN` message is in the message queue. If it has been demonitored, or the + message already received, this function will wait for the duration of the + timeout awaiting the message. + + This function can only be called once for any given task. If you want + to be able to check multiple times if a long-running task has finished + its computation, use `yield/2` instead. + + ## Compatibility with OTP behaviours + + It is not recommended to `await` a long-running task inside an OTP + behaviour such as `GenServer`. Instead, you should match on the message + coming from a task inside your `GenServer.handle_info/2` callback. + + ## Examples + + iex> task = Task.async(fn -> 1 + 1 end) + iex> Task.await(task) + 2 + """ @spec await(t, timeout) :: term | no_return - def await(%Task{ref: ref}=task, timeout \\ 5000) do + def await(task, timeout \\ 5000) + + def await(%Task{owner: owner} = task, _) when owner != self() do + raise ArgumentError, invalid_owner_error(task) + end + + def await(%Task{ref: ref} = task, timeout) do receive do {^ref, reply} -> Process.demonitor(ref, [:flush]) reply - {:DOWN, ^ref, _, _, :noconnection} -> - mfa = {__MODULE__, :await, [task, timeout]} - exit({{:nodedown, node(task.pid)}, mfa}) - {:DOWN, ^ref, _, _, reason} -> - exit({reason, {__MODULE__, :await, [task, timeout]}}) + {:DOWN, ^ref, _, proc, reason} -> + exit({reason(reason, proc), {__MODULE__, :await, [task, timeout]}}) after timeout -> Process.demonitor(ref, [:flush]) @@ -174,47 +435,324 @@ defmodule Task do end end - @doc """ - Receives a group of tasks and a message and finds - a task that matches the given message. - - This function returns a tuple with the task and the - returned value in case the message matches a task that - exited with success, it raises in case the found task - failed or `nil` if no task was found. 
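A sketch combining the `async_stream` options discussed above; `urls` and `fetch/1` are placeholders, and entries that exceed the timeout are dropped instead of crashing the caller:

    urls
    |> Task.async_stream(fn url -> fetch(url) end,
                         max_concurrency: 10,
                         timeout: 2_000,
                         on_timeout: :kill_task)
    |> Enum.map(fn
         {:ok, body} -> body
         {:exit, :timeout} -> nil
       end)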
- - This function is useful in situations where multiple - tasks are spawned and their results are collected - later on. For example, a `GenServer` can spawn tasks, - store the tasks in a list and later use `Task.find/2` - to see if incoming messages are from any of the tasks. - """ - @spec find([t], any) :: {term, t} | nil | no_return - def find(tasks, msg) - + @doc false + # TODO: Remove on 2.0 + # (hard-deprecated in elixir_dispatch) def find(tasks, {ref, reply}) when is_reference(ref) do Enum.find_value tasks, fn - %Task{ref: task_ref} = t when ref == task_ref -> + %Task{ref: ^ref} = task -> Process.demonitor(ref, [:flush]) - {reply, t} + {reply, task} %Task{} -> nil end end - def find(tasks, {:DOWN, ref, _, _, reason} = msg) when is_reference(ref) do - find = fn(%Task{ref: task_ref}) -> task_ref == ref end - case Enum.find(tasks, find) do - %Task{pid: pid} when reason == :noconnection -> - exit({{:nodedown, node(pid)}, {__MODULE__, :find, [tasks, msg]}}) - %Task{} -> - exit({reason, {__MODULE__, :find, [tasks, msg]}}) - nil -> - nil + def find(tasks, {:DOWN, ref, _, proc, reason} = msg) when is_reference(ref) do + find = fn %Task{ref: task_ref} -> task_ref == ref end + if Enum.find(tasks, find) do + exit({reason(reason, proc), {__MODULE__, :find, [tasks, msg]}}) end end def find(_tasks, _msg) do nil end + + @doc ~S""" + Temporarily blocks the current process waiting for a task reply. + + Returns `{:ok, reply}` if the reply is received, `nil` if + no reply has arrived, or `{:exit, reason}` if the task has already + exited. Keep in mind that normally a task failure also causes + the process owning the task to exit. Therefore this function can + return `{:exit, reason}` only if + + * the task process exited with the reason `:normal` + * it isn't linked to the caller + * the caller is trapping exits + + A timeout, in milliseconds, can be given with default value + of `5000`. If the time runs out before a message from + the task is received, this function will return `nil` + and the monitor will remain active. Therefore `yield/2` can be + called multiple times on the same task. + + This function assumes the task's monitor is still active or the + monitor's `:DOWN` message is in the message queue. If it has been + demonitored or the message already received, this function will wait + for the duration of the timeout awaiting the message. + + If you intend to shut the task down if it has not responded within `timeout` + milliseconds, you should chain this together with `shutdown/1`, like so: + + case Task.yield(task, timeout) || Task.shutdown(task) do + {:ok, result} -> + result + nil -> + Logger.warn "Failed to get a result in #{timeout}ms" + nil + end + + That ensures that if the task completes after the `timeout` but before `shutdown/1` + has been called, you will still get the result, since `shutdown/1` is designed to + handle this case and return the result. + """ + @spec yield(t, timeout) :: {:ok, term} | {:exit, term} | nil + def yield(task, timeout \\ 5_000) + + def yield(%Task{owner: owner} = task, _) when owner != self() do + raise ArgumentError, invalid_owner_error(task) + end + + def yield(%Task{ref: ref} = task, timeout) do + receive do + {^ref, reply} -> + Process.demonitor(ref, [:flush]) + {:ok, reply} + {:DOWN, ^ref, _, proc, :noconnection} -> + exit({reason(:noconnection, proc), {__MODULE__, :yield, [task, timeout]}}) + {:DOWN, ^ref, _, _, reason} -> + {:exit, reason} + after + timeout -> + nil + end + end + + @doc """ + Yields to multiple tasks in the given time interval. 
+ + This function receives a list of tasks and waits for their + replies in the given time interval. It returns a list + of tuples of two elements, with the task as the first element + and the yielded result as the second. + + Similarly to `yield/2`, each task's result will be + + * `{:ok, term}` if the task has successfully reported its + result back in the given time interval + * `{:exit, reason}` if the task has died + * `nil` if the task keeps running past the timeout + + Check `yield/2` for more information. + + ## Example + + `Task.yield_many/2` allows developers to spawn multiple tasks + and retrieve the results received in a given timeframe. + If we combine it with `Task.shutdown/2`, it allows us to gather + those results and cancel the tasks that have not replied in time. + + Let's see an example. + + tasks = + for i <- 1..10 do + Task.async(fn -> + Process.sleep(i * 1000) + i + end) + end + + tasks_with_results = Task.yield_many(tasks, 5000) + + results = Enum.map(tasks_with_results, fn {task, res} -> + # Shutdown the tasks that did not reply nor exit + res || Task.shutdown(task, :brutal_kill) + end) + + # Here we are matching only on {:ok, value} and + # ignoring {:exit, _} (crashed tasks) and `nil` (no replies) + for {:ok, value} <- results do + IO.inspect value + end + + In the example above, we create tasks that sleep from 1 + up to 10 seconds and return the amount of seconds they slept. + If you execute the code all at once, you should see 1 up to 5 + printed, as those were the tasks that have replied in the + given time. All other tasks will have been shut down using + the `Task.shutdown/2` call. + """ + @spec yield_many([t], timeout) :: [{t, {:ok, term} | {:exit, term} | nil}] + def yield_many(tasks, timeout \\ 5000) do + timeout_ref = make_ref() + timer_ref = Process.send_after(self(), timeout_ref, timeout) + try do + yield_many(tasks, timeout_ref, :infinity) + catch + {:noconnection, reason} -> + exit({reason, {__MODULE__, :yield_many, [tasks, timeout]}}) + after + Process.cancel_timer(timer_ref) + receive do: (^timeout_ref -> :ok), after: (0 -> :ok) + end + end + + defp yield_many([%Task{ref: ref, owner: owner} = task | rest], timeout_ref, timeout) do + if owner != self() do + raise ArgumentError, invalid_owner_error(task) + end + + receive do + {^ref, reply} -> + Process.demonitor(ref, [:flush]) + [{task, {:ok, reply}} | yield_many(rest, timeout_ref, timeout)] + + {:DOWN, ^ref, _, proc, :noconnection} -> + throw({:noconnection, reason(:noconnection, proc)}) + + {:DOWN, ^ref, _, _, reason} -> + [{task, {:exit, reason}} | yield_many(rest, timeout_ref, timeout)] + + ^timeout_ref -> + [{task, nil} | yield_many(rest, timeout_ref, 0)] + + after + timeout -> + [{task, nil} | yield_many(rest, timeout_ref, 0)] + + end + end + + defp yield_many([], _timeout_ref, _timeout) do + [] + end + + @doc """ + Unlinks and shuts down the task, and then checks for a reply. + + Returns `{:ok, reply}` if the reply is received while shutting down the task, + `{:exit, reason}` if the task died, otherwise `nil`. + + The shutdown method is either a timeout or `:brutal_kill`. In case + of a `timeout`, a `:shutdown` exit signal is sent to the task process + and if it does not exit within the timeout, it is killed. With `:brutal_kill` + the task is killed straight away. In case the task terminates abnormally + (possibly killed by another process), this function will exit with the same reason. 
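To make the three possible outcomes of `shutdown/2` concrete, here is a hedged sketch that pairs it with `yield/2` as suggested above; `slow_work/0` is a hypothetical function:

    task = Task.async(fn -> slow_work() end)

    case Task.yield(task, 2_000) || Task.shutdown(task) do
      {:ok, result} ->
        # Replied within two seconds, or right before shutdown/1 ran.
        result

      {:exit, reason} ->
        # Only observable when the caller traps exits or the task
        # exited with :normal; otherwise the caller exits too.
        {:error, reason}

      nil ->
        # No reply was received and the task has been shut down.
        {:error, :timeout}
    end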
+ + It is not required to call this function when terminating the caller, unless + exiting with reason `:normal` or if the task is trapping exits. If the caller is + exiting with a reason other than `:normal` and the task is not trapping exits, the + caller's exit signal will stop the task. The caller can exit with reason + `:shutdown` to shutdown all of its linked processes, including tasks, that + are not trapping exits without generating any log messages. + + If a task's monitor has already been demonitored or received and there is not + a response waiting in the message queue this function will return + `{:exit, :noproc}` as the result or exit reason can not be determined. + """ + @spec shutdown(t, timeout | :brutal_kill) :: {:ok, term} | {:exit, term} | nil + def shutdown(task, shutdown \\ 5_000) + + def shutdown(%Task{pid: nil} = task, _) do + raise ArgumentError, "task #{inspect task} does not have an associated task process" + end + + def shutdown(%Task{owner: owner} = task, _) when owner != self() do + raise ArgumentError, invalid_owner_error(task) + end + + def shutdown(%Task{pid: pid} = task, :brutal_kill) do + mon = Process.monitor(pid) + exit(pid, :kill) + + case shutdown_receive(task, mon, :brutal_kill, :infinity) do + {:down, proc, :noconnection} -> + exit({reason(:noconnection, proc), {__MODULE__, :shutdown, [task, :brutal_kill]}}) + {:down, _, reason} -> + {:exit, reason} + result -> + result + end + end + + def shutdown(%Task{pid: pid} = task, timeout) do + mon = Process.monitor(pid) + exit(pid, :shutdown) + case shutdown_receive(task, mon, :shutdown, timeout) do + {:down, proc, :noconnection} -> + exit({reason(:noconnection, proc), {__MODULE__, :shutdown, [task, timeout]}}) + {:down, _, reason} -> + {:exit, reason} + result -> + result + end + end + + ## Helpers + + defp reason(:noconnection, proc), do: {:nodedown, monitor_node(proc)} + defp reason(reason, _), do: reason + + defp monitor_node(pid) when is_pid(pid), do: node(pid) + defp monitor_node({_, node}), do: node + + # spawn a process to ensure task gets exit signal if process dies from exit signal + # between unlink and exit. 
+ defp exit(task, reason) do + caller = self() + ref = make_ref() + enforcer = spawn(fn() -> enforce_exit(task, reason, caller, ref) end) + Process.unlink(task) + Process.exit(task, reason) + send(enforcer, {:done, ref}) + :ok + end + + defp enforce_exit(pid, reason, caller, ref) do + mon = Process.monitor(caller) + receive do + {:done, ^ref} -> :ok + {:DOWN, ^mon, _, _, _} -> Process.exit(pid, reason) + end + end + + defp shutdown_receive(%{ref: ref} = task, mon, type, timeout) do + receive do + {:DOWN, ^mon, _, _, :shutdown} when type in [:shutdown, :timeout_kill] -> + Process.demonitor(ref, [:flush]) + flush_reply(ref) + {:DOWN, ^mon, _, _, :killed} when type == :brutal_kill -> + Process.demonitor(ref, [:flush]) + flush_reply(ref) + {:DOWN, ^mon, _, proc, :noproc} -> + reason = flush_noproc(ref, proc, type) + flush_reply(ref) || reason + {:DOWN, ^mon, _, proc, reason} -> + Process.demonitor(ref, [:flush]) + flush_reply(ref) || {:down, proc, reason} + after + timeout -> + Process.exit(task.pid, :kill) + shutdown_receive(task, mon, :timeout_kill, :infinity) + end + end + + defp flush_reply(ref) do + receive do + {^ref, reply} -> {:ok, reply} + after + 0 -> nil + end + end + + defp flush_noproc(ref, proc, type) do + receive do + {:DOWN, ^ref, _, _, :shutdown} when type in [:shutdown, :timeout_kill] -> + nil + {:DOWN, ^ref, _, _, :killed} when type == :brutal_kill -> + nil + {:DOWN, ^ref, _, _, reason} -> + {:down, proc, reason} + after + 0 -> + Process.demonitor(ref, [:flush]) + {:down, proc, :noproc} + end + end + + defp invalid_owner_error(task) do + "task #{inspect task} must be queried from the owner but was queried from #{inspect self()}" + end end diff --git a/lib/elixir/lib/task/supervised.ex b/lib/elixir/lib/task/supervised.ex index 921e8432e8c..675abb326b5 100644 --- a/lib/elixir/lib/task/supervised.ex +++ b/lib/elixir/lib/task/supervised.ex @@ -1,28 +1,44 @@ defmodule Task.Supervised do @moduledoc false + @ref_timeout 5_000 + + def start(info, fun) do + {:ok, :proc_lib.spawn(__MODULE__, :noreply, [info, fun])} + end def start_link(info, fun) do {:ok, :proc_lib.spawn_link(__MODULE__, :noreply, [info, fun])} end - def start_link(caller, info, fun) do - :proc_lib.start_link(__MODULE__, :reply, [caller, info, fun]) + def start_link(caller, monitor, info, fun) do + {:ok, spawn_link(caller, monitor, info, fun)} end - def async(caller, info, mfa) do - initial_call(mfa) - ref = receive do: ({^caller, ref} -> ref) - send caller, {ref, do_apply(info, mfa)} + def spawn_link(caller, monitor \\ :nomonitor, info, fun) do + :proc_lib.spawn_link(__MODULE__, :reply, [caller, monitor, info, fun]) end - def reply(caller, info, mfa) do + def reply(caller, monitor, info, mfa) do initial_call(mfa) - :erlang.link(caller) - :proc_lib.init_ack({:ok, self()}) + case monitor do + :monitor -> + mref = Process.monitor(caller) + reply(caller, mref, @ref_timeout, info, mfa) + :nomonitor -> + reply(caller, nil, :infinity, info, mfa) + end + end - ref = - # There is a race condition on this operation when working accross - # node that manifests if a `Task.Supervisor.async/1` call is made + defp reply(caller, mref, timeout, info, mfa) do + receive do + {^caller, ref} -> + _ = if mref, do: Process.demonitor(mref, [:flush]) + send caller, {ref, do_apply(info, mfa)} + {:DOWN, ^mref, _, _, reason} when is_reference(mref) -> + exit({:shutdown, reason}) + after + # There is a race condition on this operation when working across + # node that manifests if a "Task.Supervisor.async/2" call is made # while the supervisor 
is busy spawning previous tasks. # # Imagine the following workflow: @@ -34,16 +50,15 @@ defmodule Task.Supervised do # 5. The spawned task waits forever for the monitor reference so it can begin # # We have solved this by specifying a timeout of 5000 seconds. - # Given no work is done in the client in between the task start and + # Given no work is done in the client between the task start and # sending the reference, 5000 should be enough to not raise false # negatives unless the nodes are indeed not available. - receive do - {^caller, ref} -> ref - after - 5000 -> exit(:timeout) - end - - send caller, {ref, do_apply(info, mfa)} + # + # The same situation could occur with "Task.Supervisor.async_nolink/2", + # except a monitor is used instead of a link. + timeout -> + exit(:timeout) + end end def noreply(info, mfa) do @@ -70,31 +85,33 @@ defmodule Task.Supervised do apply(module, fun, args) catch :error, value -> - exit(info, mfa, {value, System.stacktrace()}) + reason = {value, System.stacktrace()} + exit(info, mfa, reason, reason) :throw, value -> - exit(info, mfa, {{:nocatch, value}, System.stacktrace()}) + reason = {{:nocatch, value}, System.stacktrace()} + exit(info, mfa, reason, reason) :exit, value -> - exit(info, mfa, value) + exit(info, mfa, {value, System.stacktrace()}, value) end end - defp exit(_info, _mfa, reason) - when reason == :normal - when reason == :shutdown - when tuple_size(reason) == 2 and elem(reason, 0) == :shutdown do + defp exit(_info, _mfa, _log_reason, reason) + when reason == :normal + when reason == :shutdown + when tuple_size(reason) == 2 and elem(reason, 0) == :shutdown do exit(reason) end - defp exit(info, mfa, reason) do + defp exit(info, mfa, log_reason, reason) do {fun, args} = get_running(mfa) :error_logger.format( - "** Task ~p terminating~n" <> - "** Started from ~p~n" <> - "** When function == ~p~n" <> - "** arguments == ~p~n" <> - "** Reason for termination == ~n" <> - "** ~p~n", [self, get_from(info), fun, args, reason]) + '** Task ~p terminating~n' ++ + '** Started from ~p~n' ++ + '** When function == ~p~n' ++ + '** arguments == ~p~n' ++ + '** Reason for termination == ~n' ++ + '** ~p~n', [self(), get_from(info), fun, args, get_reason(log_reason)]) exit(reason) end @@ -104,4 +121,327 @@ defmodule Task.Supervised do defp get_running({:erlang, :apply, [fun, []]}) when is_function(fun, 0), do: {fun, []} defp get_running({mod, fun, args}), do: {:erlang.make_fun(mod, fun, length(args)), args} + + defp get_reason({:undef, [{mod, fun, args, _info} | _] = stacktrace} = reason) + when is_atom(mod) and is_atom(fun) do + cond do + :code.is_loaded(mod) === false -> + {:"module could not be loaded", stacktrace} + is_list(args) and not function_exported?(mod, fun, length(args)) -> + {:"function not exported", stacktrace} + is_integer(args) and not function_exported?(mod, fun, args) -> + {:"function not exported", stacktrace} + true -> + reason + end + end + + defp get_reason(reason) do + reason + end + + ## Stream + + def stream(enumerable, acc, reducer, mfa, options, spawn) do + next = &Enumerable.reduce(enumerable, &1, fn x, acc -> {:suspend, [x | acc]} end) + max_concurrency = Keyword.get(options, :max_concurrency, System.schedulers_online) + timeout = Keyword.get(options, :timeout, 5000) + on_timeout = Keyword.get(options, :on_timeout, :exit) + parent = self() + + {:trap_exit, trap_exit?} = Process.info(self(), :trap_exit) + + # Start a process responsible for spawning processes and translating "down" + # messages. 
This process will trap exits if the current process is trapping + # exit, or it won't trap exits otherwise. + {monitor_pid, monitor_ref} = + Process.spawn(fn -> + stream_monitor(parent, mfa, spawn, trap_exit?, timeout) + end, [:link, :monitor]) + + # Now that we have the pid of the "monitor" process and the reference of the + # monitor we use to monitor such process, we can inform the monitor process + # about our reference to it. + send(monitor_pid, {parent, monitor_ref}) + + stream_reduce(acc, max_concurrency, _spawned = 0, _delivered = 0, _waiting = %{}, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) + end + + defp stream_reduce({:halt, acc}, _max, _spawned, _delivered, _waiting, next, + _reducer, monitor_pid, monitor_ref, timeout, _on_timeout) do + stream_close(monitor_pid, monitor_ref, timeout) + is_function(next) && next.({:halt, []}) + {:halted, acc} + end + + defp stream_reduce({:suspend, acc}, max, spawned, delivered, waiting, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) do + continuation = &stream_reduce(&1, max, spawned, delivered, waiting, next, reducer, monitor_pid, monitor_ref, timeout, on_timeout) + {:suspended, acc, continuation} + end + + # All spawned, all delivered, next is :done. + defp stream_reduce({:cont, acc}, _max, spawned, delivered, _waiting, next, + _reducer, monitor_pid, monitor_ref, timeout, _on_timeout) + when spawned == delivered and next == :done do + stream_close(monitor_pid, monitor_ref, timeout) + {:done, acc} + end + + # No more tasks to spawn because max == 0 or next is :done. We wait for task + # responses or tasks going down. + defp stream_reduce({:cont, acc}, max, spawned, delivered, waiting, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) + when max == 0 + when next == :done do + receive do + # The task at position "position" replied with "value". We put the + # response in the "waiting" map and do nothing, since we'll only act on + # this response when the replying task dies (we'll notice in the :down + # message). + {{^monitor_ref, position}, value} -> + %{^position => {pid, :running}} = waiting + waiting = Map.put(waiting, position, {pid, {:ok, value}}) + stream_reduce({:cont, acc}, max, spawned, delivered, waiting, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) + + # The task at position "position" died for some reason. We check if it + # replied already (then the death is peaceful) or if it's still running + # (then the reply from this task will be {:exit, reason}). This message is + # sent to us by the monitor process, not by the dying task directly. + {:down, {^monitor_ref, position}, reason} -> + waiting = + case waiting do + %{^position => {_, {:ok, _} = ok}} -> Map.put(waiting, position, {nil, ok}) + %{^position => {_, :running}} -> Map.put(waiting, position, {nil, {:exit, reason}}) + %{^position => {_, :timeout}} -> Map.put(waiting, position, {nil, {:exit, :timeout}}) + end + stream_deliver({:cont, acc}, max + 1, spawned, delivered, waiting, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) + + # The task at position "position" timed out and the monitor process killed + # it and sent the current process this message. + {:killed_for_timeout, {^monitor_ref, position}} -> + # If this task had already replied, we basically ignore this message. 
+ waiting = + case waiting do + %{^position => {_, {:ok, _}}} -> + waiting + %{^position => {pid, :running}} -> + case on_timeout do + :kill_task -> + # The monitor process already killed this task, we don't need + # to kill it here. + Map.put(waiting, position, {pid, :timeout}) + :exit -> + stream_cleanup_inbox(monitor_pid, monitor_ref) + exit({:timeout, {__MODULE__, :stream, [timeout]}}) + end + end + stream_reduce({:cont, acc}, max, spawned, delivered, waiting, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) + + # The monitor process died. We just cleanup the messages from the monitor + # process and exit. + {:DOWN, ^monitor_ref, _, ^monitor_pid, reason} -> + stream_cleanup_inbox(monitor_pid, monitor_ref) + exit({reason, {__MODULE__, :stream, [timeout]}}) + end + end + + defp stream_reduce({:cont, acc}, max, spawned, delivered, waiting, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) do + try do + next.({:cont, []}) + catch + kind, reason -> + stacktrace = System.stacktrace + stream_close(monitor_pid, monitor_ref, timeout) + :erlang.raise(kind, reason, stacktrace) + else + {:suspended, [value], next} -> + waiting = stream_spawn(value, spawned, waiting, monitor_pid, monitor_ref, timeout) + stream_reduce({:cont, acc}, max - 1, spawned + 1, delivered, waiting, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) + {_, [value]} -> + waiting = stream_spawn(value, spawned, waiting, monitor_pid, monitor_ref, timeout) + stream_reduce({:cont, acc}, max - 1, spawned + 1, delivered, waiting, :done, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) + {_, []} -> + stream_reduce({:cont, acc}, max, spawned, delivered, waiting, :done, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) + end + end + + defp stream_deliver({:suspend, acc}, max, spawned, delivered, waiting, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) do + continuation = &stream_deliver(&1, max, spawned, delivered, waiting, next, reducer, monitor_pid, monitor_ref, timeout, on_timeout) + {:suspended, acc, continuation} + end + defp stream_deliver({:halt, acc}, max, spawned, delivered, waiting, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) do + stream_reduce({:halt, acc}, max, spawned, delivered, waiting, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) + end + defp stream_deliver({:cont, acc}, max, spawned, delivered, waiting, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) do + case waiting do + %{^delivered => {nil, reply}} -> + try do + reducer.(reply, acc) + catch + kind, reason -> + stacktrace = System.stacktrace + is_function(next) && next.({:halt, []}) + stream_close(monitor_pid, monitor_ref, timeout) + :erlang.raise(kind, reason, stacktrace) + else + pair -> + stream_deliver(pair, max, spawned, delivered + 1, Map.delete(waiting, delivered), next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) + end + %{} -> + stream_reduce({:cont, acc}, max, spawned, delivered, waiting, next, + reducer, monitor_pid, monitor_ref, timeout, on_timeout) + end + end + + defp stream_close(monitor_pid, monitor_ref, timeout) do + send(monitor_pid, {:stop, monitor_ref}) + receive do + {:DOWN, ^monitor_ref, _, _, :normal} -> + stream_cleanup_inbox(monitor_pid, monitor_ref) + :ok + {:DOWN, ^monitor_ref, _, _, reason} -> + stream_cleanup_inbox(monitor_pid, monitor_ref) + exit({reason, {__MODULE__, :stream, [timeout]}}) + end + end + + defp stream_cleanup_inbox(monitor_pid, monitor_ref) do + receive do + {:EXIT, ^monitor_pid, 
_} -> stream_cleanup_inbox(monitor_ref) + after + 0 -> stream_cleanup_inbox(monitor_ref) + end + end + + defp stream_cleanup_inbox(monitor_ref) do + receive do + {{^monitor_ref, _}, _} -> + stream_cleanup_inbox(monitor_ref) + {:down, {^monitor_ref, _}, _} -> + stream_cleanup_inbox(monitor_ref) + after + 0 -> + :ok + end + end + + # This function spawns a task for the given "value", and puts the pid of this + # new task in the map of "waiting" tasks, which is returned. + defp stream_spawn(value, spawned, waiting, monitor_pid, monitor_ref, timeout) do + send(monitor_pid, {:spawn, spawned, value}) + + receive do + {:spawned, {^monitor_ref, ^spawned}, pid} -> + send(pid, {self(), {monitor_ref, spawned}}) + Map.put(waiting, spawned, {pid, :running}) + {:DOWN, ^monitor_ref, _, ^monitor_pid, reason} -> + stream_cleanup_inbox(monitor_pid, monitor_ref) + exit({reason, {__MODULE__, :stream, [timeout]}}) + end + end + + defp stream_monitor(parent_pid, mfa, spawn, trap_exit?, timeout) do + Process.flag(:trap_exit, trap_exit?) + + parent_ref = Process.monitor(parent_pid) + + # Let's wait for the parent process to tell this process the monitor ref + # it's using to monitor this process. If the parent process dies while this + # process waits, this process dies with the same reason. + receive do + {^parent_pid, monitor_ref} -> + stream_monitor_loop(parent_pid, parent_ref, mfa, spawn, monitor_ref, _running_tasks = %{}, timeout) + {:DOWN, ^parent_ref, _, _, reason} -> + exit(reason) + end + end + + defp stream_monitor_loop(parent_pid, parent_ref, mfa, spawn, monitor_ref, running_tasks, timeout) do + receive do + # The parent process is telling us to spawn a new task to process + # "value". We spawn it and notify the parent about its pid. + {:spawn, position, value} -> + {type, pid} = spawn.(parent_pid, normalize_mfa_with_arg(mfa, value)) + ref = Process.monitor(pid) + timer_ref = Process.send_after(self(), {:timeout, {monitor_ref, ref}}, timeout) + send(parent_pid, {:spawned, {monitor_ref, position}, pid}) + running_tasks = Map.put(running_tasks, ref, {position, type, pid, timer_ref}) + stream_monitor_loop(parent_pid, parent_ref, mfa, spawn, monitor_ref, running_tasks, timeout) + + # The parent process is telling us to stop because the stream is being + # closed. In this case, we forcely kill all spawned processes and then + # exit gracefully ourselves. + {:stop, ^monitor_ref} -> + Process.flag(:trap_exit, true) + for {ref, {_position, _type, pid, _timer_ref}} <- running_tasks do + Process.exit(pid, :kill) + receive do + {:DOWN, ^ref, _, _, _} -> :ok + end + end + exit(:normal) + + # The parent process went down with a given reason. We kill all the + # spawned processes (that are also linked) with the same reason, and then + # exit ourself with the same reason. + {:DOWN, ^parent_ref, _, _, reason} -> + for {_ref, {_position, :link, pid, _timer_ref}} <- running_tasks do + Process.exit(pid, reason) + end + exit(reason) + + # One of the spawned processes went down. We inform the parent process of + # this and keep going. + {:DOWN, ref, _, _, reason} -> + {{position, _type, _pid, timer_ref}, running_tasks} = Map.pop(running_tasks, ref) + :ok = Process.cancel_timer(timer_ref, async: true, info: false) + send(parent_pid, {:down, {monitor_ref, position}, reason}) + stream_monitor_loop(parent_pid, parent_ref, mfa, spawn, monitor_ref, running_tasks, timeout) + + # One of the spawned processes timed out. We kill that process here + # regardless of the value of :on_timeout. 
We then send a message to the + # parent process informing it that a task timed out, and the parent + # process decides what to do. + {:timeout, {^monitor_ref, ref}} -> + case running_tasks do + %{^ref => {position, _type, pid, _timer_ref}} -> + send(parent_pid, {:killed_for_timeout, {monitor_ref, position}}) + caller = self() + ref = make_ref() + enforcer = spawn(fn -> + mon = Process.monitor(caller) + receive do + {:done, ^ref} -> :ok + {:DOWN, ^mon, _, _, _} -> Process.exit(pid, :kill) + end + end) + Process.unlink(pid) + Process.exit(pid, :kill) + send(enforcer, {:done, ref}) + _other -> + :ok + end + stream_monitor_loop(parent_pid, parent_ref, mfa, spawn, monitor_ref, running_tasks, timeout) + + {:EXIT, _, _} -> + stream_monitor_loop(parent_pid, parent_ref, mfa, spawn, monitor_ref, running_tasks, timeout) + end + end + + defp normalize_mfa_with_arg({mod, fun, args}, arg), do: {mod, fun, [arg | args]} + defp normalize_mfa_with_arg(fun, arg), do: {:erlang, :apply, [fun, [arg]]} end diff --git a/lib/elixir/lib/task/supervisor.ex b/lib/elixir/lib/task/supervisor.ex index 8366ba7c3bc..86ec925ee9a 100644 --- a/lib/elixir/lib/task/supervisor.ex +++ b/lib/elixir/lib/task/supervisor.ex @@ -1,21 +1,26 @@ defmodule Task.Supervisor do @moduledoc """ - A tasks supervisor. + A task supervisor. This module defines a supervisor which can be used to dynamically supervise tasks. Behind the scenes, this module is implemented as a - `:simple_one_for_one` supervisor where the workers are temporary - (i.e. they are not restarted after they die). + `:simple_one_for_one` supervisor where the workers are temporary by + default (that is, they are not restarted after they die; read the docs + for `start_link/1` for more information on choosing the restart + strategy). - The functions in this module allow tasks to be spawned and awaited - from a supervisor, similar to the functions defined in the `Task` module. + See the `Task` module for more information. - ## Name Registration + ## Name registration A `Task.Supervisor` is bound to the same name registration rules as a - `GenServer`. Read more about it in the `GenServer` docs. + `GenServer`. Read more about them in the `GenServer` docs. """ + @typedoc "Option values used by `start_link`" + @type option :: Supervisor.option | {:restart, Supervisor.Spec.restart} | + {:shutdown, Supervisor.Spec.shutdown} + @doc """ Starts a new supervisor. @@ -25,14 +30,23 @@ defmodule Task.Supervisor do described under the `Name Registration` section in the `GenServer` module docs; + * `:restart` - the restart strategy, may be `:temporary` (the default), + `:transient` or `:permanent`. Check `Supervisor.Spec` for more info. 
+ Defaults to `:temporary` so tasks aren't automatically restarted when + they complete nor in case of crashes; + * `:shutdown` - `:brutal_kill` if the tasks must be killed directly on shutdown or an integer indicating the timeout value, defaults to 5000 milliseconds; + + * `:max_restarts` and `:max_seconds` - as specified in `Supervisor.Spec.supervise/2`; + """ - @spec start_link(Supervisor.options) :: Supervisor.on_start + @spec start_link([option]) :: Supervisor.on_start def start_link(opts \\ []) do import Supervisor.Spec + {restart, opts} = Keyword.pop(opts, :restart, :temporary) {shutdown, opts} = Keyword.pop(opts, :shutdown, 5000) - children = [worker(Task.Supervised, [], restart: :temporary, shutdown: shutdown)] + children = [worker(Task.Supervised, [], restart: restart, shutdown: shutdown)] Supervisor.start_link(children, [strategy: :simple_one_for_one] ++ opts) end @@ -40,9 +54,10 @@ defmodule Task.Supervisor do Starts a task that can be awaited on. The `supervisor` must be a reference as defined in `Task.Supervisor`. - For more information on tasks, check the `Task` module. + The task will still be linked to the caller, see `Task.async/3` for + more information and `async_nolink/2` for a non-linked variant. """ - @spec async(Supervisor.supervisor, fun) :: Task.t + @spec async(Supervisor.supervisor, (() -> any)) :: Task.t def async(supervisor, fun) do async(supervisor, :erlang, :apply, [fun, []]) end @@ -51,15 +66,144 @@ defmodule Task.Supervisor do Starts a task that can be awaited on. The `supervisor` must be a reference as defined in `Task.Supervisor`. - For more information on tasks, check the `Task` module. + The task will still be linked to the caller, see `Task.async/3` for + more information and `async_nolink/2` for a non-linked variant. """ @spec async(Supervisor.supervisor, module, atom, [term]) :: Task.t def async(supervisor, module, fun, args) do - args = [self, get_info(self), {module, fun, args}] - {:ok, pid} = Supervisor.start_child(supervisor, args) - ref = Process.monitor(pid) - send pid, {self(), ref} - %Task{pid: pid, ref: ref} + do_async(supervisor, :link, module, fun, args) + end + + @doc """ + Starts a task that can be awaited on. + + The `supervisor` must be a reference as defined in `Task.Supervisor`. + The task won't be linked to the caller, see `Task.async/3` for + more information. + + ## Compatibility with OTP behaviours + + If you create a task using `async_nolink` inside an OTP behaviour + like `GenServer`, you should match on the message coming from the + task inside your `c:GenServer.handle_info/2` callback. + + The reply sent by the task will be in the format `{ref, result}`, + where `ref` is the monitor reference held by the task struct + and `result` is the return value of the task function. + + Keep in mind that, regardless of how the task created with `async_nolink` + terminates, the caller's process will always receive a `:DOWN` message + with the same `ref` value that is held by the task struct. If the task + terminates normally, the reason in the `:DOWN` message will be `:normal`. + """ + @spec async_nolink(Supervisor.supervisor, (() -> any)) :: Task.t + def async_nolink(supervisor, fun) do + async_nolink(supervisor, :erlang, :apply, [fun, []]) + end + + @doc """ + Starts a task that can be awaited on. + + The `supervisor` must be a reference as defined in `Task.Supervisor`. + The task won't be linked to the caller, see `Task.async/3` for + more information. 
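A minimal sketch of the `async_nolink` pattern described above inside a GenServer, where the server survives task crashes. The module names are hypothetical, and a supervisor is assumed to have been started with `Task.Supervisor.start_link(name: MyApp.TaskSupervisor)`:

    defmodule MyApp.Importer do
      use GenServer

      def init(_), do: {:ok, %{task: nil, result: nil}}

      def handle_cast({:import, file}, state) do
        task = Task.Supervisor.async_nolink(MyApp.TaskSupervisor, fn -> import_file(file) end)
        {:noreply, %{state | task: task}}
      end

      # A successful reply arrives as {ref, result}; demonitoring with
      # :flush discards the :DOWN message that follows.
      def handle_info({ref, result}, %{task: %Task{ref: ref}} = state) do
        Process.demonitor(ref, [:flush])
        {:noreply, %{state | task: nil, result: result}}
      end

      # The task is not linked, so a crash only shows up here as a :DOWN
      # message and the server keeps running.
      def handle_info({:DOWN, ref, :process, _pid, reason}, %{task: %Task{ref: ref}} = state) do
        {:noreply, %{state | task: nil, result: {:error, reason}}}
      end

      defp import_file(_file), do: :ok
    end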
+ """ + @spec async_nolink(Supervisor.supervisor, module, atom, [term]) :: Task.t + def async_nolink(supervisor, module, fun, args) do + do_async(supervisor, :nolink, module, fun, args) + end + + @doc """ + Returns a stream that runs the given `module`, `function`, and `args` + concurrently on each item in `enumerable`. + + Each item will be prepended to the given `args` and processed by its + own task. The tasks will be spawned under the given `supervisor` and + linked to the current process, similarly to `async/4`. + + When streamed, each task will emit `{:ok, val}` upon successful + completion or `{:exit, val}` if the caller is trapping exits. Results + are emitted in the same order as the original `enumerable`. + + The level of concurrency can be controlled via the `:max_concurrency` + option and defaults to `System.schedulers_online/0`. A timeout + can also be given as an option representing the maximum amount of + time to wait without a task reply. + + Finally, if you find yourself trapping exits to handle exits inside + the async stream, consider using `async_stream_nolink/6` to start tasks + that are not linked to the current process. + + ## Options + + * `:max_concurrency` - sets the maximum number of tasks to run + at the same time. Defaults to `System.schedulers_online/0`. + * `:timeout` - the maximum amount of time to wait (in milliseconds) + without receiving a task reply (across all running tasks). + Defaults to `5000`. + + ## Examples + + Let's build a stream and then enumerate it: + + stream = Task.Supervisor.async_stream(MySupervisor, collection, Mod, :expensive_fun, []) + Enum.to_list(stream) + + """ + @spec async_stream(Supervisor.supervisor, Enumerable.t, module, atom, [term], Keyword.t) :: + Enumerable.t + def async_stream(supervisor, enumerable, module, function, args, options \\ []) + when is_atom(module) and is_atom(function) and is_list(args) do + build_stream(supervisor, :link, enumerable, {module, function, args}, options) + end + + @doc """ + Returns a stream that runs the given function `fun` concurrently + on each item in `enumerable`. + + Each item in `enumerable` is passed as argument to the given function `fun` + and processed by its own task. The tasks will be spawned under the given + `supervisor` and linked to the current process, similarly to `async/2`. + + See `async_stream/6` for discussion, options, and examples. + """ + @spec async_stream(Supervisor.supervisor, Enumerable.t, (term -> term), Keyword.t) :: + Enumerable.t + def async_stream(supervisor, enumerable, fun, options \\ []) when is_function(fun, 1) do + build_stream(supervisor, :link, enumerable, fun, options) + end + + @doc """ + Returns a stream that runs the given `module`, `function`, and `args` + concurrently on each item in `enumerable`. + + Each item in `enumerable` will be prepended to the given `args` and processed + by its own task. The tasks will be spawned under the given `supervisor` and + will not be linked to the current process, similarly to `async_nolink/4`. + + See `async_stream/6` for discussion, options, and examples. 
+ """ + @spec async_stream_nolink(Supervisor.supervisor, Enumerable.t, module, atom, [term], Keyword.t) :: + Enumerable.t + def async_stream_nolink(supervisor, enumerable, module, function, args, options \\ []) + when is_atom(module) and is_atom(function) and is_list(args) do + build_stream(supervisor, :nolink, enumerable, {module, function, args}, options) + end + + @doc """ + Returns a stream that runs the given `function` concurrently on each + item in `enumerable`. + + Each item in `enumerable` is passed as argument to the given function `fun` + and processed by its own task. The tasks will be spawned under the given + `supervisor` and linked to the current process, similarly to `async_nolink/2`. + + See `async_stream/6` for discussion and examples. + """ + @spec async_stream_nolink(Supervisor.supervisor, Enumerable.t, (term -> term), Keyword.t) :: + Enumerable.t + def async_stream_nolink(supervisor, enumerable, fun, options \\ []) when is_function(fun, 1) do + build_stream(supervisor, :nolink, enumerable, fun, options) end @doc """ @@ -67,46 +211,65 @@ defmodule Task.Supervisor do """ @spec terminate_child(Supervisor.supervisor, pid) :: :ok def terminate_child(supervisor, pid) when is_pid(pid) do - :supervisor.terminate_child(supervisor, pid) + Supervisor.terminate_child(supervisor, pid) end @doc """ - Returns all children pids. + Returns all children PIDs. """ @spec children(Supervisor.supervisor) :: [pid] def children(supervisor) do - :supervisor.which_children(supervisor) |> Enum.map(&elem(&1, 1)) + for {_, pid, _, _} <- Supervisor.which_children(supervisor), is_pid(pid), do: pid end @doc """ - Starts a task as child of the given `supervisor`. + Starts a task as a child of the given `supervisor`. Note that the spawned process is not linked to the caller, but only to the supervisor. This command is useful in case the task needs to perform side-effects (like I/O) and does not need to report back to the caller. """ - @spec start_child(Supervisor.supervisor, fun) :: {:ok, pid} + @spec start_child(Supervisor.supervisor, (() -> any)) :: {:ok, pid} def start_child(supervisor, fun) do start_child(supervisor, :erlang, :apply, [fun, []]) end @doc """ - Starts a task as child of the given `supervisor`. + Starts a task as a child of the given `supervisor`. Similar to `start_child/2` except the task is specified by the given `module`, `fun` and `args`. 
""" @spec start_child(Supervisor.supervisor, module, atom, [term]) :: {:ok, pid} - def start_child(supervisor, module, fun, args) do - Supervisor.start_child(supervisor, [get_info(self), {module, fun, args}]) + def start_child(supervisor, module, fun, args) when is_atom(fun) and is_list(args) do + Supervisor.start_child(supervisor, [get_info(self()), {module, fun, args}]) end defp get_info(self) do {node(), case Process.info(self, :registered_name) do - {:registered_name, []} -> self() + {:registered_name, []} -> self {:registered_name, name} -> name end} end + + defp do_async(supervisor, link_type, module, fun, args) do + owner = self() + args = [owner, :monitor, get_info(owner), {module, fun, args}] + {:ok, pid} = Supervisor.start_child(supervisor, args) + if link_type == :link, do: Process.link(pid) + ref = Process.monitor(pid) + send pid, {owner, ref} + %Task{pid: pid, ref: ref, owner: owner} + end + + defp build_stream(supervisor, link_type, enumerable, fun, options) do + &Task.Supervised.stream(enumerable, &1, &2, fun, options, fn owner, mfa -> + args = [owner, :monitor, get_info(owner), mfa] + {:ok, pid} = Supervisor.start_child(supervisor, args) + if link_type == :link, do: Process.link(pid) + {link_type, pid} + end) + end end diff --git a/lib/elixir/lib/tuple.ex b/lib/elixir/lib/tuple.ex index b20256aa547..1a4665e384c 100644 --- a/lib/elixir/lib/tuple.ex +++ b/lib/elixir/lib/tuple.ex @@ -1,12 +1,56 @@ defmodule Tuple do @moduledoc """ Functions for working with tuples. + + Tuples are ordered collections of elements; tuples can contain elements of any + type, and a tuple can contain elements of different types. Curly braces can be + used to create tuples: + + iex> {} + {} + iex> {1, :two, "three"} + {1, :two, "three"} + + Tuples store elements contiguously in memory; this means that accessing a + tuple element by index (which can be done through the `Kernel.elem/2` + function) is a constant-time operation: + + iex> tuple = {1, :two, "three"} + iex> elem(tuple, 0) + 1 + iex> elem(tuple, 2) + "three" + + Same goes for getting the tuple size (via `Kernel.tuple_size/1`): + + iex> tuple_size({}) + 0 + iex> tuple_size({1, 2, 3}) + 3 + + Tuples being stored contiguously in memory also means that updating a tuple + (for example replacing an element with `Kernel.put_elem/3`) will make a copy + of the whole tuple. + + Tuples are not meant to be used as a "collection" type (which is also + suggested by the absence of an implementation of the `Enumerable` protocol for + tuples): they're mostly meant to be used as a fixed-size container for + multiple elements. For example, tuples are often used to have functions return + "enriched" values: a common pattern is for functions to return `{:ok, value}` + for successful cases and `{:error, reason}` for unsuccessful cases. For + example, this is exactly what `File.read/1` does: it returns `{:ok, contents}` + if reading the given file is successful, or `{:error, reason}` otherwise + (e.g., `{:error, :enoent}` if the file doesn't exist). + + This module provides functions to work with tuples; some more functions to + work with tuples can be found in `Kernel` (`Kernel.tuple_size/1`, + `Kernel.elem/2`, `Kernel.put_elem/3`, and others). """ @doc """ Creates a new tuple. - Creates a tuple of size `size` containing the + Creates a tuple of `size` containing the given `data` at every position. Inlined by the compiler. @@ -25,9 +69,9 @@ defmodule Tuple do @doc """ Inserts an element into a tuple. - Inserts `value` into `tuple` at the given zero-based `index`. 
- Raises an `ArgumentError` if `index` is greater than the - length of `tuple`. + Inserts `value` into `tuple` at the given `index`. + Raises an `ArgumentError` if `index` is negative or greater than the + length of `tuple`. Index is zero-based. Inlined by the compiler. @@ -36,19 +80,40 @@ defmodule Tuple do iex> tuple = {:bar, :baz} iex> Tuple.insert_at(tuple, 0, :foo) {:foo, :bar, :baz} + iex> Tuple.insert_at(tuple, 2, :bong) + {:bar, :baz, :bong} """ @spec insert_at(tuple, non_neg_integer, term) :: tuple - def insert_at(tuple, index, term) do - :erlang.insert_element(index + 1, tuple, term) + def insert_at(tuple, index, value) do + :erlang.insert_element(index + 1, tuple, value) + end + + @doc """ + Inserts an element at the end of a tuple. + + Returns a new tuple with the element appended at the end, and contains + the elements in `tuple` followed by `value` as the last element. + + Inlined by the compiler. + + ## Examples + iex> tuple = {:foo, :bar} + iex> Tuple.append(tuple, :baz) + {:foo, :bar, :baz} + + """ + @spec append(tuple, term) :: tuple + def append(tuple, value) do + :erlang.append_element(tuple, value) end @doc """ Removes an element from a tuple. - Deletes the element at the zero-based `index` from `tuple`. - Raises an `ArgumentError` if `index` is greater than - or equal to the length of `tuple`. + Deletes the element at the given `index` from `tuple`. + Raises an `ArgumentError` if `index` is negative or greater than + or equal to the length of `tuple`. Index is zero-based. Inlined by the compiler. @@ -67,7 +132,16 @@ defmodule Tuple do @doc """ Converts a tuple to a list. + Returns a new list with all the tuple elements. + Inlined by the compiler. + + ## Examples + + iex> tuple = {:foo, :bar, :baz} + iex> Tuple.to_list(tuple) + [:foo, :bar, :baz] + """ @spec to_list(tuple) :: list def to_list(tuple) do diff --git a/lib/elixir/lib/uri.ex b/lib/elixir/lib/uri.ex index 9b918874d63..0267cde89c3 100644 --- a/lib/elixir/lib/uri.ex +++ b/lib/elixir/lib/uri.ex @@ -1,39 +1,35 @@ defmodule URI do @moduledoc """ - Utilities for working with and creating URIs. + Utilities for working with URIs. + + This module provides functions for working with URIs (for example, parsing + URIs or encoding query strings). The functions in this module are implemented + according to [RFC 3986](https://tools.ietf.org/html/rfc3986). """ defstruct scheme: nil, path: nil, query: nil, fragment: nil, authority: nil, userinfo: nil, host: nil, port: nil - import Bitwise - - @ports %{ - "ftp" => 21, - "http" => 80, - "https" => 443, - "ldap" => 389, - "sftp" => 22, - "tftp" => 69, + @type t :: %__MODULE__{ + scheme: nil | binary, + path: nil | binary, + query: nil | binary, + fragment: nil | binary, + authority: nil | binary, + userinfo: nil | binary, + host: nil | binary, + port: nil | :inet.port_number, } - Enum.each @ports, fn {scheme, port} -> - def normalize_scheme(unquote(scheme)), do: unquote(scheme) - def default_port(unquote(scheme)), do: unquote(port) - end - - @doc """ - Normalizes the scheme according to the spec by downcasing it. - """ - def normalize_scheme(nil), do: nil - def normalize_scheme(scheme), do: String.downcase(scheme) + import Bitwise @doc """ Returns the default port for a given scheme. - If the scheme is unknown to URI, returns `nil`. - Any scheme may be registered via `default_port/2`. + If the scheme is unknown to the `URI` module, this function returns + `nil`. The default port for any scheme can be configured globally + via `default_port/2`. 
## Examples @@ -44,29 +40,35 @@ defmodule URI do nil """ + @spec default_port(binary) :: nil | non_neg_integer def default_port(scheme) when is_binary(scheme) do - {:ok, dict} = Application.fetch_env(:elixir, :uri) - Map.get(dict, scheme) + :elixir_config.get({:uri, scheme}) end @doc """ - Registers a scheme with a default port. + Registers the default `port` for the given `scheme`. + + After this function is called, `port` will be returned by + `default_port/1` for the given scheme `scheme`. Note that this function + changes the default port for the given `scheme` *globally*, meaning for + every application. It is recommended for this function to be invoked in your - application start callback in case you want to register + application's start callback in case you want to register new URIs. """ - def default_port(scheme, port) when is_binary(scheme) and port > 0 do - {:ok, dict} = Application.fetch_env(:elixir, :uri) - Application.put_env(:elixir, :uri, Map.put(dict, scheme, port), persistent: true) + @spec default_port(binary, non_neg_integer) :: :ok + def default_port(scheme, port) when is_binary(scheme) and is_integer(port) and port >= 0 do + :elixir_config.put({:uri, scheme}, port) end @doc """ Encodes an enumerable into a query string. - Takes an enumerable (containing a sequence of two-item tuples) - and returns a string of the form "key1=value1&key2=value2..." where - keys and values are URL encoded as per `encode/1`. + Takes an enumerable that enumerates as a list of two-element + tuples (e.g., a map or a keyword list) and returns a string + in the form of `key1=value1&key2=value2...` where keys and + values are URL encoded as per `encode_www_form/1`. Keys and values can be any term that implements the `String.Chars` protocol, except lists which are explicitly forbidden. @@ -77,15 +79,39 @@ defmodule URI do iex> URI.encode_query(hd) "bar=2&foo=1" + iex> query = %{"key" => "value with spaces"} + iex> URI.encode_query(query) + "key=value+with+spaces" + + iex> URI.encode_query %{key: [:a, :list]} + ** (ArgumentError) encode_query/1 values cannot be lists, got: [:a, :list] + """ - def encode_query(l), do: Enum.map_join(l, "&", &pair/1) + @spec encode_query(term) :: binary + def encode_query(enumerable) do + Enum.map_join(enumerable, "&", &encode_kv_pair/1) + end + + defp encode_kv_pair({key, _}) when is_list(key) do + raise ArgumentError, "encode_query/1 keys cannot be lists, got: #{inspect key}" + end + + defp encode_kv_pair({_, value}) when is_list(value) do + raise ArgumentError, "encode_query/1 values cannot be lists, got: #{inspect value}" + end + + defp encode_kv_pair({key, value}) do + encode_www_form(Kernel.to_string(key)) <> + "=" <> encode_www_form(Kernel.to_string(value)) + end @doc """ - Decodes a query string into a dictionary (by default uses a map). + Decodes a query string into a map. - Given a query string of the form "key1=value1&key2=value2...", produces a - map with one entry for each key-value pair. Each key and value will be a - binary. Keys and values will be percent-unescaped. + Given a query string of the form of `key1=value1&key2=value2...`, this + function inserts each key-value pair in the query string as one entry in the + given `map`. Keys and values in the resulting map will be binaries. Keys and + values will be percent-unescaped. Use `query_decoder/1` if you want to iterate over each value manually. 
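The `default_port/2` docs above recommend registering custom schemes from the application's start callback. A minimal sketch, assuming a hypothetical `MyApp.Application` and using the `rtsp` scheme (port 554), which `URI` does not register by default:

    defmodule MyApp.Application do
      use Application

      def start(_type, _args) do
        # Register the default port so parse/1 fills in :port for this scheme.
        URI.default_port("rtsp", 554)

        Supervisor.start_link([], strategy: :one_for_one, name: MyApp.Supervisor)
      end
    end

After this runs, `URI.parse("rtsp://media.example.com/stream").port` returns `554`.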
@@ -94,79 +120,121 @@ defmodule URI do iex> URI.decode_query("foo=1&bar=2") %{"bar" => "2", "foo" => "1"} + iex> URI.decode_query("percent=oh+yes%21", %{"starting" => "map"}) + %{"percent" => "oh yes!", "starting" => "map"} + """ - def decode_query(q, dict \\ %{}) when is_binary(q) do - Enum.reduce query_decoder(q), dict, fn({k, v}, acc) -> Dict.put(acc, k, v) end + @spec decode_query(binary, map) :: map + def decode_query(query, map \\ %{}) + + # TODO: Remove on 2.0 + def decode_query(query, %{__struct__: _} = dict) when is_binary(query) do + IO.warn "URI.decode_query/2 is deprecated, please use URI.decode_query/1" + decode_query_into_dict(query, dict) + end + + def decode_query(query, map) when is_binary(query) and is_map(map) do + decode_query_into_map(query, map) + end + + # TODO: Remove on 2.0 + def decode_query(query, dict) when is_binary(query) do + IO.warn "URI.decode_query/2 is deprecated, please use URI.decode_query/1" + decode_query_into_dict(query, dict) + end + + defp decode_query_into_map(query, map) do + case decode_next_query_pair(query) do + nil -> + map + {{key, value}, rest} -> + decode_query_into_map(rest, Map.put(map, key, value)) + end + end + + defp decode_query_into_dict(query, dict) do + case decode_next_query_pair(query) do + nil -> + dict + {{key, value}, rest} -> + # Avoid warnings about Dict being deprecated + dict_module = Dict + decode_query_into_dict(rest, dict_module.put(dict, key, value)) + end end @doc """ - Returns an iterator function over the query string that decodes - the query string in steps. + Returns a stream of two-element tuples representing key-value pairs in the + given `query`. + + Key and value in each tuple will be binaries and will be percent-unescaped. ## Examples - iex> URI.query_decoder("foo=1&bar=2") |> Enum.map &(&1) + iex> URI.query_decoder("foo=1&bar=2") |> Enum.to_list() [{"foo", "1"}, {"bar", "2"}] """ - def query_decoder(q) when is_binary(q) do - Stream.unfold(q, &do_decoder/1) + @spec query_decoder(binary) :: Enumerable.t + def query_decoder(query) when is_binary(query) do + Stream.unfold(query, &decode_next_query_pair/1) end - defp do_decoder("") do + defp decode_next_query_pair("") do nil end - defp do_decoder(q) do - {first, next} = - case :binary.split(q, "&") do - [first, rest] -> {first, rest} - [first] -> {first, ""} + defp decode_next_query_pair(query) do + {undecoded_next_pair, rest} = + case :binary.split(query, "&") do + [next_pair, rest] -> {next_pair, rest} + [next_pair] -> {next_pair, ""} end - current = - case :binary.split(first, "=") do - [key, value] -> - {decode_www_form(key), decode_www_form(value)} - [key] -> - {decode_www_form(key), nil} + next_pair = + case :binary.split(undecoded_next_pair, "=") do + [key, value] -> {decode_www_form(key), decode_www_form(value)} + [key] -> {decode_www_form(key), nil} end - {current, next} + {next_pair, rest} end - defp pair({k, _}) when is_list(k) do - raise ArgumentError, "encode_query/1 keys cannot be lists, got: #{inspect k}" - end + @doc """ + Checks if the character is a "reserved" character in a URI. - defp pair({_, v}) when is_list(v) do - raise ArgumentError, "encode_query/1 values cannot be lists, got: #{inspect v}" - end + Reserved characters are specified in + [RFC 3986, section 2.2](https://tools.ietf.org/html/rfc3986#section-2.2). - defp pair({k, v}) do - encode_www_form(to_string(k)) <> - "=" <> encode_www_form(to_string(v)) - end + ## Examples - @doc """ - Checks if the character is a "reserved" character in a URI. 
+ iex> URI.char_reserved?(?+) + true - Reserved characters are specified in RFC3986, section 2.2. """ - def char_reserved?(c) do - c in ':/?#[]@!$&\'()*+,;=' + @spec char_reserved?(char) :: boolean + def char_reserved?(char) when char in 0..0x10FFFF do + char in ':/?#[]@!$&\'()*+,;=' end @doc """ Checks if the character is a "unreserved" character in a URI. - Unreserved characters are specified in RFC3986, section 2.3. + Unreserved characters are specified in + [RFC 3986, section 2.3](https://tools.ietf.org/html/rfc3986#section-2.3). + + ## Examples + + iex> URI.char_unreserved?(?_) + true + """ - def char_unreserved?(c) do - c in ?0..?9 or - c in ?a..?z or - c in ?A..?Z or - c in '~_-.' + @spec char_unreserved?(char) :: boolean + def char_unreserved?(char) when char in 0..0x10FFFF do + char in ?0..?9 or + char in ?a..?z or + char in ?A..?Z or + char in '~_-.' end @doc """ @@ -174,27 +242,50 @@ defmodule URI do This is the default used by `URI.encode/2` where both reserved and unreserved characters are kept unescaped. + + ## Examples + + iex> URI.char_unescaped?(?{) + false + """ - def char_unescaped?(c) do - char_reserved?(c) or char_unreserved?(c) + @spec char_unescaped?(char) :: boolean + def char_unescaped?(char) when char in 0..0x10FFFF do + char_reserved?(char) or char_unreserved?(char) end @doc """ - Percent-escape a URI. - Accepts `predicate` function as an argument to specify if char can be left as is. + Percent-escapes all characters that require escaped in a string. - ## Example + This means reserved characters, such as `:` and `/`, and the so- + called unreserved characters, which have the same meaning both + escaped and unescaped, won't be escaped by default. + + See `encode_www_form` if you are interested in escaping reserved + characters too. + + This function also accepts a `predicate` function as an optional + argument. If passed, this function will be called with each byte + in `string` as its argument and should return `true` if the given + byte should be left as is. + + ## Examples iex> URI.encode("ftp://s-ite.tld/?value=put it+й") "ftp://s-ite.tld/?value=put%20it+%D0%B9" + iex> URI.encode("a string", &(&1 != ?i)) + "a str%69ng" + """ - def encode(str, predicate \\ &char_unescaped?/1) when is_binary(str) do - for <>, into: "", do: percent(c, predicate) + @spec encode(binary, (byte -> boolean)) :: binary + def encode(string, predicate \\ &char_unescaped?/1) + when is_binary(string) and is_function(predicate, 1) do + for <>, into: "", do: percent(char, predicate) end @doc """ - Encode a string as "x-www-urlencoded". + Encodes a string as "x-www-form-urlencoded". ## Example @@ -202,28 +293,29 @@ defmodule URI do "put%3A+it%2B%D0%B9" """ - def encode_www_form(str) when is_binary(str) do - for <>, into: "" do - case percent(c, &char_unreserved?/1) do + @spec encode_www_form(binary) :: binary + def encode_www_form(string) when is_binary(string) do + for <>, into: "" do + case percent(char, &char_unreserved?/1) do "%20" -> "+" - pct -> pct + percent -> percent end end end - defp percent(c, predicate) do - if predicate.(c) do - <> + defp percent(char, predicate) do + if predicate.(char) do + <> else - "%" <> hex(bsr(c, 4)) <> hex(band(c, 15)) + <<"%", hex(bsr(char, 4)), hex(band(char, 15))>> end end - defp hex(n) when n <= 9, do: <> - defp hex(n), do: <> + defp hex(n) when n <= 9, do: n + ?0 + defp hex(n), do: n + ?A - 10 @doc """ - Percent-unescape a URI. + Percent-unescapes a URI. 
## Examples @@ -231,15 +323,16 @@ defmodule URI do "http://elixir-lang.org/" """ + @spec decode(binary) :: binary def decode(uri) do - unpercent(uri) + unpercent(uri, "", false) catch :malformed_uri -> raise ArgumentError, "malformed URI #{inspect uri}" end @doc """ - Decode a string as "x-www-urlencoded". + Decodes a string as "x-www-form-urlencoded". ## Examples @@ -247,24 +340,27 @@ defmodule URI do " Enum.map_join(" ", &unpercent/1) + @spec decode_www_form(binary) :: binary + def decode_www_form(string) do + unpercent(string, "", true) catch :malformed_uri -> - raise ArgumentError, "malformed URI #{inspect str}" + raise ArgumentError, "malformed URI #{inspect string}" end - defp unpercent(<>) do - <> <> unpercent(tail) + defp unpercent(<>, acc, spaces = true) do + unpercent(tail, <>, spaces) end - defp unpercent(<>), do: throw(:malformed_uri) - defp unpercent(<>), do: throw(:malformed_uri) - defp unpercent(<>) do - <> <> unpercent(tail) + defp unpercent(<>, acc, spaces) do + unpercent(tail, <>, spaces) end + defp unpercent(<>, _acc, _spaces), do: throw(:malformed_uri) - defp unpercent(<<>>), do: <<>> + defp unpercent(<>, acc, spaces) do + unpercent(tail, <>, spaces) + end + defp unpercent(<<>>, acc, _spaces), do: acc defp hex_to_dec(n) when n in ?A..?F, do: n - ?A + 10 defp hex_to_dec(n) when n in ?a..?f, do: n - ?a + 10 @@ -272,12 +368,20 @@ defmodule URI do defp hex_to_dec(_n), do: throw(:malformed_uri) @doc """ - Parses a URI into components. + Parses a well-formed URI reference into its components. + + Note this function expects a well-formed URI and does not perform + any validation. See the "Examples" section below for examples of how + `URI.parse/1` can be used to parse a wide range of URIs. - URIs have portions that are handled specially for the particular - scheme of the URI. For example, http and https have different - default ports. Such values can be accessed and registered via - `URI.default_port/1` and `URI.default_port/2`. + This function uses the parsing regular expression as defined + in [RFC 3986, Appendix B](https://tools.ietf.org/html/rfc3986#appendix-B). + + When a URI is given without a port, the value returned by + `URI.default_port/1` for the URI's scheme is used for the `:port` field. + + If a `%URI{}` struct is given to this function, this function returns it + unmodified. 
## Examples @@ -286,30 +390,34 @@ defmodule URI do authority: "elixir-lang.org", userinfo: nil, host: "elixir-lang.org", port: 80} + iex> URI.parse("//elixir-lang.org/") + %URI{authority: "elixir-lang.org", fragment: nil, host: "elixir-lang.org", + path: "/", port: nil, query: nil, scheme: nil, userinfo: nil} + + iex> URI.parse("/foo/bar") + %URI{authority: nil, fragment: nil, host: nil, path: "/foo/bar", + port: nil, query: nil, scheme: nil, userinfo: nil} + + iex> URI.parse("foo/bar") + %URI{authority: nil, fragment: nil, host: nil, path: "foo/bar", + port: nil, query: nil, scheme: nil, userinfo: nil} + """ + @spec parse(t | binary) :: t + def parse(uri) + def parse(%URI{} = uri), do: uri - def parse(s) when is_binary(s) do - # From http://tools.ietf.org/html/rfc3986#appendix-B - regex = ~r/^(([^:\/?#]+):)?(\/\/([^\/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?/ - parts = nillify(Regex.run(regex, s)) + def parse(string) when is_binary(string) do + # From https://tools.ietf.org/html/rfc3986#appendix-B + regex = Regex.recompile!(~r/^(([a-z][a-z0-9\+\-\.]*):)?(\/\/([^\/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?/i) + parts = nillify(Regex.run(regex, string)) destructure [_, _, scheme, _, authority, path, _, query, _, fragment], parts {userinfo, host, port} = split_authority(authority) - if authority do - authority = "" - - if userinfo, do: authority = authority <> userinfo <> "@" - if host, do: authority = authority <> host - if port, do: authority = authority <> ":" <> Integer.to_string(port) - end - - scheme = normalize_scheme(scheme) - - if nil?(port) and not nil?(scheme) do - port = default_port(scheme) - end + scheme = scheme && String.downcase(scheme) + port = port || (scheme && default_port(scheme)) %URI{ scheme: scheme, path: path, query: query, @@ -319,44 +427,145 @@ defmodule URI do end # Split an authority into its userinfo, host and port parts. - defp split_authority(s) do - s = s || "" - components = Regex.run ~r/(^(.*)@)?(\[[a-zA-Z0-9:.]*\]|[^:]*)(:(\d*))?/, s + defp split_authority(string) do + regex = Regex.recompile!(~r/(^(.*)@)?(\[[a-zA-Z0-9:.]*\]|[^:]*)(:(\d*))?/) + components = Regex.run(regex, string || "") destructure [_, _, userinfo, host, _, port], nillify(components) + host = if host, do: host |> String.trim_leading("[") |> String.trim_trailing("]") port = if port, do: String.to_integer(port) - host = if host, do: host |> String.lstrip(?[) |> String.rstrip(?]) {userinfo, host, port} end # Regex.run returns empty strings sometimes. We want # to replace those with nil for consistency. - defp nillify(l) do - for s <- l do - if byte_size(s) > 0, do: s, else: nil + defp nillify(list) do + for string <- list do + if byte_size(string) > 0, do: string end end + + @doc """ + Returns the string representation of the given `URI` struct. + + iex> URI.to_string(URI.parse("http://google.com/")) + "http://google.com/" + + iex> URI.to_string(%URI{scheme: "foo", host: "bar.baz"}) + "foo://bar.baz" + + """ + @spec to_string(t) :: binary + defdelegate to_string(uri), to: String.Chars.URI + + @doc ~S""" + Merges two URIs. + + This function merges two URIs as per + [RFC 3986, section 5.2](https://tools.ietf.org/html/rfc3986#section-5.2). 
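A further sketch of the dot-segment resolution this implies (an illustrative example; any absolute base URI with a multi-segment path behaves the same way):

```elixir
URI.merge("http://example.com/a/b", "../c") |> to_string()
#=> "http://example.com/c"
```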
+ + ## Examples + + iex> URI.merge(URI.parse("http://google.com/"), "/query") |> to_string + "http://google.com/query" + + iex> URI.merge("http://example.com/", "http://google.com/") |> to_string + "http://google.com/" + + """ + @spec merge(t | binary, t | binary) :: t + def merge(uri, rel) + + def merge(%URI{authority: nil}, _rel) do + raise ArgumentError, "you must merge onto an absolute URI" + end + def merge(_base, %URI{scheme: rel_scheme} = rel) when rel_scheme != nil do + rel + end + def merge(base, %URI{authority: authority} = rel) when authority != nil do + %{rel | scheme: base.scheme} + end + def merge(%URI{} = base, %URI{path: rel_path} = rel) when rel_path in ["", nil] do + %{base | query: rel.query || base.query, fragment: rel.fragment} + end + def merge(%URI{} = base, %URI{} = rel) do + new_path = merge_paths(base.path, rel.path) + %{base | path: new_path, query: rel.query, fragment: rel.fragment} + end + def merge(base, rel) do + merge(parse(base), parse(rel)) + end + + defp merge_paths(nil, rel_path), + do: merge_paths("/", rel_path) + defp merge_paths(_, "/" <> _ = rel_path), + do: rel_path + defp merge_paths(base_path, rel_path) do + [_ | base_segments] = path_to_segments(base_path) + path_to_segments(rel_path) + |> Kernel.++(base_segments) + |> remove_dot_segments([]) + |> Enum.join("/") + end + + defp remove_dot_segments([], [head, ".." | acc]), + do: remove_dot_segments([], [head | acc]) + defp remove_dot_segments([], acc), + do: acc + defp remove_dot_segments(["." | tail], acc), + do: remove_dot_segments(tail, acc) + defp remove_dot_segments([head | tail], ["..", ".." | _] = acc), + do: remove_dot_segments(tail, [head | acc]) + defp remove_dot_segments(segments, [_, ".." | acc]), + do: remove_dot_segments(segments, acc) + defp remove_dot_segments([head | tail], acc), + do: remove_dot_segments(tail, [head | acc]) + + defp path_to_segments(path) do + [head | tail] = String.split(path, "/") + reverse_and_discard_empty(tail, [head]) + end + + defp reverse_and_discard_empty([], acc), + do: acc + defp reverse_and_discard_empty([head], acc), + do: [head | acc] + defp reverse_and_discard_empty(["" | tail], acc), + do: reverse_and_discard_empty(tail, acc) + defp reverse_and_discard_empty([head | tail], acc), + do: reverse_and_discard_empty(tail, [head | acc]) end defimpl String.Chars, for: URI do - def to_string(uri) do - scheme = uri.scheme - - if scheme && (port = URI.default_port(scheme)) do - if uri.port == port, do: uri = %{uri | port: nil} - end + def to_string(%{scheme: scheme, port: port, path: path, + query: query, fragment: fragment} = uri) do + uri = + case scheme && URI.default_port(scheme) do + ^port -> %{uri | port: nil} + _ -> uri + end - result = "" + # Based on https://tools.ietf.org/html/rfc3986#section-5.3 + authority = extract_authority(uri) - if uri.scheme, do: result = result <> uri.scheme <> "://" - if uri.userinfo, do: result = result <> uri.userinfo <> "@" - if uri.host, do: result = result <> uri.host - if uri.port, do: result = result <> ":" <> Integer.to_string(uri.port) - if uri.path, do: result = result <> uri.path - if uri.query, do: result = result <> "?" <> uri.query - if uri.fragment, do: result = result <> "#" <> uri.fragment + if(scheme, do: scheme <> ":", else: "") <> + if(authority, do: "//" <> authority, else: "") <> + if(path, do: path, else: "") <> + if(query, do: "?" 
<> query, else: "") <> + if(fragment, do: "#" <> fragment, else: "") + end - result + defp extract_authority(%{host: nil, authority: authority}) do + authority + end + defp extract_authority(%{host: host, userinfo: userinfo, port: port}) do + # According to the grammar at + # https://tools.ietf.org/html/rfc3986#appendix-A, a "host" can have a colon + # in it only if it's an IPv6 or "IPvFuture" address, so if there's a colon + # in the host we can safely surround it with []. + if(userinfo, do: userinfo <> "@", else: "") <> + if(String.contains?(host, ":"), do: "[" <> host <> "]", else: host) <> + if(port, do: ":" <> Integer.to_string(port), else: "") end end diff --git a/lib/elixir/lib/version.ex b/lib/elixir/lib/version.ex index 19289de4c8f..704d50f4bec 100644 --- a/lib/elixir/lib/version.ex +++ b/lib/elixir/lib/version.ex @@ -10,7 +10,7 @@ defmodule Version do ## Versions - In a nutshell, a version is given by three numbers: + In a nutshell, a version is represented by three numbers: MAJOR.MINOR.PATCH @@ -24,15 +24,15 @@ defmodule Version do ## Struct - The version is represented by the Version struct and it has its - fields named according to Semver: `:major`, `:minor`, `:patch`, - `:pre` and `:build`. + The version is represented by the `Version` struct and fields + are named according to SemVer: `:major`, `:minor`, `:patch`, + `:pre`, and `:build`. ## Requirements Requirements allow you to specify which versions of a given - dependency you are willing to work against. It supports common - operators like `>=`, `<=`, `>`, `==` and friends that + dependency you are willing to work against. Requirements support common + operators like `>=`, `<=`, `>`, `==`, and friends that work as one would expect: # Only version 2.0.0 @@ -51,6 +51,37 @@ defmodule Version do "~> 2.0.0" + `~>` will never include pre-release versions of its upper bound. + It can also be used to set an upper bound on only the major + version part. See the table below for `~>` requirements and + their corresponding translation. + + `~>` | Translation + :------------- | :--------------------- + `~> 2.0.0` | `>= 2.0.0 and < 2.1.0` + `~> 2.1.2` | `>= 2.1.2 and < 2.2.0` + `~> 2.1.3-dev` | `>= 2.1.3-dev and < 2.2.0` + `~> 2.0` | `>= 2.0.0 and < 3.0.0` + `~> 2.1` | `>= 2.1.0 and < 3.0.0` + + When `allow_pre: false` is set, the requirement will not match a + pre-release version unless the operand is a pre-release version. + The default is to always allow pre-releases but note that in + Hex `:allow_pre` is set to `false`. See the table below for examples. 
+ + Requirement | Version | `:allow_pre` | Matches + :------------- | :---------- | :----------- | :------ + `~> 2.0` | `2.1.0` | - | `true` + `~> 2.0` | `3.0.0` | - | `false` + `~> 2.0.0` | `2.0.1` | - | `true` + `~> 2.0.0` | `2.1.0` | - | `false` + `~> 2.1.2` | `2.1.3-dev` | `true` | `true` + `~> 2.1.2` | `2.1.3-dev` | `false` | `false` + `~> 2.1-dev` | `2.2.0-dev` | `false` | `true` + `~> 2.1.2-dev` | `2.1.3-dev` | `false` | `true` + `>= 2.1.0` | `2.2.0-dev` | `false` | `false` + `>= 2.1.0-dev` | `2.2.3-dev` | `true` | `true` + """ import Kernel, except: [match?: 2] @@ -58,89 +89,146 @@ defmodule Version do @type version :: String.t | t @type requirement :: String.t | Version.Requirement.t - @type matchable :: {major :: String.t | non_neg_integer, - minor :: non_neg_integer | nil, - patch :: non_neg_integer | nil, - pre :: [String.t]} + @type major :: String.t | non_neg_integer + @type minor :: non_neg_integer | nil + @type patch :: non_neg_integer | nil + @type pre :: [String.t | non_neg_integer] + @type build :: String.t | nil + @type matchable :: {major :: major, + minor :: minor, + patch :: patch, + pre :: pre} + @type t :: %__MODULE__{ + major: major, + minor: minor, + patch: patch, + pre: pre, + build: build} defmodule Requirement do - @moduledoc false - defstruct [:source, :matchspec] + defstruct [:source, :matchspec, :compiled] + @type t :: %__MODULE__{} end defmodule InvalidRequirementError do - defexception [:message] + defexception [:requirement] + + def exception(requirement) when is_binary(requirement) do + %__MODULE__{requirement: requirement} + end + + def message(%{requirement: requirement}) do + "invalid requirement: #{inspect requirement}" + end end defmodule InvalidVersionError do - defexception [:message] + defexception [:version] + + def exception(version) when is_binary(version) do + %__MODULE__{version: version} + end + + def message(%{version: version}) do + "invalid version: #{inspect version}" + end end @doc """ - Check if the given version matches the specification. + Checks if the given version matches the specification. Returns `true` if `version` satisfies `requirement`, `false` otherwise. Raises a `Version.InvalidRequirementError` exception if `requirement` is not - parseable, or `Version.InvalidVersionError` if `version` is not parseable. + parsable, or a `Version.InvalidVersionError` exception if `version` is not parsable. If given an already parsed version and requirement this function won't raise. + ## Options + + * `:allow_pre` (boolean) - when `false`, pre-release versions will not match + unless the operand is a pre-release version. See the table above + for examples. Defaults to `true`. 
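As a quick sketch of how `:allow_pre` plays out in practice (the values follow the pre-release table in the moduledoc above):

```elixir
Version.match?("2.1.3-dev", "~> 2.1.2")                   #=> true
Version.match?("2.1.3-dev", "~> 2.1.2", allow_pre: false) #=> false
```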
+ ## Examples - iex> Version.match?("2.0.0", ">1.0.0") + iex> Version.match?("2.0.0", "> 1.0.0") true - iex> Version.match?("2.0.0", "==1.0.0") + iex> Version.match?("2.0.0", "== 1.0.0") false - iex> Version.match?("foo", "==1.0.0") - ** (Version.InvalidVersionError) foo + iex> Version.match?("foo", "== 1.0.0") + ** (Version.InvalidVersionError) invalid version: "foo" - iex> Version.match?("2.0.0", "== ==1.0.0") - ** (Version.InvalidRequirementError) == ==1.0.0 + iex> Version.match?("2.0.0", "== == 1.0.0") + ** (Version.InvalidRequirementError) invalid requirement: "== == 1.0.0" """ - @spec match?(version, requirement) :: boolean - def match?(vsn, req) when is_binary(req) do - case parse_requirement(req) do - {:ok, req} -> - match?(vsn, req) + @spec match?(version, requirement, Keyword.t) :: boolean + def match?(version, requirement, opts \\ []) + + def match?(version, requirement, opts) when is_binary(requirement) do + case parse_requirement(requirement) do + {:ok, requirement} -> + match?(version, requirement, opts) :error -> - raise InvalidRequirementError, message: req + raise InvalidRequirementError, requirement end end - def match?(version, %Requirement{matchspec: spec}) do - {:ok, result} = :ets.test_ms(to_matchable(version), spec) + def match?(version, %Requirement{matchspec: spec, compiled: false}, opts) do + allow_pre = Keyword.get(opts, :allow_pre, true) + {:ok, result} = :ets.test_ms(to_matchable(version, allow_pre), spec) result != false end + def match?(version, %Requirement{matchspec: spec, compiled: true}, opts) do + allow_pre = Keyword.get(opts, :allow_pre, true) + :ets.match_spec_run([to_matchable(version, allow_pre)], spec) != [] + end + @doc """ - Compares two versions. Returns `:gt` if first version is greater than - the second and `:lt` for vice versa. If the two versions are equal `:eq` - is returned + Compares two versions. + + Returns `:gt` if the first version is greater than the second one, and `:lt` + for vice versa. If the two versions are equal, `:eq` is returned. + + Pre-releases are strictly less than their corresponding release versions. + + Patch segments are compared lexicographically if they are alphanumeric, and + numerically otherwise. - Raises a `Version.InvalidVersionError` exception if `version` is not parseable. - If given an already parsed version this function won't raise. + Build segments are ignored: if two versions differ only in their build segment + they are considered to be equal. + + Raises a `Version.InvalidVersionError` exception if any of the two given + versions are not parsable. If given an already parsed version this function + won't raise. 
## Examples iex> Version.compare("2.0.1-alpha1", "2.0.0") :gt + iex> Version.compare("1.0.0-beta", "1.0.0-rc1") + :lt + + iex> Version.compare("1.0.0-10", "1.0.0-2") + :gt + iex> Version.compare("2.0.1+build0", "2.0.1") :eq iex> Version.compare("invalid", "2.0.1") - ** (Version.InvalidVersionError) invalid + ** (Version.InvalidVersionError) invalid version: "invalid" """ @spec compare(version, version) :: :gt | :eq | :lt - def compare(vsn1, vsn2) do - do_compare(to_matchable(vsn1), to_matchable(vsn2)) + def compare(version1, version2) do + do_compare(to_matchable(version1, true), to_matchable(version2, true)) end - defp do_compare({major1, minor1, patch1, pre1}, {major2, minor2, patch2, pre2}) do + defp do_compare({major1, minor1, patch1, pre1, _}, {major2, minor2, patch2, pre2, _}) do cond do {major1, minor1, patch1} > {major2, minor2, patch2} -> :gt {major1, minor1, patch1} < {major2, minor2, patch2} -> :lt @@ -153,11 +241,12 @@ defmodule Version do end @doc """ - Parse a version string into a `Version`. + Parses a version string into a `Version` struct. ## Examples - iex> Version.parse("2.0.1-alpha1") |> elem(1) + iex> {:ok, version} = Version.parse("2.0.1-alpha1") + iex> version #Version<2.0.1-alpha1> iex> Version.parse("2.0-alpha1") @@ -167,21 +256,45 @@ defmodule Version do @spec parse(String.t) :: {:ok, t} | :error def parse(string) when is_binary(string) do case Version.Parser.parse_version(string) do - {:ok, {major, minor, patch, pre}} -> - vsn = %Version{major: major, minor: minor, patch: patch, - pre: pre, build: get_build(string)} - {:ok, vsn} + {:ok, {major, minor, patch, pre, build_parts}} -> + build = if build_parts == [], do: nil, else: Enum.join(build_parts, "") + version = %Version{major: major, minor: minor, patch: patch, + pre: pre, build: build} + {:ok, version} :error -> :error end end @doc """ - Parse a version requirement string into a `Version.Requirement`. + Parses a version string into a `Version`. + + If `string` is an invalid version, an `InvalidVersionError` is raised. + + ## Examples + + iex> Version.parse!("2.0.1-alpha1") + #Version<2.0.1-alpha1> + + iex> Version.parse!("2.0-alpha1") + ** (Version.InvalidVersionError) invalid version: "2.0-alpha1" + + """ + @spec parse!(String.t) :: t | no_return + def parse!(string) when is_binary(string) do + case parse(string) do + {:ok, version} -> version + :error -> raise InvalidVersionError, string + end + end + + @doc """ + Parses a version requirement string into a `Version.Requirement` struct. ## Examples - iex> Version.parse_requirement("== 2.0.1") |> elem(1) + iex> {:ok, requirement} = Version.parse_requirement("== 2.0.1") + iex> requirement #Version.Requirement<== 2.0.1> iex> Version.parse_requirement("== == 2.0.1") @@ -192,161 +305,173 @@ defmodule Version do def parse_requirement(string) when is_binary(string) do case Version.Parser.parse_requirement(string) do {:ok, spec} -> - {:ok, %Requirement{source: string, matchspec: spec}} + {:ok, %Requirement{source: string, matchspec: spec, compiled: false}} :error -> :error end end - defp to_matchable(%Version{major: major, minor: minor, patch: patch, pre: pre}) do - {major, minor, patch, pre} - end + @doc """ + Compiles a requirement to its internal representation with + `:ets.match_spec_compile/1` for faster matching. 
- defp to_matchable(string) do - case Version.Parser.parse_version(string) do - {:ok, vsn} -> vsn - :error -> raise InvalidVersionError, message: string - end + The internal representation is opaque and cannot be converted to external + term format and then back again without losing its properties (meaning it + can not be sent to a process on another node and still remain a valid + compiled match_spec, nor can it be stored on disk). + """ + @spec compile_requirement(Requirement.t) :: Requirement.t + def compile_requirement(%Requirement{matchspec: spec} = req) do + %{req | matchspec: :ets.match_spec_compile(spec), compiled: true} end - defp get_build(string) do - case Regex.run(~r/\+([^\s]+)$/, string) do - nil -> - nil + defp to_matchable(%Version{major: major, minor: minor, patch: patch, pre: pre}, allow_pre?) do + {major, minor, patch, pre, allow_pre?} + end - [_, build] -> - build + defp to_matchable(string, allow_pre?) do + case Version.Parser.parse_version(string) do + {:ok, {major, minor, patch, pre, _build_parts}} -> + {major, minor, patch, pre, allow_pre?} + :error -> + raise InvalidVersionError, string end end - defmodule Parser.DSL do + defmodule Parser do @moduledoc false - defmacro deflexer(match, do: body) when is_binary(match) do - quote do - def lexer(unquote(match) <> rest, acc) do - lexer(rest, [unquote(body) | acc]) - end + operators = [ + {">=", :>=}, + {"<=", :<=}, + {"~>", :~>}, + {">", :>}, + {"<", :<}, + {"==", :==}, + {"!=", :!=}, + {"!", :!=}, + {" or ", :||}, + {" and ", :&&}, + ] + for {string_op, atom_op} <- operators do + def lexer(unquote(string_op) <> rest, acc) do + lexer(rest, [unquote(atom_op) | acc]) end end - defmacro deflexer(acc, do: body) do - quote do - def lexer("", unquote(acc)) do - unquote(body) - end - end + def lexer(" " <> rest, acc) do + lexer(rest, acc) end - defmacro deflexer(char, acc, do: body) do - quote do - def lexer(<< unquote(char) :: utf8, rest :: binary >>, unquote(acc)) do - unquote(char) = << unquote(char) :: utf8 >> + def lexer(<>, []) do + lexer(rest, [<>, :==]) + end - lexer(rest, unquote(body)) + def lexer(<>, [head | acc]) do + acc = + case head do + head when is_binary(head) -> + [<> | acc] + head when head in [:||, :&&] -> + [<>, :==, head | acc] + _other -> + [<>, head | acc] end - end - end - end - defmodule Parser do - @moduledoc false - import Parser.DSL - - deflexer ">=", do: :'>=' - deflexer "<=", do: :'<=' - deflexer "~>", do: :'~>' - deflexer ">", do: :'>' - deflexer "<", do: :'<' - deflexer "==", do: :'==' - deflexer "!=", do: :'!=' - deflexer "!", do: :'!=' - deflexer " or ", do: :'||' - deflexer " and ", do: :'&&' - deflexer " ", do: :' ' - - deflexer x, [] do - [x, :'=='] - end - - deflexer x, [h | acc] do - cond do - is_binary h -> - [h <> x | acc] - - h in [:'||', :'&&'] -> - [x, :'==', h | acc] - - true -> - [x, h | acc] - end + lexer(body, acc) end - deflexer acc do - Enum.filter(Enum.reverse(acc), &(&1 != :' ')) + def lexer("", acc) do + Enum.reverse(acc) end - @version_regex ~r/^ - (\d+) # major - (?:\.(\d+))? # minor - (?:\.(\d+))? # patch - (?:\-([\d\w\.\-]+))? # pre - (?:\+([\d\w\-]+))? # build - $/x - - @spec parse_requirement(String.t) :: {:ok, Version.Requirement.t} | :error + @spec parse_requirement(String.t) :: {:ok, term} | :error def parse_requirement(source) do lexed = lexer(source, []) to_matchspec(lexed) end - defp nillify(""), do: nil - defp nillify(o), do: o - @spec parse_version(String.t) :: {:ok, Version.matchable} | :error def parse_version(string, approximate? 
\\ false) when is_binary(string) do - if parsed = Regex.run(@version_regex, string) do - destructure [_, major, minor, patch, pre], parsed - patch = nillify(patch) - pre = nillify(pre) - - if nil?(minor) or (nil?(patch) and not approximate?) do - :error - else - major = String.to_integer(major) - minor = String.to_integer(minor) - patch = patch && String.to_integer(patch) - - case parse_pre(pre) do - {:ok, pre} -> - {:ok, {major, minor, patch, pre}} - :error -> - :error - end - end + destructure [version_with_pre, build], String.split(string, "+", parts: 2) + destructure [version, pre], String.split(version_with_pre, "-", parts: 2) + destructure [major, minor, patch, next], String.split(version, ".") + + with nil <- next, + {:ok, major} <- require_digits(major), + {:ok, minor} <- require_digits(minor), + {:ok, patch} <- maybe_patch(patch, approximate?), + {:ok, pre_parts} <- optional_dot_separated(pre), + {:ok, pre_parts} <- convert_parts_to_integer(pre_parts, []), + {:ok, build_parts} <- optional_dot_separated(build) do + {:ok, {major, minor, patch, pre_parts, build_parts}} else - :error + _other -> :error end end - defp parse_pre(nil), do: {:ok, []} - defp parse_pre(pre), do: parse_pre(String.split(pre, "."), []) + defp require_digits(nil), do: :error + defp require_digits(string) do + if leading_zero?(string), do: :error, else: parse_digits(string, "") + end + + defp leading_zero?(<>), do: true + defp leading_zero?(_), do: false - defp parse_pre([piece|t], acc) do - cond do - piece =~ ~r/^(0|[1-9][0-9]*)$/ -> - parse_pre(t, [String.to_integer(piece)|acc]) - piece =~ ~r/^[0-9]*$/ -> - :error - true -> - parse_pre(t, [piece|acc]) + defp parse_digits(<>, acc) when char in ?0..?9, + do: parse_digits(rest, <>) + defp parse_digits(<<>>, acc) when byte_size(acc) > 0, + do: {:ok, String.to_integer(acc)} + defp parse_digits(_, _acc), + do: :error + + defp maybe_patch(patch, approximate?) 
+ defp maybe_patch(nil, true), do: {:ok, nil} + defp maybe_patch(patch, _), do: require_digits(patch) + + defp optional_dot_separated(nil), do: {:ok, []} + defp optional_dot_separated(string) do + parts = String.split(string, ".") + if Enum.all?(parts, &(&1 != "" and valid_identifier?(&1))) do + {:ok, parts} + else + :error + end + end + + defp convert_parts_to_integer([part | rest], acc) do + case parse_digits(part, "") do + {:ok, integer} -> + if leading_zero?(part) do + :error + else + convert_parts_to_integer(rest, [integer | acc]) + end + :error -> + convert_parts_to_integer(rest, [part | acc]) end end - defp parse_pre([], acc) do + defp convert_parts_to_integer([], acc) do {:ok, Enum.reverse(acc)} end + defp valid_identifier?(<>) + when char in ?0..?9 + when char in ?a..?z + when char in ?A..?Z + when char == ?- do + valid_identifier?(rest) + end + + defp valid_identifier?(<<>>) do + true + end + + defp valid_identifier?(_other) do + false + end + defp valid_requirement?([]), do: false defp valid_requirement?([a | next]), do: valid_requirement?(a, next) @@ -393,7 +518,7 @@ defmodule Version do if valid_requirement?(lexed) do first = to_condition(lexed) rest = Enum.drop(lexed, 2) - {:ok, [{{:'$1', :'$2', :'$3', :'$4'}, [to_condition(first, rest)], [:'$_']}]} + {:ok, [{{:'$1', :'$2', :'$3', :'$4', :'$5'}, [to_condition(first, rest)], [:'$_']}]} else :error end @@ -401,87 +526,113 @@ defmodule Version do :invalid_matchspec -> :error end - defp to_condition([:'==', version | _]) do - version = parse_condition(version) - {:'==', :'$_', {:const, version}} + defp to_condition([:==, version | _]) do + matchable = parse_condition(version) + main_condition(:==, matchable) end - defp to_condition([:'!=', version | _]) do - version = parse_condition(version) - {:'/=', :'$_', {:const, version}} + defp to_condition([:!=, version | _]) do + matchable = parse_condition(version) + main_condition(:'/=', matchable) end - defp to_condition([:'~>', version | _]) do + defp to_condition([:~>, version | _]) do from = parse_condition(version, true) to = approximate_upper(from) - {:andalso, to_condition([:'>=', matchable_to_string(from)]), - to_condition([:'<', matchable_to_string(to)])} + {:andalso, to_condition([:>=, matchable_to_string(from)]), + to_condition([:<, matchable_to_string(to)])} end - defp to_condition([:'>', version | _]) do + defp to_condition([:>, version | _]) do {major, minor, patch, pre} = parse_condition(version) - {:orelse, {:'>', {{:'$1', :'$2', :'$3'}}, - {:const, {major, minor, patch}}}, - {:andalso, {:'==', {{:'$1', :'$2', :'$3'}}, - {:const, {major, minor, patch}}}, - {:orelse, {:andalso, {:'==', {:length, :'$4'}, 0}, - {:'/=', length(pre), 0}}, - {:andalso, {:'/=', length(pre), 0}, - {:orelse, {:'>', {:length, :'$4'}, length(pre)}, - {:andalso, {:'==', {:length, :'$4'}, length(pre)}, - {:'>', :'$4', {:const, pre}}}}}}}} + {:andalso, {:orelse, main_condition(:>, {major, minor, patch}), + {:andalso, main_condition(:==, {major, minor, patch}), + pre_condition(:>, pre)}}, + no_pre_condition(pre)} end - defp to_condition([:'>=', version | _]) do + defp to_condition([:>=, version | _]) do matchable = parse_condition(version) - {:orelse, {:'==', :'$_', {:const, matchable}}, - to_condition([:'>', version])} + {:orelse, main_condition(:==, matchable), + to_condition([:>, version])} end - defp to_condition([:'<', version | _]) do + defp to_condition([:<, version | _]) do {major, minor, patch, pre} = parse_condition(version) - {:orelse, {:'<', {{:'$1', :'$2', :'$3'}}, - {:const, {major, 
minor, patch}}}, - {:andalso, {:'==', {{:'$1', :'$2', :'$3'}}, - {:const, {major, minor, patch}}}, - {:orelse, {:andalso, {:'/=', {:length, :'$4'}, 0}, - {:'==', length(pre), 0}}, - {:andalso, {:'/=', {:length, :'$4'}, 0}, - {:orelse, {:'<', {:length, :'$4'}, length(pre)}, - {:andalso, {:'==', {:length, :'$4'}, length(pre)}, - {:'<', :'$4', {:const, pre}}}}}}}} + {:orelse, main_condition(:<, {major, minor, patch}), + {:andalso, main_condition(:==, {major, minor, patch}), + pre_condition(:<, pre)}} end - defp to_condition([:'<=', version | _]) do + defp to_condition([:<=, version | _]) do matchable = parse_condition(version) - {:orelse, {:'==', :'$_', {:const, matchable}}, - to_condition([:'<', version])} + {:orelse, main_condition(:==, matchable), + to_condition([:<, version])} end defp to_condition(current, []) do current end - defp to_condition(current, [:'&&', operator, version | rest]) do + defp to_condition(current, [:&&, operator, version | rest]) do to_condition({:andalso, current, to_condition([operator, version])}, rest) end - defp to_condition(current, [:'||', operator, version | rest]) do + defp to_condition(current, [:||, operator, version | rest]) do to_condition({:orelse, current, to_condition([operator, version])}, rest) end defp parse_condition(version, approximate? \\ false) do case parse_version(version, approximate?) do - {:ok, version} -> version + {:ok, {major, minor, patch, pre, _build}} -> {major, minor, patch, pre} :error -> throw :invalid_matchspec end end + defp main_condition(op, version) when tuple_size(version) == 3 do + {op, {{:'$1', :'$2', :'$3'}}, + {:const, version}} + end + + defp main_condition(op, version) when tuple_size(version) == 4 do + {op, {{:'$1', :'$2', :'$3', :'$4'}}, + {:const, version}} + end + + defp pre_condition(:>, pre) do + length_pre = length(pre) + + {:orelse, {:andalso, {:==, {:length, :'$4'}, 0}, + {:const, length_pre != 0}}, + {:andalso, {:const, length_pre != 0}, + {:orelse, {:>, {:length, :'$4'}, length_pre}, + {:andalso, {:==, {:length, :'$4'}, length_pre}, + {:>, :'$4', {:const, pre}}}}}} + end + + defp pre_condition(:<, pre) do + length_pre = length(pre) + + {:orelse, {:andalso, {:'/=', {:length, :'$4'}, 0}, + {:const, length_pre == 0}}, + {:andalso, {:'/=', {:length, :'$4'}, 0}, + {:orelse, {:<, {:length, :'$4'}, length_pre}, + {:andalso, {:==, {:length, :'$4'}, length_pre}, + {:<, :'$4', {:const, pre}}}}}} + end + + defp no_pre_condition([]) do + {:orelse, :'$5', {:==, {:length, :'$4'}, 0}} + end + defp no_pre_condition(_pre) do + {:const, true} + end + defp matchable_to_string({major, minor, patch, pre}) do patch = if patch, do: "#{patch}", else: "0" pre = if pre != [], do: "-#{Enum.join(pre, ".")}" @@ -492,10 +643,22 @@ end defimpl String.Chars, for: Version do def to_string(version) do - pre = unless Enum.empty?(pre = version.pre), do: "-#{pre}" + pre = pre(version.pre) build = if build = version.build, do: "+#{build}" "#{version.major}.#{version.minor}.#{version.patch}#{pre}#{build}" end + + defp pre([]) do + "" + end + + defp pre(pre) do + "-" <> + Enum.map_join(pre, ".", fn + int when is_integer(int) -> Integer.to_string(int) + string when is_binary(string) -> string + end) + end end defimpl Inspect, for: Version do diff --git a/lib/elixir/mix.exs b/lib/elixir/mix.exs index 4b39a746602..92355fed05f 100644 --- a/lib/elixir/mix.exs +++ b/lib/elixir/mix.exs @@ -4,9 +4,6 @@ defmodule Elixir.Mixfile do def project do [app: :elixir, version: System.version, - build_per_environment: false, - escript_embed_elixir: false, - 
escript_main_module: :elixir, - escript_emu_args: "%%! -noshell\n"] + build_per_environment: false] end end diff --git a/lib/elixir/pages/Behaviours.md b/lib/elixir/pages/Behaviours.md new file mode 100644 index 00000000000..287cff62346 --- /dev/null +++ b/lib/elixir/pages/Behaviours.md @@ -0,0 +1,54 @@ +# Behaviours + +Behaviours in Elixir (and Erlang) are a way to separate and abstract the generic part of a component (which becomes the *behaviour module*) from the specific part (which becomes the *callback module*). + +A behaviour module defines a set of functions and macros (referred to as *callbacks*) that callback modules implementing that behaviour must export. This "interface" identifies the specific part of the component. For example, the `GenServer` behaviour and functions abstract away all the message-passing (sending and receiving) and error reporting that a "server" process will likely want to implement from the specific parts such as the actions that this server process has to perform. + +If a callback module that implements a given behaviour doesn't export all the functions and macros defined by that behaviour, the user will be notified through warnings during the compilation process (no errors will happen). + +Elixir's standard library contains a few frequently used behaviours such as `GenServer`, `Supervisor`, and `Application`. + +## Defining a behaviour + +A behaviour is always backed by a module (which is how the behaviour will be identified): the module where callbacks are defined. To define a behaviour module, it's enough to define one or more callbacks in that module. To define callbacks, the `@callback` and `@macrocallback` module attributes can be used (for function callbacks and macro callbacks respectively). + + defmodule MyBehaviour do + @callback my_fun(arg :: any) :: any + @macrocallback my_macro(arg :: any) :: Macro.t + end + +As seen in the example above, defining a callback is a matter of defining a specification for that callback, made of: + + * the callback name (`my_fun` or `my_macro` in the example) + * the arguments that the callback must accept (`arg :: any` in the example) + * the *expected* type of the callback return value + +For more information on typespecs, consult the ["Typespecs"](typespecs.html) page in the Elixir documentation. As mentioned in this page, type specification are only annotations used by documentation and tools, so defining such specifications for behaviours serves mostly for such purposes. + +### Optional callbacks + +Optional callbacks are callbacks that callback modules may implement if they want to, but are not required to. +Usually, behaviour modules know if they should call those callbacks based on configuration, or they check if the callbacks are defined with `function_exported?/3` or `macro_exported?/3`. +Optional callbacks can be defined through the `@optional_callbacks` module attribute, which has to be a keyword list with function or macro name as key and arity as value. For example: + + defmodule MyBehaviour do + @callback vital_fun() :: any + @callback non_vital_fun() :: any + @macrocallback non_vital_macro(arg :: any) :: Macro.t + @optional_callbacks non_vital_fun: 0, non_vital_macro: 1 + end + +One example of optional callback in Elixir's standard library is `c:GenServer.format_status/2`. 
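To make the `function_exported?/3` check mentioned above concrete, here is a minimal sketch of the generic side of such a behaviour (the module and function names are made up for illustration):

    defmodule MyBehaviour do
      @callback vital_fun() :: any
      @callback non_vital_fun() :: any
      @optional_callbacks non_vital_fun: 0

      # The callback module must be loaded for function_exported?/3 to
      # return true, hence the Code.ensure_loaded/1 call.
      def run(callback_module) do
        Code.ensure_loaded(callback_module)
        result = callback_module.vital_fun()

        if function_exported?(callback_module, :non_vital_fun, 0) do
          callback_module.non_vital_fun()
        else
          result
        end
      end
    end

A callback module that does not export `non_vital_fun/0` still satisfies the behaviour; the generic code simply falls back to the result of `vital_fun/0`.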
+ +## Implementing behaviours + +To specify that a module implements a given behaviour, the `@behaviour` attribute must be used: + + defmodule MyBehaviour do + @callback my_fun(arg :: any) :: any + end + + defmodule MyCallbackModule do + @behaviour MyBehaviour + def my_fun(arg), do: arg + end diff --git a/lib/elixir/pages/Deprecations.md b/lib/elixir/pages/Deprecations.md new file mode 100644 index 00000000000..6377f09abbc --- /dev/null +++ b/lib/elixir/pages/Deprecations.md @@ -0,0 +1,75 @@ +# Deprecations + +## Policy + +Elixir deprecations happen in 3 steps: + + 1. The feature is soft-deprecated. It means both CHANGELOG and documentation must list the feature as deprecated but no warning is effectively emitted by running the code. There is no requirement to soft-deprecate a feature. + + 2. The feature is effectively deprecated by emitting warnings on usage. In order to deprecate a feature, the proposed alternative MUST exist for AT LEAST two versions. For example, `Enum.uniq/2` was soft-deprecated in favor of `Enum.uniq_by/2` in Elixir v1.1. This means a deprecation warning may only be emitted by Elixir v1.3 or later. + + 3. The feature is removed. This can only happen on major releases. This means deprecated features in Elixir v1.x shall only be removed by Elixir v2.x. + + +## Table of deprecations + +Deprecated feature | Deprecated in | Replaced by (available since) +:----------------------------------------------- | :------------ | :---------------------------- +Passing a non-Date to `Date.to_erl/1` | [v1.5] | Explicitly convert it to a Date first (v1.3) +Passing a non-Date to `Date.to_iso8601/1` | [v1.5] | Explicitly convert it to a Date first (v1.3) +Passing a non-Time to `Time.to_erl/1` | [v1.5] | Explicitly convert it to a Time first (v1.3) +Passing a non-Time to `Time.to_iso8601/1` | [v1.5] | Explicitly convert it to a Time first (v1.3) +`Atom.to_char_list/1` | [v1.5] | `Atom.to_charlist/1` (v1.3) +`Enum.filter_map/3` | [v1.5] | `Enum.filter/2` + `Enum.map/2` or for comprehensions (v1.0) +`Float.to_char_list/1` | [v1.5] | `Float.to_charlist/1` (v1.3) +`GenEvent` module | [v1.5] | `Supervisor` and `GenServer` (v1.0);
[`GenStage`](https://hex.pm/packages/gen_stage) (v1.3);
[`:gen_event`](http://www.erlang.org/doc/man/gen_event.html) (OTP 17) +`Integer.to_char_list/1` and `Integer.to_char_list/2` | [v1.5] | `Integer.to_charlist/1` and `Integer.to_charlist/2` (v1.3) +`Kernel.to_char_list/1` | [v1.5] | `Kernel.to_charlist/1` (v1.3) +`List.Chars.to_char_list/1` | [v1.5] | `List.Chars.to_charlist/1` (v1.3) +`Stream.filter_map/3` | [v1.5] | `Stream.filter/2` + `Stream.map/2` (v1.0) +`String.ljust/3` and `String.rjust/3` | [v1.5] | `String.pad_leading/3` and `String.pad_trailing/3` with a binary padding (v1.3) +`String.strip/1` and `String.strip/2` | [v1.5] | `String.trim/1` and `String.trim/2` (v1.3) +`String.lstrip/1` and `String.rstrip/1` | [v1.5] | `String.trim_leading/1` and `String.trim_trailing/1` (v1.3) +`String.lstrip/2` and `String.rstrip/2` | [v1.5] | `String.trim_leading/2` and `String.trim_trailing/2` with a binary as second argument (v1.3) +`String.to_char_list/1` | [v1.5] | `String.to_charlist/1` (v1.3) +`()` to mean `nil` | [v1.5] | `nil` (v1.0) +`:as_char_lists` value in `t:Inspect.Opts.t/0` type | [v1.5] | `:as_charlists` (v1.3) +`:char_lists` key in `t:Inspect.Opts.t/0` type | [v1.5] | `:charlists` (v1.3) +`char_list/0` type | [v1.5] | `charlist/0` type (v1.3) +`@compile {:parse_transform, _}` in `Module` | [v1.5] | *None* +`Access.key/1` | [v1.4] | `Access.key/2` (v1.3) +`Behaviour` module | [v1.4] | `@callback` (v1.0) +`Enum.uniq/2` | [v1.4] | `Enum.uniq_by/2` (v1.2) +`Float.to_char_list/2` | [v1.4] | `:erlang.float_to_list/2` (OTP 17) +`Float.to_string/2` | [v1.4] | `:erlang.float_to_binary/2` (OTP 17) +`HashDict` module | [v1.4] | `Map` (v1.2) +`HashSet` module | [v1.4] | `MapSet` (v1.1) +`Set` module | [v1.4] | `MapSet` (v1.1) +`Stream.uniq/2` | [v1.4] | `Stream.uniq_by/2` (v1.2) +`IEx.Helpers.import_file/2` | [v1.4] | [`IEx.Helpers.import_file_if_available/1`](https://hexdocs.pm/iex/IEx.Helpers.html#import_file_if_available/1) (v1.3) +`Mix.Utils.camelize/1` | [v1.4] | `Macro.camelize/1` (v1.2) +`Mix.Utils.underscore/1` | [v1.4] | `Macro.underscore/1` (v1.2) +Variable used as function call | [v1.4] | Use parentheses (v1.0) +Anonymous functions with no expression after `->` | [v1.4] | Use an expression or explicitly return `nil` (v1.0) +`Dict` module | [v1.3] | `Keyword` (v1.0);
`Map` (v1.2) +`Keyword.size/1` | [v1.3] | `Kernel.length/1` (v1.0) +`Map.size/1` | [v1.3] | `Kernel.map_size/1` (v1.0) +`Set` behaviour | [v1.3] | `MapSet` data structure (v1.1) +`String.valid_character?/1` | [v1.3] | `String.valid?/1` (v1.0) +`Task.find/2` | [v1.3] | Use direct message matching (v1.0) +`:append_first` option in `Kernel.defdelegate/2` | [v1.3] | Define the function explicitly (v1.0) +`/r` option in `Regex` | [v1.3] | `/U` (v1.1) +`\x{X*}` inside strings/sigils/charlists | [v1.3] | `\uXXXX` or `\u{X*}` (v1.1) +Map or dictionary as second argument in `Enum.group_by/3` | [v1.3] | Use `Enum.reduce/3` (v1.0) +Non-map as second argument in `URI.decode_query/2` | [v1.3] | Use a map (v1.0) +`Dict` behaviour | [v1.2] | `MapSet` data structure (v1.1) +`Access` protocol | [v1.1] | `Access` behaviour (v1.1) +`as: true \| false` in `alias/2` and `require/2` | [v1.1] | *None* +`?\xHEX` | [v1.1] | `0xHEX` (v1.0) +Empty string in `String.starts_with?/2`, `String.ends_with?/2`, `String.contains?/2`.
*__NOTE__: Feature made back available in v1.3* | [v1.1] to [v1.2] | Explicitly check for `""` beforehand (v1.0) + +[v1.1]: https://github.com/elixir-lang/elixir/blob/v1.1/CHANGELOG.md#4-deprecations +[v1.2]: https://github.com/elixir-lang/elixir/blob/v1.2/CHANGELOG.md#changelog-for-elixir-v12 +[v1.3]: https://github.com/elixir-lang/elixir/blob/v1.3/CHANGELOG.md#4-deprecations +[v1.4]: https://github.com/elixir-lang/elixir/blob/v1.4/CHANGELOG.md#4-deprecations +[v1.5]: https://github.com/elixir-lang/elixir/blob/master/CHANGELOG.md#4-deprecations diff --git a/lib/elixir/pages/Guards.md b/lib/elixir/pages/Guards.md new file mode 100644 index 00000000000..cc7b2b3f9bc --- /dev/null +++ b/lib/elixir/pages/Guards.md @@ -0,0 +1,222 @@ +# Guards + +Guards are a way to augment pattern matching with more complex checks; they are allowed in a predefined set of constructs where pattern matching is allowed. + +## List of allowed expressions + +For reference, the following is a comprehensive list of all expressions allowed in guards: + + * comparison operators (`==`, `!=`, `===`, `!==`, `>`, `>=`, `<`, `<=`) + * strictly boolean operators (`and`, `or`, `not`) (the `&&`, `||`, and `!` sibling operators are not allowed as they're not *strictly* boolean - meaning they don't require both sides to be booleans) + * arithmetic binary operators (`+`, `-`, `*`, `/`) + * arithmetic unary operators (`+`, `-`) + * binary concatenation operator (`<>`) + * `in` and `not in` operators (as long as the right-hand side is a list or a range) + * the following "type-check" functions (all documented in the `Kernel` module): + * `is_atom/1` + * `is_binary/1` + * `is_bitstring/1` + * `is_boolean/1` + * `is_float/1` + * `is_function/1` + * `is_function/2` + * `is_integer/1` + * `is_list/1` + * `is_map/1` + * `is_nil/1` + * `is_number/1` + * `is_pid/1` + * `is_port/1` + * `is_reference/1` + * `is_tuple/1` + * the following guard-friendly functions (all documented in the `Kernel` module): + * `abs/1` + * `binary_part/3` + * `bit_size/1` + * `byte_size/1` + * `div/2` + * `elem/2` + * `hd/1` + * `length/1` + * `map_size/1` + * `node/0` + * `node/1` + * `rem/2` + * `round/1` + * `self/0` + * `tl/1` + * `trunc/1` + * `tuple_size/1` + * the following handful of Erlang bitwise operations, if imported from the `Bitwise` module: + * `band/2` or the `&&&` operator + * `bor/2` or the `|||` operator + * `bnot/1` or the `~~~` operator + * `bsl/1` or the `<<<` operator + * `bsr/1` or the `>>>` operator + * `bxor/2` or the `^^^` operator + +Macros constructed out of any combination of the above guards are also valid guards - for example, `Integer.is_even/1`. See the section "Defining custom guard expressions" below. + +## Why guards + +Let's see an example of a guard used in a function clause: + +```elixir +def empty_map?(map) when map_size(map) == 0, do: true +def empty_map?(map) when is_map(map), do: false +``` + +Guards start with the `when` keyword, which is followed by a boolean expression (we will define the grammar of guards more formally later on). + +Writing the `empty_map?/1` function by only using pattern matching would not be possible (as pattern matching on `%{}` would match *every* map, not empty maps). + +## Where guards can be used + +In the example above, we show how guards can be used in function clauses. 
There are several constructs that allow guards; for example: + + * function clauses: + + ```elixir + def foo(term) when is_integer(term), do: term + def foo(term) when is_float(term), do: round(term) + ``` + + * `case` expressions: + + ```elixir + case x do + 1 -> :one + 2 -> :two + n when is_integer(n) and n > 2 -> :larger_than_two + end + ``` + + * anonymous functions (`fn`s): + + ```elixir + larger_than_two? = fn + n when is_integer(n) and n > 2 -> true + n when is_integer(n) -> false + end + ``` + +Other constructs are `for`, `with`, `try`/`rescue`/`catch`/`else`, and the `match?/2` macro in the `Kernel` module. + +## Failing guards + +Errors in guards do not result in a runtime error; instead, they make the guard fail. For example, the `length/1` function only works with lists, and if we use it on anything else it fails: + +```elixir +iex> length("hello") +** (ArgumentError) argument error +``` + +However, when used in guards, it simply makes the corresponding clause fail (i.e., not match): + +```elixir +iex> case "hello" do +...> something when length(something) > 0 -> +...> :length_worked +...> _anything_else -> +...> :length_failed +...> end +:length_failed +``` + +In many cases, we can take advantage of this: in the code above, for example, we can use `length/1` to both check that the given thing is a list *and* check some properties of its length (instead of using `is_list(something) and length(something) > 0`). + +## Expressions in guard clauses + +Only a handful of expressions are allowed in guard clauses. This is a deliberate choice: only a predefined set of side-effect-free functions are allowed. This way, Elixir (and Erlang) can make sure that nothing bad happens while executing guards and no mutations happen anywhere. This behaviour is also coherent with pattern matching, which is naturally a side-effect-free operation. Finally, keeping the expressions allowed in guard clauses to a closed set of predefined ones allows the compiler to optimize the code related to choosing the right clause. + +## Defining custom guard expressions + +As mentioned before, only the expressions listed in this page are allowed in guards. However, we can take advantage of macros to write custom guards that can simplify our programs or make them more domain-specific. At the end of the day, what matters is that the *output* of the macros (which is what will be compiled) boils down to a combination of the allowed expressions. + +Let's look at a quick case study: we want to check that a function argument is an even or odd integer. With pattern matching, this is impossible to do since there are infinitely many integers, and thus we can't pattern match on each individual even/odd number. Let's focus on checking for even numbers since checking for odd ones is almost identical. + +Such a guard would look like this: + +```elixir +def my_function(number) when is_integer(number) and rem(number, 2) == 0 do + # do stuff +end +``` + +This would be repetitive to write every time we need this check, so, as mentioned at the beginning of this section, we can abstract this away using a macro. Remember that defining a function that performs this check wouldn't work because we can't use custom functions in guards.
Our macro would look like this: + +```elixir +defmodule MyInteger do + defmacro is_even(number) do + quote do + is_integer(unquote(number)) and rem(unquote(number), 2) == 0 + end + end +end +``` + +and then: + +```elixir +import MyInteger, only: [is_even: 1] + +def my_function(number) when is_even(number) do + # do stuff +end +``` + +## Multiple guards in the same clause + +There exists an additional way to simplify a chain of `or`s in guards: Elixir supports writing "multiple guards" in the same clause. This: + +```elixir +def foo(term) when is_integer(term) or is_float(term) or is_nil(term), + do: :maybe_number +def foo(_other), + do: :something_else +``` + +can be alternatively written as: + +```elixir +def foo(term) + when is_integer(term) + when is_float(term) + when is_nil(term) do + :maybe_number +end + +def foo(_other) do + :something_else +end +``` + +For most cases, the two forms are exactly the same. However, there exists a subtle difference in the case of failing guards, as discussed in the section above. +In case of a boolean expression guard, a failed element means the whole guard fails. In case of multiple guards it means the next one will be evaluated. +The difference can be highlighted with an example: + +```elixir +def multiguard(value) + when map_size(value) < 1 + when tuple_size(value) < 1 do + :guard_passed +end +def multiguard(_value) do + :guard_failed +end + +def boolean(value) when map_size(value) < 1 or tuple_size(value) < 1 do + :guard_passed +end +def boolean(value) do + :guard_failed +end + +multiguard(%{}) #=> :guard_passed +multiguard({}) #=> :guard_passed + +boolean(%{}) #=> :guard_passed +boolean({}) #=> :guard_failed +``` + +For cases where guards do not rely on the failing guard behavior the two forms are exactly the same semantically but there are cases where multiple guard clauses may be more aesthetically pleasing. diff --git a/lib/elixir/pages/Naming Conventions.md b/lib/elixir/pages/Naming Conventions.md new file mode 100644 index 00000000000..0bf59ec4b03 --- /dev/null +++ b/lib/elixir/pages/Naming Conventions.md @@ -0,0 +1,108 @@ +# Naming Conventions + +This document covers some naming conventions in Elixir code, from casing to punctuation characters. + +## Casing + +Elixir developers must use `snake_case` when defining variables, function names, module attributes, etc: + + some_map = %{this_is_a_key: "and a value"} + is_map(some_map) + +Aliases, commonly used as module names, are an exception as they must be capitalized and written in `CamelCase`, like `OptionParser`. For aliases, capital letters are kept in acronyms, like `ExUnit.CaptureIO` or `Mix.SCM`. + +Atoms can be written either in `:snake_case` or `:CamelCase`, although the convention is to use the snake case version throughout Elixir. + +Generally speaking, filenames follow the `snake_case` convention of the module they define. For example, `MyApp` should be defined inside the `my_app.ex` file. However, this is only a convention. At the end of the day, any filename can be used as they do not affect the compiled code in any way. + +## Underscore (_foo) + +Elixir relies on underscores in different situations. + +For example, a value that is not meant to be used must be assigned to `_` or to a variable starting with underscore: + + iex> {:ok, _contents} = File.read("README.md") + +Function names may also start with an underscore. 
Such functions are never imported by default: + + iex> defmodule Example do + ...> def _wont_be_imported do + ...> :oops + ...> end + ...> end + + iex> import Example + iex> _wont_be_imported() + ** (CompileError) iex:1: undefined function _wont_be_imported/0 + +Due to this property, Elixir relies on functions starting with underscore to attach compile-time metadata to modules. Such functions are most often in the `__foo__` format. For example, every module in Elixir has an `__info__/1` function: + + iex> String.__info__(:functions) + [at: 2, capitalize: 1, chunk: 2, ...] + +Elixir also includes 4 special variables that follow the double underscore format. These forms retrieve compile-time information about the current environment: `__MODULE__/0`, `__DIR__/0`, `__ENV__/0` and `__CALLER__/0`. + +## Trailing bang (foo!) + +A trailing bang (exclamation mark) signifies a function or macro where failure cases raise an exception. + +Many functions come in pairs, such as `File.read/1` and `File.read!/1`. `File.read/1` will return a success or failure tuple, whereas `File.read!/1` will return a plain value or else raise an exception: + + iex> File.read("file.txt") + {:ok, "file contents"} + iex> File.read("no_such_file.txt") + {:error, :enoent} + + iex> File.read!("file.txt") + "file contents" + iex> File.read!("no_such_file.txt") + ** (File.Error) could not read file no_such_file.txt: no such file or directory + +The version without `!` is preferred when you want to handle different outcomes using pattern matching: + + case File.read(file) do + {:ok, body} -> # do something with the `body` + {:error, reason} -> # handle the error caused by `reason` + end + +However, if you expect the outcome to always to be successful (e.g. if you expect the file always to exist), the bang variation can be more convenient and will raise a more helpful error message (than a failed pattern match) on failure. + +More examples of paired functions: `Base.decode16/2` and `Base.decode16!/2`, `File.cwd/0` and `File.cwd!/0` + +There are also some non-paired functions, with no non-bang variant. The bang still signifies that it will raise an exception on failure. Examples: `Mix.Config.validate!/1`, `Protocol.assert_protocol!/1` + +In macro code, the bang on `Kernel.alias!/1` and `Kernel.var!/2` signifies that [macro hygiene](http://elixir-lang.org/getting-started/meta/macros.html#macros-hygiene) is set aside. + +## Trailing question mark (foo?) + +Functions that return a boolean are named with a trailing question mark. + +Examples: `Keyword.keyword?/1`, `Mix.debug?/0`, `String.contains?/2` + +However, functions that return booleans and are valid in guards follow another convention, described next. + +## is_ prefix (is_foo) + +Type checks and other boolean checks that are allowed in guard clauses are named with an `is_` prefix. + +Examples: `Integer.is_even/1`, `Kernel.is_list/1` + +These functions and macros follow the Erlang convention of an `is_` prefix, instead of a trailing question mark, precisely to indicate that they are allowed in guard clauses. + +Note that type checks that are not valid in guard clauses do not follow this convention. Examples: `Keyword.keyword?/1`, `Regex.regex?/1` + +## Special names + +Some names have specific meaning in Elixir. We detail those cases below. + +### length and size + +When you see `size` in a function name, it means the operation runs in constant time (also written as "O(1) time") because the size is stored alongside the data structure. 
+ +Examples: `Kernel.map_size/1`, `Kernel.tuple_size/1` + +When you see `length`, the operation runs in linear time ("O(n) time") because the entire data structure has to be traversed. + +Examples: `Kernel.length/1`, `String.length/1` + +In other words, functions using the word "size" in its name will take the same amount of time whether the data structure is tiny or huge. Conversely, functions having "length" in its name will take more time as the data structure grows in size. diff --git a/lib/elixir/pages/Operators.md b/lib/elixir/pages/Operators.md new file mode 100644 index 00000000000..158c032e9a3 --- /dev/null +++ b/lib/elixir/pages/Operators.md @@ -0,0 +1,154 @@ +# Operators + +This document covers operators in Elixir, how they are parsed, how they can be defined, and how they can be overridden. + +## Operator precedence and associativity + +The following is a list of all operators that Elixir is capable of parsing, ordered from higher to lower precedence, alongside their associativity: + +Operator | Associativity +---------------------------------------------------------------------------------------- | ------------- +`@` | Unary +`.` | Left to right +`+` `-` `!` `^` `not` `~~~` | Unary +`*` `/` | Left to right +`+` `-` | Left to right +`++` `--` `..` `<>` | Right to left +`in` `not in` | Left to right +`\|>` `<<<` `>>>` `~>>` `<<~` `~>` `<~` `<~>` `<\|>` | Left to right +`<` `>` `<=` `>=` | Left to right +`==` `!=` `=~` `===` `!==` | Left to right +`&&` `&&&` `and` | Left to right +`\|\|` `\|\|\|` `or` | Left to right +`=` | Right to left +`=>` | Right to left +`\|` | Right to left +`::` | Right to left +`when` | Right to left +`<-`, `\\` | Left to right +`&` | Unary + +## Comparison operators + +Elixir provides the following built-in comparison operators: + + * `==` - equality + * `===` - strict equality + * `!=` - inequality + * `!==` - strict inequality + * `>` - greater than + * `<` - less than + * `>=` - greater than or equal + * `<=` - less than or equal + +The only difference between `==` and `===` is that `===` is stricter when it comes to comparing integers and floats: + +```elixir +iex> 1 == 1.0 +true +iex> 1 === 1.0 +false +``` + +`!=` and `!==` act as the negation of `==` and `===`, respectively. + +### Term ordering + +In Elixir, different data types can be compared using comparison operators: + +```elixir +iex> 1 < :an_atom +true +``` + +The reason we can compare different data types is pragmatism. Sorting algorithms don’t need to worry about different data types in order to sort. For reference, the overall sorting order is defined below: + +``` +number < atom < reference < function < port < pid < tuple < map < list < bitstring +``` + +When comparing two numbers of different types (a number is either an integer or a float), a conversion to the type with greater precision will always occur, unless the comparison operator used is either `===` or `!==`. A float will be considered more precise than an integer, unless the float is greater/less than +/-9007199254740992.0, at which point all the significant figures of the float are to the left of the decimal point. This behavior exists so that the comparison of large numbers remains transitive. + +The collection types are compared using the following rules: + +* Tuples are compared by size then element by element. +* Maps are compared by size then by keys in ascending term order then by values in key order. In the specific case of maps' key ordering, integers are always considered to be less than floats. 
+* Lists are compared element by element. + +## Custom and overridden operators + +### Defining custom operators + +Elixir is capable of parsing a predefined set of operators; this means that it's not possible to define new operators (like one could do in Haskell, for example). However, not all operators that Elixir can parse are *used* by Elixir: for example, `+` and `||` are used by Elixir for addition and boolean *or*, but `<~>` is not used (but valid). + +To define an operator, you can use the usual `def*` constructs (`def`, `defp`, `defmacro`, and so on) but with a syntax similar to how the operator is used: + +```elixir +defmodule MyOperators do + # We define ~> to return the maximum of the given two numbers, + # and <~ to return the minimum. + + def a ~> b, do: max(a, b) + def a <~ b, do: min(a, b) +end +``` + +To use the newly defined operators, we **have to** import the module that defines them: + +```elixir +iex> import MyOperators +iex> 1 ~> 2 +2 +iex> 1 <~ 2 +1 +``` + +The following is a table of all the operators that Elixir is capable of parsing, but that are not used by default: + + * `|` + * `|||` + * `&&&` + * `<<<` + * `>>>` + * `~>>` + * `<<~` + * `~>` + * `<~` + * `<~>` + * `<|>` + * `^^^` + * `~~~` + +The following operators are used by the `Bitwise` module when imported: `&&&`, `^^^`, `<<<`, `>>>`, `|||`, `~~~`. See the documentation for `Bitwise` for more information. + +### Redefining existing operators + +The operators that Elixir uses (for example, `+`) can be defined by any module and used in place of the ones defined by Elixir, provided they're specifically not imported from `Kernel` (which is imported everywhere by default). For example: + +```elixir +defmodule WrongMath do + # Let's make math wrong by changing the meaning of +: + def a + b, do: a - b +end +``` + +Now, we will get an error if we try to use this operator "out of the box": + +```elixir +iex> import WrongMath +iex> 1 + 2 +** (CompileError) iex:11: function +/2 imported from both WrongMath and Kernel, call is ambiguous +``` + +So, as mentioned above, we need to explicitly *not* import `+/2` from `Kernel`: + +```elixir +iex> import WrongMath +iex> import Kernel, except: [+: 2] +iex> 1 + 2 +-1 +``` + +### Final note + +While it's possible to define unused operators (such as `<~>`) and to "override" predefined operators (such as `+`), the Elixir community generally discourages this. Custom-defined operators can be really hard to read and even more to understand, as they don't have a descriptive name like functions do. That said, some specific cases or custom domain specific languages (DSLs) may justify these practices. diff --git a/lib/elixir/pages/Syntax Reference.md b/lib/elixir/pages/Syntax Reference.md new file mode 100644 index 00000000000..97bfc224d5e --- /dev/null +++ b/lib/elixir/pages/Syntax Reference.md @@ -0,0 +1,516 @@ +# Syntax reference + +Here we document the syntax constructs in Elixir. We explore the base language constructs as well as the "syntax sugar" provided by Elixir and the underlying construct they desugar to. + +## The Elixir AST + +Elixir syntax was designed to have a straightforward conversion to an abstract syntax tree (AST). 
Elixir's AST is a regular Elixir data structure composed of the following elements: + + * atoms - such as `:foo` + * integers - such as `42` + * floats - such as `13.1` + * strings - such as `"hello"` + * lists - such as `[1, 2, 3]` + * tuples with two elements - such as `{"hello", :world}` + * tuples with three elements, representing calls or variables, as explained next + +The building block of Elixir's AST is a call, such as: + +```elixir +sum(arg1, arg2, arg3) +``` + +which is represented as a tuple with three elements: + +```elixir +{:sum, meta, args} +``` + +the first element is an atom (or another tuple), the second element is a list of two-item tuples with metadata (such as line numbers) and the third is a list of arguments. + +We can retrieve the AST for any Elixir expression by calling `quote`: + +```elixir +quote do + sum() +end +#=> {:sum, [], []} +``` + +Variables are also represented using a tuple with three elements and a combination of lists and atoms, for example: + +```elixir +quote do + sum +end +#=> {:sum, [], Elixir} +``` + +You can see that variables are also represented with a tuple, except the third element is an atom expressing the variable context. + +Over the next section, we will explore many of Elixir syntax constructs alongside their AST representation. + +### Numbers + +Integers (`1234`) and floats (`123.4`) in Elixir are represented as a sequence of digits that may be separated by underscore for readability purposes, such as `1_000_000`. Integers never contain a dot (`.`) in their representation. Floats contain a dot and at least one other digit after the dot. Floats also support the scientific format, such as `123.4e10` or `123.4E10`. + +Numbers are always represented as themselves in the AST: + +```elixir +quote do + 1 +end +#=> 1 +``` + +### Atoms + +Atoms in Elixir start with a colon (`:`) which must be followed by non-combining Unicode characters and underscore. The atom may continue using a sequence of Unicode characters, including numbers, underscore and `@`. Atoms may end in `!` or `?`. See [Unicode Syntax](unicode-syntax.html) for a formal specification. Unicode characters require OTP 20. + +All operators in Elixir are also valid atoms. Valid examples are `:foo`, `:FOO`, `:foo_42`, `:foo@bar` and `:++`. Invalid examples are `:@foo` (`@` is not allowed at start), `:123` (numbers are not allowed at start) and `:(*)` (not a valid operator). + +If the colon is followed by a double- or single-quote, the atom can be made of any character, such as `:"++olá++"`. + +Atoms are always represented as themselves in the AST: + +```elixir +quote do + :foo +end +#=> :foo +``` + +### Strings + +Strings in Elixir are written between double-quotes, such as `"foo"`. Any double-quote inside the string must be escaped with `\`. Strings support Unicode characters and are stored in UTF-8 encoding. + +Strings are always represented as themselves in the AST. + +### Charlists + +Charlists in Elixir are written in single-quotes, such as `'foo'`. Any single-quote inside the string must be escaped with `\`. Charlists are a list of integers, each integer representing a Unicode character. + +Charlists are always represented as themselves in the AST. + +### Variables + +Variables in Elixir must start with underscore or a non-combining Unicode character that is not in uppercase or titlecase. The variable may continue using a sequence of Unicode characters, including numbers and underscore. Variables may end in `?` or `!`. 
See [Unicode Syntax](unicode-syntax.html) for a formal specification. Unicode characters require OTP 20.
+
+[Elixir's naming conventions](naming-conventions.html) recommend variables to be in `snake_case` format.
+
+Variables are represented by three-element tuples:
+
+```elixir
+quote do
+  sum
+end
+#=> {:sum, [], Elixir}
+```
+
+### Non-qualified calls
+
+Non-qualified calls, such as `add(1, 2)`, must start with underscore or a non-combining Unicode character that is not in uppercase or titlecase. The call may continue using a sequence of Unicode characters, including numbers and underscore. Calls may end in `?` or `!`. See [Unicode Syntax](unicode-syntax.html) for a formal specification. Unicode characters require OTP 20.
+
+[Elixir's naming conventions](naming-conventions.html) recommend calls to be in `snake_case` format.
+
+Non-qualified calls are represented by three-element tuples:
+
+```elixir
+quote do
+  sum(1, 2, 3)
+end
+#=> {:sum, [], [1, 2, 3]}
+```
+
+### Operators
+
+Operators are treated as non-qualified calls:
+
+```elixir
+quote do
+  1 + 2
+end
+#=> {:+, [], [1, 2]}
+```
+
+Notice that `.` is also an operator. Remote calls use the dot in the AST with two arguments, where the second argument is always an atom:
+
+```elixir
+quote do
+  foo.bar(1, 2, 3)
+end
+#=> {{:., [], [{:foo, [], Elixir}, :bar]}, [], [1, 2, 3]}
+```
+
+Calling anonymous functions uses the dot in the AST with a single argument, mirroring the fact that the function name is "missing" from the right side of the dot:
+
+```elixir
+quote do
+  foo.(1, 2, 3)
+end
+#=> {{:., [], [{:foo, [], Elixir}]}, [], [1, 2, 3]}
+```
+
+Many other Elixir constructs, such as `=`, `when`, `&`, and `@`, are simply treated as operators. See [the Operators page](operators.html) for a full reference.
+
+### Qualified calls (remote calls)
+
+Qualified calls, such as `Math.add(1, 2)`, must start with underscore or a non-combining Unicode character that is not in uppercase or titlecase. The call may continue using a sequence of Unicode characters, including numbers and underscore. Calls may end in `?` or `!`. See [Unicode Syntax](unicode-syntax.html) for a formal specification. Unicode characters require OTP 20.
+
+[Elixir's naming conventions](naming-conventions.html) recommend calls to be in `snake_case` format.
+
+For qualified calls, Elixir also allows the function name to be written between double- or single-quotes, allowing calls such as `Math."++add++"(1, 2)`. Operators can be used as qualified calls without a need for quotes, such as `Kernel.+(1, 2)`.
+
+Qualified calls are represented as a tuple with three elements in the AST, where the first element is a tuple representing the dot:
+
+```elixir
+quote do
+  :foo.bar(1, 2)
+end
+#=> {{:., [], [:foo, :bar]}, [], [1, 2]}
+```
+
+### Aliases
+
+Aliases are constructs that expand to atoms at compile-time. The alias `String` expands to the atom `:"Elixir.String"`. Aliases must start with an ASCII uppercase character, which may be followed by any ASCII letter, number, or underscore. Non-ASCII characters are not supported in aliases.
+
+[Elixir's naming conventions](naming-conventions.html) recommend aliases to be in `CamelCase` format.
+
+Aliases are represented by an `__aliases__` call where each dot-separated segment is given as an argument:
+
+```elixir
+quote do
+  Foo.Bar.Baz
+end
+#=> {:__aliases__, [], [:Foo, :Bar, :Baz]}
+
+quote do
+  __MODULE__.Bar.Baz
+end
+#=> {:__aliases__, [], [{:__MODULE__, [], Elixir}, :Bar, :Baz]}
+```
+
+All arguments, except the first, are guaranteed to be atoms.
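+
+To make the expansion concrete, a quick check shows that an alias is nothing more than the atom it expands to:
+
+```elixir
+is_atom(String)
+#=> true
+
+String == :"Elixir.String"
+#=> true
+```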
+ +### Data structures + +Data structures such as lists, tuples, and binaries are marked respectively by the delimiters `[...]`, `{...}`, and `<<...>>`. Each element is separated by comma. A trailing comma is also allowed, such as in `[1, 2, 3,]`. + +Maps use the `%{...}` notation and each key-value is given by pairs marked with `=>`, such as `%{"hello" => 1, 2 => "world"}`. + +Both maps and keyword lists support a notation for when the keys are atoms. Keywords are written using the same rules as atoms, except the colon character `:` is moved to the end, such as `%{hello: "world"}` and `[foo: :bar]`. This notation is a syntax sugar that emits the same AST representation. It will be explained in later sections. + +Lists are represented as themselves in the AST: + +```elixir +quote do + [1, 2, 3] +end +#=> [1, 2, 3] +``` + +Tuples have their own representation, except for two-element tuples, which are represented as themselves: + +```elixir +quote do + {1, 2} +end +#=> {1, 2} + +quote do + {1, 2, 3} +end +#=> {:{}, [], [1, 2, 3]} +``` + +Binaries have a representation similar to tuples, except they are tagged with `:<<>>` instead of `:{}`: + +```elixir +quote do + <<1, 2, 3>> +end +#=> {:<<>>, [], [1, 2, 3]} +``` + +The same applies to maps except pairs are treated as a list of tuples with two elements: + +```elixir +quote do + %{1 => 2, 3 => 4} +end +#=> {:%{}, [], [{1, 2}, {3, 4}]} +``` + +### Blocks + +Blocks are multiple Elixir expressions separated by newlines. They are expanded to a `__block__` call with each line as a separate argument: + +```elixir +quote do + 1 + 2 + 3 +end +#=> {:__block__, [], [1, 2, 3]} +``` + +Expressions in Elixir are separated by newlines or semi-colons: + +```elixir +quote do 1; 2; 3; end +#=> {:__block__, [], [1, 2, 3]} +``` + +### Left to right arrow + +The left to right arrow (`->`) is used to establish a relationship between left and right. The left side may have zero, one or more arguments, the right side is an expression. The `->` is always between one of the following terminators: `do`/`end`, `fn`/`end` or `(`/`)`. + +It is seen on `case` and `cond` constructs between `do`/`end`: + +```elixir +quote do + case 1 do + 2 -> 3 + 4 -> 5 + end +end +#=> {:case, [], [1, [do: [{:->, [], [[2], 3]}, {:->, [], [[4], 5]}]]]} + +quote do + cond do + true -> false + end +end +#=> {:cond, [], [[do: [{:->, [], [[true], false]}]]]} +``` + +Seen in typespecs between `(`/`)`: + +```elixir +quote do + (1, 2 -> 3 + 4, 5 -> 6) +end +#=> [{:->, [], [[1, 2], 3]}, {:->, [], [[4, 5], 6]}] +``` + +It is also used between `fn/end` for building anonymous functions: + +```elixir +quote do + fn + 1, 2 -> 3 + 4, 5 -> 6 + end +end +#=> {:fn, [], [{:->, [], [[1, 2], 3]}, {:->, [], [[4, 5], 6]}]} +``` + +## Syntactic sugar + +All of the constructs above are part of Elixir's syntax and have their own representation as part of the Elixir AST. This section will discuss the remaining constructs that "desugar" to one of the constructs explored above. In other words, the constructs below can be represented in more than one way in your Elixir code and retain AST equivalence. + +### `true`, `false`, and `nil` + +`true`, `false`, and `nil` are reserved words that are represented by the atoms `:true`, `:false` and `:nil` respectively. 
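+
+For instance, the reserved words are atoms like any other, both in source and in the AST:
+
+```elixir
+quote do
+  true
+end
+#=> true
+
+is_atom(true) and is_atom(nil)
+#=> true
+```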
+ +### Integers in other bases and Unicode codepoints + +Elixir allows integers to contain `_` to separate digits and provides conveniences to represent integers in other bases: + +```elixir +1_000_000 +#=> 1000000 + +0xABCD +#=> 43981 (Hexadecimal base) + +0o01234567 +#=> 342391 (Octal base) + +0b10101010 +#=> 170 (Binary base) + +?é +#=> 233 (Unicode codepoint) +``` + +Those constructs exist only at the syntax level. All of the examples above are represented as integers in the AST. + +### Optional parentheses + +Elixir provides optional parentheses for non-qualified and qualified calls. + +```elixir +quote do + sum 1, 2, 3 +end +#=> {:sum, [], [1, 2, 3]} +``` + +The above is treated the same as `sum(1, 2, 3)` by the parser. + +The same applies to qualified calls such as `Foo.bar(1, 2, 3)`, which is the same as `Foo.bar 1, 2, 3`. However, keep in mind parentheses are not optional for local calls with no arguments, such as `sum()`. Removing the parentheses for `sum` causes it to be represented as the variable `sum`, changing its semantics. + +### Access + +The access syntax in Elixir, such as `foo[:bar]`, is treated as a shortcut to the remote call `Access.get(foo, :bar)`: + +```elixir +quote do + foo[:bar] +end +#=> {{:., [], [Access, :get]}, [], [{:foo, [], Elixir}, :bar]} +``` + +### Sigils + +Sigils start with `~` and are followed by a letter and one of the following pairs: + + * `(` and `)` + * `{` and `}` + * `[` and `]` + * `<` and `>` + * `"` and `"` + * `'` and `'` + * `|` and `|` + * `/` and `/` + +After closing the pair, zero or more ASCII letters can be given as a modifier. Sigils are expressed as calls prefixed with `sigil_` where the first argument is the sigil contents as a string and the second argument is a list of integers as modifiers: + +```elixir +quote do + ~r/foo/ +end +#=> {:sigil_r, [], [{:<<>>, [], ["foo"]}, []]} + +quote do + ~m/foo/abc +end +#=> {:sigil_m, [], [{:<<>>, [], ["foo"]}, 'abc']} +``` + +If the sigil letter is in uppercase, no interpolation is allowed in the sigil, otherwise its contents may be dynamic. Compare the quotes below for more information: + +```elixir +quote do + ~r/f#{"o"}o/ +end + +quote do + ~R/f#{"o"}o/ +end +``` + +### Keywords + +Keywords in Elixir are a list of tuples of two elements where the first element is an atom. Using the base constructs, they would be represented as: + +```elixir +[{:foo, 1}, {:bar, 2}] +``` + +However Elixir introduces a syntax sugar where the keywords above may be written as follows: + +```elixir +[foo: 1, bar: 2] +``` + +Atoms with foreign characters in their name, such as whitespace, must be wrapped in quotes. This rule applies to keywords as well: + +```elixir +[{:"foo bar", 1}, {:"bar baz", 2}] == ["foo bar": 1, "bar baz": 2] +``` + +Remember that, because lists and two-element tuples are quoted literals, by definition keywords are also literals (in fact, the only reason tuples with two elements are quoted literals is to support keywords as literals). + +### Keywords as last arguments + +Elixir also supports a syntax where if the last argument of a call is a keyword then the square brackets can be skipped. This means that the following: + +```elixir +if(condition, do: this, else: that) +``` + +is the same as + +```elixir +if(condition, [do: this, else: that]) +``` + +which in turn is the same as + +```elixir +if(condition, [{:do, this}, {:else, that}]) +``` + +### `do`/`end` blocks + +The last syntax convenience are `do`/`end` blocks. 
`do`/`end` blocks are equivalent to keywords where the block contents are wrapped in parentheses. For example: + +```elixir +if true do + this +else + that +end +``` + +is the same as: + +```elixir +if(true, do: (this), else: (that)) +``` + +which we have explored in the previous section. + +Parentheses are important to support multiple expressions. This: + +```elixir +if true do + this + that +end +``` + +is the same as: + +```elixir +if(true, do: ( + this + that +)) +``` + +Inside `do`/`end` blocks you may introduce other keywords, such as `else` used in the `if` above. The supported keywords between `do`/`end` are static and are: + + * `after` + * `catch` + * `else` + * `rescue` + +You can see them being used in constructs such as `receive`, `try`, and others. + +## Summary + +This document provides a quick reference to Elixir syntax, exploring the simplicity behind its AST and documenting the base constructs with their AST equivalents. + +We have also discussed a handful of syntax conveniences provided by Elixir. Those conveniences are what allow us to write + +```elixir +defmodule Math do + def add(a, b) do + a + b + end +end +``` + +instead of + +```elixir +defmodule(Math, [ + {:do, def(add(a, b), [{:do, a + b}])} +]) +``` + +The mapping between code and data (the underlying AST) is what allows Elixir to implement `defmodule`, `def`, `if`, and others in Elixir itself. Elixir makes the constructs available for building the language itself also accessible to developers who want to extend the language to new domains. diff --git a/lib/elixir/pages/Typespecs.md b/lib/elixir/pages/Typespecs.md new file mode 100644 index 00000000000..dd32f5e0ba1 --- /dev/null +++ b/lib/elixir/pages/Typespecs.md @@ -0,0 +1,193 @@ +# Typespecs + +Elixir comes with a notation for declaring types and specifications. Elixir is a dynamically typed language, and as such, type specifications are never used by the compiler to optimize or modify code. Still, using type specifications is useful because + + * they provide documentation (for example, tools such as [ExDoc](https://github.com/elixir-lang/ex_doc) show type specifications in the documentation) + * they're used by tools such as [Dialyzer](http://www.erlang.org/doc/man/dialyzer.html), that can analyze code with typespec to find type inconsistencies and possible bugs + +Type specifications (sometimes referred to as *typespecs*) are defined in different contexts using the following attributes: + + * `@type` + * `@opaque` + * `@typep` + * `@spec` + * `@callback` + * `@macrocallback` + +See the "Defining a type" and "Defining a specification" sub-sections below for more information on defining types and typespecs. + +## Types and their syntax + +The syntax Elixir provides for type specifications is similar to [the one in Erlang](http://www.erlang.org/doc/reference_manual/typespec.html). Most of the built-in types provided in Erlang (for example, `pid()`) are expressed in the same way: `pid()` (or simply `pid`). Parametrized types (such as `list(integer)`) are supported as well and so are remote types (such as `Enum.t`). Integers and atom literals are allowed as types (e.g., `1`, `:atom`, or `false`). All other types are built out of unions of predefined types. Some shorthands are allowed, such as `[...]`, `<<>>`, and `{...}`. 
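+
+As a quick, hypothetical example combining the constructs mentioned above (the type names below are made up; the attributes themselves are described in detail later in this document):
+
+    @type color :: :red | :green | :blue                # a union of atom literals
+    @type scores :: list(integer)                       # a parametrized type
+    @type payload :: %{optional(String.t) => Enum.t}    # remote types used in a map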
+ +### Basic types + + type :: any() # the top type, the set of all terms + | none() # the bottom type, contains no terms + | atom() + | map() # any map + | pid() # process identifier + | port() + | reference() + | struct() # any struct + | tuple() # tuple of any size + + ## Numbers + | float() + | integer() + | neg_integer() # ..., -3, -2, -1 + | non_neg_integer() # 0, 1, 2, 3, ... + | pos_integer() # 1, 2, 3, ... + + ## Lists + | list(type) # proper list ([]-terminated) + | nonempty_list(type) # non-empty proper list + | maybe_improper_list(type1, type2) # proper or improper list + | nonempty_improper_list(type1, type2) # improper list + | nonempty_maybe_improper_list(type1, type2) # non-empty proper or improper list + + | Literals # Described in section "Literals" + | Builtin # Described in section "Built-in types" + | Remotes # Described in section "Remote types" + | UserDefined # Described in section "User-defined types" + +### Literals + +The following literals are also supported in typespecs: + + type :: ## Atoms + :atom # atoms: :foo, :bar, ... + | true | false | nil # special atom literals + + ## Bitstrings + | <<>> # empty bitstring + | <<_::size>> # size is 0 or a positive integer + | <<_::_*unit>> # unit is an integer from 1 to 256 + | <<_::size, _::_*unit>> + + ## Functions + | (... -> type) # any arity, returns type + | (() -> type) # 0-arity, returns type + | (type1, type2 -> type) # 2-arity, returns type + + ## Integers + | 1 # integer + | 1..10 # integer from 1 to 10 + + ## Lists + | [type] # list with any number of type elements + | [] # empty list + | [...] # shorthand for nonempty_list(any()) + | [type, ...] # shorthand for nonempty_list(type) + | [key: value_type] # keyword list with key :key of value_type + + ## Maps + | %{} # empty map + | %{key: value_type} # map with required key :key of value_type + | %{required(key_type) => value_type} # map with required pairs of key_type and value_type + | %{optional(key_type) => value_type} # map with optional pairs of key_type and value_type + | %SomeStruct{} # struct with all fields of any type + | %SomeStruct{key: value_type} # struct with required key :key of value_type + + ## Tuples + | {} # empty tuple + | {:ok, type} # two-element tuple with an atom and any type + +### Built-in types + +The following types are also provided by Elixir as shortcuts on top of the basic and literal types described above. + +Built-in type | Defined as +:---------------------- | :--------- +`term()` | `any()` +`arity()` | `0..255` +`as_boolean(t)` | `t` +`binary()` | `<<_::_*8>>` +`bitstring()` | `<<_::_*1>>` +`boolean()` | `false` \| `true` +`byte()` | `0..255` +`char()` | `0..0x10FFFF` +`charlist()` | `[char()]` +`nonempty_charlist()` | `[char(), ...]` +`fun()` | `(... 
-> any)` +`function()` | `fun()` +`identifier()` | `pid()` \| `port()` \| `reference()` +`iodata()` | `iolist()` \| `binary()` +`iolist()` | `maybe_improper_list(byte() \| binary() \| iolist(), binary() \| [])` +`keyword()` | `[{atom(), any()}]` +`keyword(t)` | `[{atom(), t}]` +`list()` | `[any()]` +`nonempty_list()` | `nonempty_list(any())` +`maybe_improper_list()` | `maybe_improper_list(any(), any())` +`nonempty_maybe_improper_list()` | `nonempty_maybe_improper_list(any(), any())` +`mfa()` | `{module(), atom(), arity()}` +`module()` | `atom()` +`no_return()` | `none()` +`node()` | `atom()` +`number()` | `integer()` \| `float()` +`struct()` | `%{:__struct__ => atom(), optional(atom()) => any()}` +`timeout()` | `:infinity` \| `non_neg_integer()` + +### Remote types + +Any module is also able to define its own types and the modules in Elixir are no exception. For example, the `Range` module defines a `t/0` type that represents a range: this type can be referred to as `t:Range.t/0`. In a similar fashion, a string is `t:String.t/0`, any enumerable can be `t:Enum.t/0`, and so on. + +### Maps + +The key types in maps are allowed to overlap, and if they do, the leftmost key takes precedence. +A map value does not belong to this type if it contains a key that is not in the allowed map keys. + +If you want to denote that keys that were not previously defined in the map are allowed, +it is common to end a map type with `optional(any) => any`. + +Notice that the syntactic representation of `map()` is `%{optional(any) => any}`, not `%{}`. The notation `%{}` specifies the singleton type for the empty map. + +### User-defined types + +The `@type`, `@typep`, and `@opaque` module attributes can be used to define new types: + + @type type_name :: type + @typep type_name :: type + @opaque type_name :: type + +A type defined with `@typep` is private. An opaque type, defined with `@opaque` is a type where the internal structure of the type will not be visible, but the type is still public. + +Types can be parameterized by defining variables as parameters; these variables can then be used to define the type. + + @type dict(key, value) :: [{key, value}] + +## Defining a specification + + @spec function_name(type1, type2) :: return_type + @callback function_name(type1, type2) :: return_type + @macrocallback macro_name(type1, type2) :: Macro.t + +Callbacks are used to define the callbacks functions of behaviours (see the ["Behaviours"](behaviours.html) page in the documentation for more information on behaviours). + +Guards can be used to restrict type variables given as arguments to the function. + + @spec function(arg) :: [arg] when arg: atom + +If you want to specify more than one variable, you separate them by a comma. + + @spec function(arg1, arg2) :: [arg1, arg2] when arg1: atom, arg2: integer + +Type variables with no restriction can also be defined. + + @spec function(arg) :: [arg] when arg: var + +You can also name your arguments in a typespec using `arg_name :: arg_type` syntax. This is particularly useful in documentation as a way to differentiate multiple arguments of the same type (or multiple elements of the same type in a type definition): + + @spec days_since_epoch(year :: integer, month :: integer, day :: integer) :: integer + @type color :: {red :: integer, green :: integer, blue :: integer} + +Specifications can be overloaded just like ordinary functions. 
+
+    @spec function(integer) :: atom
+    @spec function(atom) :: integer
+
+## Notes
+
+Elixir discourages the use of type `t:string/0` as it might be confused with binaries, which are referred to as "strings" in Elixir (as opposed to character lists). In order to use the type that is called `t:string/0` in Erlang, one has to use the `t:charlist/0` type, which is a synonym for `string`. If you use `string`, you'll get a warning from the compiler.
+
+If you want to refer to the "string" type (the one operated on by functions in the `String` module), use the `t:String.t/0` type instead.
diff --git a/lib/elixir/pages/Unicode Syntax.md b/lib/elixir/pages/Unicode Syntax.md
new file mode 100644
index 00000000000..f0aab5a56c0
--- /dev/null
+++ b/lib/elixir/pages/Unicode Syntax.md
@@ -0,0 +1,56 @@
+# Unicode Syntax
+
+Elixir implements [Unicode Annex #31](http://unicode.org/reports/tr31/) for non-quoted atoms and variables as specified in this document.
+
+## Version
+
+The specification in this document is included in Elixir v1.5 and requires OTP 20+. To check the Unicode version of your current Elixir installation, please run `String.Unicode.version()`.
+
+## R1. Default Identifiers
+
+Elixir identifiers are identified as:
+
+    <Identifier> := <ID_Start> <ID_Continue>* <Ending>?
+
+where `<ID_Start>` is:
+
+> characters derived from the Unicode General Category of uppercase letters, lowercase letters, titlecase letters, modifier letters, other letters, letter numbers, plus `Other_ID_Start`, minus `Pattern_Syntax` and `Pattern_White_Space` code points
+>
+> In set notation: `[[:L:][:Nl:][:Other_ID_Start:]--[:Pattern_Syntax:]--[:Pattern_White_Space:]]`
+
+and `<ID_Continue>` is:
+
+> ID_Start characters, plus characters having the Unicode General Category of nonspacing marks, spacing combining marks, decimal number, connector punctuation, plus `Other_ID_Continue`, minus `Pattern_Syntax` and `Pattern_White_Space` code points.
+>
+> In set notation: `[[:ID_Start:][:Mn:][:Mc:][:Nd:][:Pc:][:Other_ID_Continue:]--[:Pattern_Syntax:]--[:Pattern_White_Space:]]`
+
+`<Ending>` is an addition specific to Elixir that includes the codepoints ? (003F) and ! (0021).
+
+Elixir does not implement requirement R1a. It does implement requirement R1b.
+
+### Atoms
+
+Atoms in Elixir follow the identifier rule above with the following modifications:
+
+  * `<ID_Start>` includes the codepoint _ (005F)
+  * `<ID_Continue>` includes the codepoint @ (0040)
+
+### Variables
+
+Variables in Elixir follow the identifier rule above with the following modifications:
+
+  * `<ID_Start>` includes the codepoint _ (005F)
+  * `<ID_Start>` must not include Lu (letter uppercase) and Lt (letter titlecase) characters
+  * `<ID_Continue>` includes Lu (letter uppercase) and Lt (letter titlecase) characters
+
+## R6. Filtered Normalized Identifiers
+
+Identifiers in Elixir are case sensitive.
+
+Elixir requires all atoms and variables to be in NFC form. Any other form will fail with a relevant error message. Quoted atoms and variables can, however, be in any form and are not verified by the parser.
+
+In other words, the atom `:josé` can only be written with the codepoints 006A 006F 0073 00E9. On the other hand, `:"josé"` may be written as 006A 006F 0073 00E9 or 006A 006F 0073 0065 0301.
+
+## Other considerations
+
+It is worth noting that Elixir supports only codepoints \t (0009), \n (000A), \r (000D) and \s (0020) as whitespace and therefore does not follow requirement R3.
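+
+Going back to the normalization requirement in R6, the difference between the two forms can be inspected directly. The small example below is purely illustrative; it prints the codepoints in decimal, corresponding to the hexadecimal values listed above:
+
+```elixir
+String.to_charlist("josé")                           # NFC form, é is a single codepoint (00E9)
+#=> [106, 111, 115, 233]
+
+String.to_charlist(String.normalize("josé", :nfd))   # NFD form, e (0065) followed by a combining accent (0301)
+#=> [106, 111, 115, 101, 769]
+```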
diff --git a/lib/elixir/pages/Writing Documentation.md b/lib/elixir/pages/Writing Documentation.md new file mode 100644 index 00000000000..3a4054b8b53 --- /dev/null +++ b/lib/elixir/pages/Writing Documentation.md @@ -0,0 +1,126 @@ +# Writing Documentation + +Elixir treats documentation as a first-class citizen. This means documentation should be easy to write and easy to read. In this document you will learn how to write documentation in Elixir, covering constructs like module attributes, style practices and doctests. + +## Markdown + +Elixir documentation is written using Markdown. There are plenty of guides on Markdown online, we recommend the ones available at GitHub as a getting started point: + + * [Basic writing and formatting syntax](https://help.github.com/articles/basic-writing-and-formatting-syntax/) + * [Mastering Markdown](https://guides.github.com/features/mastering-markdown/) + +## Module Attributes + +Documentation in Elixir is usually attached to module attributes. Let's see an example: + + defmodule MyApp.Hello do + @moduledoc """ + This is the Hello module. + """ + + @doc """ + Says hello to the given `name`. + + Returns `:ok`. + + ## Examples + + iex> MyApp.Hello.world(:john) + :ok + + """ + def world(name) do + IO.puts "hello #{name}" + end + end + +The `@moduledoc` attribute is used to add documentation to the module. `@doc` is used before a function to provide documentation for it. Besides the attributes above, `@typedoc` can also be used to attach documentation to types defined as part of typespecs. + +## Function Arguments + +When documenting a function, argument names are inferred by the compiler. For example: + + def size(%{size: size}) do + size + end + +The compiler will infer this argument as `map`. Sometimes the inference will be suboptimal, especially if the function contains multiple clauses with the argument matching on different values each time. You can specify the proper names for documentation by declaring only the function head at any moment before the implementation: + + def size(map) + def size(%{size: size}) do + size + end + +## Recommendations + +When writing documentation: + + * Keep the first paragraph of the documentation concise and simple, typically one-line. Tools like [ExDoc](https://github.com/elixir-lang/ex_doc/) use the first line to generate a summary. + + * Reference modules by their full name. + + Markdown uses backticks (`` ` ``) to quote code. Elixir builds on top of that to automatically generate links when module or function names are referenced. For this reason, always use full module names. If you have a module called `MyApp.Hello`, always reference it as `` `MyApp.Hello` `` and never as `` `Hello` ``. + + * Reference functions by name and arity if they are local, as in `` `world/1` ``, or by module, name and arity if pointing to an external module: `` `MyApp.Hello.world/1` ``. + + * Reference a `@callback` by prepending `c:`, as in `` `c:world/1` ``. + + * Reference a `@type` by prepending `t:`, as in `` `t:values/0` ``. + + * Start new sections with second level Markdown headers `##`. First level headers are reserved for module and function names. + + * Place documentation before the first clause of multi-clause functions. Documentation is always per function and arity and not per clause. + +## Doctests + +We recommend that developers include examples in their documentation, often under their own `## Examples` heading. 
To ensure examples do not get out of date, Elixir's test framework (ExUnit) provides a feature called doctests that allows developers to test the examples in their documentation. Doctests work by parsing out code samples starting with `iex>` from the documentation. You can read more about it at `ExUnit.DocTest`. + +Notice doctests have limitations. When you cannot doctest a function, because it relies on state or side-effects, we recommend developers include examples directly without the `iex>` prompt. + +## Documentation != Comments + +Elixir treats documentation and code comments as different concepts. Documentation is for users of your Application Programming Interface (API), be it your co-worker or your future self. Modules and functions must always be documented if they are part of your API. + +Code comments are for developers reading the code. They are useful to mark improvements, leave notes for developers reading the code (for example, you decided not to call a function due to a bug in a library) and so forth. + +In other words: documentation is required, code comments are optional. + +## Hiding Internal Modules and Functions + +Besides the modules and functions libraries provide as part of their public interface, libraries may also implement important functionality that is not part of their API. While these modules and functions can be accessed, they are meant to be internal to the library and thus should not have documentation for end users. + +Luckily, Elixir allows developers to hide modules and functions from the documentation. For example, one common practice for documenting internal behaviour is to set the `@moduledoc` attribute to `false` while documenting each function: + + defmodule MyApp.Hidden do + @moduledoc false + + @doc """ + This function won't be listed in docs. + """ + def function_that_wont_be_listed_in_docs do + # ... + end + end + +Similarly, developers can add `@doc false` to functions they do not want to be publicly exposed: + + defmodule MyApp.Sample do + @doc false + def add(a, b), do: a + b + end + +However, keep in mind that adding `@doc false` does not make the function private. The function above can still be invoked as `MyApp.Sample.add(1, 2)`. Not only that, if `MyApp.Sample` is imported, the `add/2` function will also be imported into the caller. For those reasons, be cautious when adding `@doc false` to functions, instead use one of these two options: + + * Move the undocumented function to a module with `@moduledoc false`, like `MyApp.Hidden`, ensuring the function won't be accidentally exposed or imported. Remember you can use `@moduledoc false` to hide a whole module and still document each function with `@doc`. Tools will still ignore the module. + + * Start the function name with one or two underscores, for example, `__add__/2`, and add `@doc false`. The compiler does not import functions with leading underscores and they hint to anyone reading the code of their intended private usage. + +## Documenting Private Functions + +Elixir warns if a private function has a `@doc` attribute and discards its content, because `@doc` is intended to be used only for your public interface. + +Private functions may still need internal documentation for maintainers, though. That can be accomplished with code comments. + +## Code.get_docs/2 + +Elixir stores documentation inside pre-defined chunks in the bytecode. It can be accessed from Elixir by using the `Code.get_docs/2` function. 
This also means documentation is only accessed when required and not when modules are loaded by the Virtual Machine. The only downside is that modules defined in-memory, like the ones defined in IEx, cannot have their documentation accessed as they do not have their bytecode written to disk. diff --git a/lib/elixir/rebar.config b/lib/elixir/rebar.config index 902a412104c..476b0e6046b 100644 --- a/lib/elixir/rebar.config +++ b/lib/elixir/rebar.config @@ -1,23 +1,24 @@ {erl_opts, [ - warn_unused_vars, - warn_export_all, - warn_shadow_vars, - warn_unused_import, - warn_unused_function, - warn_bif_clash, - warn_unused_record, - warn_deprecated_function, - warn_obsolete_guard, - strict_validation, - warn_exported_vars, - %% warn_export_vars, - %% warn_missing_spec, - %% warn_untyped_record, - %% warnings_as_errors, - debug_info - ]}. + warn_unused_vars, + warn_export_all, + warn_shadow_vars, + warn_unused_import, + warn_unused_function, + warn_bif_clash, + warn_unused_record, + warn_deprecated_function, + warn_obsolete_guard, + strict_validation, + warn_exported_vars, + %% warn_export_vars, + %% warn_missing_spec, + %% warn_untyped_record, + %% warnings_as_errors, + debug_info, + {platform_define, "^18.*", old_map_specs} +]}. {yrl_opts, [ - {report, true}, - {verbose, false} - ]}. + {report, true}, + {verbose, false} +]}. diff --git a/lib/elixir/src/elixir.erl b/lib/elixir/src/elixir.erl index a7548c56489..73a7d954138 100644 --- a/lib/elixir/src/elixir.erl +++ b/lib/elixir/src/elixir.erl @@ -2,16 +2,29 @@ %% private to the Elixir compiler and reserved to be used by Elixir only. -module(elixir). -behaviour(application). --export([main/1, start_cli/0, +-export([start_cli/0, string_to_quoted/4, 'string_to_quoted!'/4, env_for_eval/1, env_for_eval/2, quoted_to_erl/2, quoted_to_erl/3, eval/2, eval/3, eval_forms/3, eval_forms/4, eval_quoted/3]). -include("elixir.hrl"). +-define(system, 'Elixir.System'). %% Top level types --export_type([char_list/0, as_boolean/1]). +%% TODO: Remove char_list type by 2.0 +-export_type([charlist/0, char_list/0, nonempty_charlist/0, struct/0, as_boolean/1, keyword/0, keyword/1]). +-type charlist() :: string(). -type char_list() :: string(). +-type nonempty_charlist() :: nonempty_string(). -type as_boolean(T) :: T. +-type keyword() :: [{atom(), any()}]. +-type keyword(T) :: [{atom(), T}]. + +%% TODO: Remove ifdef once we drop OTP 18 +-ifdef(old_map_specs). +-type struct() :: #{'__struct__' => atom(), atom() => any()}. +-else. +-type struct() :: #{'__struct__' := atom(), atom() => any()}. +-endif. %% OTP Application API @@ -24,13 +37,45 @@ start(_Type, _Args) -> %% has encoding set to latin1. Opts = case init:get_argument(noshell) of - {ok, _} -> [binary,{encoding,utf8}]; + {ok, _} -> [binary, {encoding, utf8}]; error -> [binary] end, - io:setopts(standard_io, Opts), - io:setopts(standard_error, [{unicode,true}]), - case file:native_name_encoding() of + OTPRelease = string:to_integer(erlang:system_info(otp_release)), + + case OTPRelease of + {Num, _} when Num >= 18 -> + ok; + _ -> + io:format(standard_error, "unsupported Erlang version, expected Erlang 18+~n", []), + erlang:halt(1) + end, + + %% We need to make sure the re module is preloaded + %% to make function_exported checks on it fast. + %% TODO: Remove this once we support OTP 20+. 
+ _ = code:ensure_loaded(re), + + case code:ensure_loaded(?system) of + {module, ?system} -> + Endianness = ?system:endianness(), + case ?system:compiled_endianness() of + Endianness -> ok; + _ -> + io:format(standard_error, + "warning: Elixir is running in a system with a different endianness than the one its " + "source code was compiled in. Please make sure Elixir and all source files were compiled " + "in a machine with the same endianness as the current one: ~ts~n", [Endianness]) + end; + {error, _} -> + ok + end, + + ok = io:setopts(standard_io, Opts), + ok = io:setopts(standard_error, [{encoding, utf8}]), + + Encoding = file:native_name_encoding(), + case Encoding of latin1 -> io:format(standard_error, "warning: the VM is running with native name encoding of latin1 which may cause " @@ -40,31 +85,63 @@ start(_Type, _Args) -> ok end, - elixir_sup:start_link(). + %% TODO: Remove OTPRelease check once we support OTP 20+. + Tokenizer = case code:ensure_loaded('Elixir.String.Tokenizer') of + {module, Mod} when OTPRelease >= 20 -> Mod; + {error, _} -> elixir_tokenizer + end, -stop(_S) -> - ok. + URIConfig = [{{uri, <<"ftp">>}, 21}, + {{uri, <<"sftp">>}, 22}, + {{uri, <<"tftp">>}, 69}, + {{uri, <<"http">>}, 80}, + {{uri, <<"https">>}, 443}, + {{uri, <<"ldap">>}, 389}], + CompilerOpts = #{docs => true, ignore_module_conflict => false, + debug_info => true, warnings_as_errors => false, + relative_paths => true}, + {ok, [[Home] | _]} = init:get_argument(home), + Config = [{at_exit, []}, + {home, unicode:characters_to_binary(Home, Encoding, Encoding)}, + {compiler_options, CompilerOpts}, + {identifier_tokenizer, Tokenizer} + | URIConfig], + Tab = elixir_config:new(Config), + case elixir_sup:start_link() of + {ok, Sup} -> + {ok, Sup, Tab}; + {error, _Reason} = Error -> + elixir_config:delete(Tab), + Error + end. + +stop(Tab) -> + elixir_config:delete(Tab). config_change(_Changed, _New, _Remove) -> ok. -%% escript entry point - -main(Args) -> - application:start(?MODULE), - 'Elixir.Kernel.CLI':main(Args). - %% Boot and process given options. Invoked by Elixir's script. start_cli() -> - application:start(?MODULE), + {ok, _} = application:ensure_all_started(?MODULE), + + %% We start the Logger so tools that depend on Elixir + %% always have the Logger directly accessible. However + %% Logger is not a dependency of the Elixir application, + %% which means releases that want to use Logger must + %% always list it as part of its applications. + _ = case code:ensure_loaded('Elixir.Logger') of + {module, _} -> application:start(logger); + {error, _} -> ok + end, + 'Elixir.Kernel.CLI':main(init:get_plain_arguments()). 
%% EVAL HOOKS env_for_eval(Opts) -> env_for_eval((elixir_env:new())#{ - local := nil, requires := elixir_dispatch:default_requires(), functions := elixir_dispatch:default_functions(), macros := elixir_dispatch:default_macros() @@ -73,37 +150,32 @@ env_for_eval(Opts) -> env_for_eval(Env, Opts) -> Line = case lists:keyfind(line, 1, Opts) of {line, LineOpt} when is_integer(LineOpt) -> LineOpt; - false -> ?m(Env, line) + false -> ?key(Env, line) end, File = case lists:keyfind(file, 1, Opts) of {file, FileOpt} when is_binary(FileOpt) -> FileOpt; - false -> ?m(Env, file) - end, - - Local = case lists:keyfind(delegate_locals_to, 1, Opts) of - {delegate_locals_to, LocalOpt} when is_atom(LocalOpt) -> LocalOpt; - false -> ?m(Env, local) + false -> ?key(Env, file) end, Aliases = case lists:keyfind(aliases, 1, Opts) of {aliases, AliasesOpt} when is_list(AliasesOpt) -> AliasesOpt; - false -> ?m(Env, aliases) + false -> ?key(Env, aliases) end, Requires = case lists:keyfind(requires, 1, Opts) of {requires, RequiresOpt} when is_list(RequiresOpt) -> ordsets:from_list(RequiresOpt); - false -> ?m(Env, requires) + false -> ?key(Env, requires) end, Functions = case lists:keyfind(functions, 1, Opts) of {functions, FunctionsOpt} when is_list(FunctionsOpt) -> FunctionsOpt; - false -> ?m(Env, functions) + false -> ?key(Env, functions) end, Macros = case lists:keyfind(macros, 1, Opts) of {macros, MacrosOpt} when is_list(MacrosOpt) -> MacrosOpt; - false -> ?m(Env, macros) + false -> ?key(Env, macros) end, Module = case lists:keyfind(module, 1, Opts) of @@ -112,7 +184,7 @@ env_for_eval(Env, Opts) -> end, Env#{ - file := File, local := Local, module := Module, + file := File, module := Module, macros := Macros, functions := Functions, requires := Requires, aliases := Aliases, line := Line }. @@ -137,7 +209,7 @@ eval_quoted(Tree, Binding, #{line := Line} = E) -> eval_forms(elixir_quote:linify(Line, Tree), Binding, E). %% Handle forms evaluation. The main difference to -%% to eval_quoted is that it does not linefy the given +%% eval_quoted is that it does not linefy the given %% args. eval_forms(Tree, Binding, Opts) when is_list(Opts) -> @@ -145,23 +217,21 @@ eval_forms(Tree, Binding, Opts) when is_list(Opts) -> eval_forms(Tree, Binding, E) -> eval_forms(Tree, Binding, E, elixir_env:env_to_scope(E)). eval_forms(Tree, Binding, Env, Scope) -> - {ParsedBinding, ParsedScope} = elixir_scope:load_binding(Binding, Scope), - ParsedEnv = Env#{vars := [K || {K,_} <- ParsedScope#elixir_scope.vars]}, + {ParsedBinding, ParsedVars, ParsedScope} = elixir_erl_var:load_binding(Binding, Scope), + ParsedEnv = Env#{vars := ParsedVars}, {Erl, NewEnv, NewScope} = quoted_to_erl(Tree, ParsedEnv, ParsedScope), case Erl of {atom, _, Atom} -> {Atom, Binding, NewEnv, NewScope}; _ -> - {value, Value, NewBinding} = erl_eval(Erl, ParsedBinding), - {Value, elixir_scope:dump_binding(NewBinding, NewScope), NewEnv, NewScope} + % Below must be all one line for locations to be the same + % when the stacktrace is extended to the full stacktrace. + {value, Value, NewBinding} = + try erl_eval:expr(Erl, ParsedBinding, none, none, none) catch Class:Exception -> erlang:raise(Class, Exception, get_stacktrace()) end, + {Value, elixir_erl_var:dump_binding(NewBinding, NewScope), NewEnv, NewScope} end. -erl_eval(Erl, ParsedBinding) -> - % Below must be all one line for locations to be the same when the stacktrace - % needs to be extended to the full stacktrace. 
- try erl_eval:expr(Erl, ParsedBinding) catch Class:Exception -> erlang:raise(Class, Exception, get_stacktrace()) end. - get_stacktrace() -> Stacktrace = erlang:get_stacktrace(), % eval_eval and eval_bits can call :erlang.raise/3 without the full @@ -185,28 +255,36 @@ get_stacktrace(CurrentStack, CurrentStack) -> get_stacktrace([StackItem | Stacktrace], CurrentStack) -> [StackItem | get_stacktrace(Stacktrace, CurrentStack)]. -%% Converts a quoted expression to erlang abstract format +%% Converts a quoted expression to Erlang abstract format quoted_to_erl(Quoted, Env) -> quoted_to_erl(Quoted, Env, elixir_env:env_to_scope(Env)). quoted_to_erl(Quoted, Env, Scope) -> - {Expanded, NewEnv} = elixir_exp:expand(Quoted, Env), - {Erl, NewScope} = elixir_translator:translate(Expanded, Scope), + {Expanded, NewEnv} = elixir_expand:expand(Quoted, Env), + {Erl, NewScope} = elixir_erl_pass:translate(Expanded, Scope), {Erl, NewEnv, NewScope}. -%% Converts a given string (char list) into quote expression +%% Converts a given string (charlist) into quote expression string_to_quoted(String, StartLine, File, Opts) when is_integer(StartLine), is_binary(File) -> - case elixir_tokenizer:tokenize(String, StartLine, [{file, File}|Opts]) of - {ok, _Line, Tokens} -> + case elixir_tokenizer:tokenize(String, StartLine, [{file, File} | Opts]) of + {ok, _Line, _Column, Tokens} -> + put(elixir_parser_file, File), try elixir_parser:parse(Tokens) of {ok, Forms} -> {ok, Forms}; + {error, {{Line, _, _}, _, [Error, Token]}} -> {error, {Line, to_binary(Error), to_binary(Token)}}; {error, {Line, _, [Error, Token]}} -> {error, {Line, to_binary(Error), to_binary(Token)}} catch + {error, {{Line, _, _}, _, [Error, Token]}} -> {error, {Line, to_binary(Error), to_binary(Token)}}; {error, {Line, _, [Error, Token]}} -> {error, {Line, to_binary(Error), to_binary(Token)}} + after + erase(elixir_parser_file) end; - {error, {Line, Error, Token}, _Rest, _SoFar} -> {error, {Line, to_binary(Error), to_binary(Token)}} + {error, {Line, {ErrorPrefix, ErrorSuffix}, Token}, _Rest, _SoFar} -> + {error, {Line, {to_binary(ErrorPrefix), to_binary(ErrorSuffix)}, to_binary(Token)}}; + {error, {Line, Error, Token}, _Rest, _SoFar} -> + {error, {Line, to_binary(Error), to_binary(Token)}} end. 'string_to_quoted!'(String, StartLine, File, Opts) -> diff --git a/lib/elixir/src/elixir.hrl b/lib/elixir/src/elixir.hrl new file mode 100644 index 00000000000..83b1b44d408 --- /dev/null +++ b/lib/elixir/src/elixir.hrl @@ -0,0 +1,40 @@ +-define(key(M, K), maps:get(K, M)). +-define(ann(Opts), elixir_erl:get_ann(Opts)). +-define(line(Opts), elixir_utils:get_line(Opts)). +-define(generated(Meta), [{generated, true} | Meta]). +-define(var_context, ?MODULE). + +-record(elixir_erl, { + def=nil, %% a tuple with the current definition {def | ..., name, arity} + context=nil, %% can be match, guards or nil + extra=nil, %% extra information about the context, like pin_guard and map_key + caller=false, %% when true, it means caller was invoked + vars=#{}, %% a map of defined variables and their alias + backup_vars=nil, %% a copy of vars to be used on ^var + match_vars=nil, %% a set of all variables defined in a particular match + export_vars=nil, %% a dict of all variables defined in a particular clause + extra_guards=nil, %% extra guards from args expansion + counter=#{}, %% a map counting the variables defined + file=(<<"nofile">>) %% the current scope filename +}). 
+ +-record(elixir_quote, { + line=false, + file=nil, + context=nil, + vars_hygiene=true, + aliases_hygiene=true, + imports_hygiene=true, + unquote=true, + unquoted=false, + escape=false, + generated=false +}). + +-record(elixir_tokenizer, { + file, + terminators=[], + check_terminators=true, + existing_atoms_only=false, + identifier_tokenizer=elixir_tokenizer +}). diff --git a/lib/elixir/src/elixir_aliases.erl b/lib/elixir/src/elixir_aliases.erl index 8f06fffc8cb..140d0679ea6 100644 --- a/lib/elixir/src/elixir_aliases.erl +++ b/lib/elixir/src/elixir_aliases.erl @@ -10,21 +10,32 @@ inspect(Atom) when is_atom(Atom) -> end. %% Store an alias in the given scope -store(_Meta, New, New, _TKV, Aliases, MacroAliases, _Lexical) -> - {Aliases, MacroAliases}; -store(Meta, New, Old, TKV, Aliases, MacroAliases, Lexical) -> - record_warn(Meta, New, TKV, Lexical), +store(Meta, New, New, _TOpts, Aliases, MacroAliases, _Lexical) -> + {remove_alias(New, Aliases), remove_macro_alias(Meta, New, MacroAliases)}; +store(Meta, New, Old, TOpts, Aliases, MacroAliases, Lexical) -> + record_warn(Meta, New, TOpts, Lexical), {store_alias(New, Old, Aliases), store_macro_alias(Meta, New, Old, MacroAliases)}. store_alias(New, Old, Aliases) -> lists:keystore(New, 1, Aliases, {New, Old}). + store_macro_alias(Meta, New, Old, Aliases) -> - case lists:keymember(context, 1, Meta) andalso - lists:keyfind(counter, 1, Meta) of + case lists:keyfind(counter, 1, Meta) of {counter, Counter} when is_integer(Counter) -> lists:keystore(New, 1, Aliases, {New, {Counter, Old}}); - _ -> + false -> + Aliases + end. + +remove_alias(Atom, Aliases) -> + lists:keydelete(Atom, 1, Aliases). + +remove_macro_alias(Meta, Atom, Aliases) -> + case lists:keyfind(counter, 1, Meta) of + {counter, Counter} when is_integer(Counter) -> + lists:keydelete(Atom, 1, Aliases); + false -> Aliases end. @@ -40,7 +51,7 @@ record_warn(Meta, Ref, Opts, Lexical) -> %% Expand an alias. It returns an atom (meaning that there %% was an expansion) or a list of atoms. -expand({'__aliases__', _Meta, ['Elixir'|_] = List}, _Aliases, _MacroAliases, _LexicalTracker) -> +expand({'__aliases__', _Meta, ['Elixir' | _] = List}, _Aliases, _MacroAliases, _LexicalTracker) -> concat(List); expand({'__aliases__', Meta, _} = Alias, Aliases, MacroAliases, LexicalTracker) -> @@ -53,19 +64,19 @@ expand({'__aliases__', Meta, _} = Alias, Aliases, MacroAliases, LexicalTracker) expand(Alias, Aliases, LexicalTracker) end. -expand({'__aliases__', Meta, [H|T]}, Aliases, LexicalTracker) when is_atom(H) -> +expand({'__aliases__', Meta, [H | T]}, Aliases, LexicalTracker) when is_atom(H) -> Lookup = list_to_atom("Elixir." ++ atom_to_list(H)), Counter = case lists:keyfind(counter, 1, Meta) of {counter, C} -> C; _ -> nil end, case lookup(Lookup, Aliases, Counter) of - Lookup -> [H|T]; + Lookup -> [H | T]; Atom -> elixir_lexical:record_alias(Lookup, LexicalTracker), case T of [] -> Atom; - _ -> concat([Atom|T]) + _ -> concat([Atom | T]) end end; @@ -80,11 +91,15 @@ ensure_loaded(Meta, Ref, E) -> Ref:module_info(compile) catch error:undef -> - Kind = case lists:member(Ref, ?m(E, context_modules)) of - true -> scheduled_module; + Kind = case lists:member(Ref, ?key(E, context_modules)) of + true -> + case ?key(E, module) of + Ref -> circular_module; + _ -> scheduled_module + end; false -> unloaded_module end, - elixir_errors:form_error(Meta, ?m(E, file), ?MODULE, {Kind, Ref}) + elixir_errors:form_error(Meta, ?key(E, file), ?MODULE, {Kind, Ref}) end. %% Receives an atom and returns the last bit as an alias. 
@@ -93,9 +108,9 @@ last(Atom) -> Last = last(lists:reverse(atom_to_list(Atom)), []), list_to_atom("Elixir." ++ Last). -last([$.|_], Acc) -> Acc; -last([H|T], Acc) -> last(T, [H|Acc]); -last([], Acc) -> Acc. +last([$. | _], Acc) -> Acc; +last([H | T], Acc) -> last(T, [H | Acc]); +last([], Acc) -> Acc. %% Receives a list of atoms, binaries or lists %% representing modules and concatenates them. @@ -103,20 +118,20 @@ last([], Acc) -> Acc. concat(Args) -> binary_to_atom(do_concat(Args), utf8). safe_concat(Args) -> binary_to_existing_atom(do_concat(Args), utf8). -do_concat([H|T]) when is_atom(H), H /= nil -> - do_concat([atom_to_binary(H, utf8)|T]); -do_concat([<<"Elixir.", _/binary>>=H|T]) -> +do_concat([H | T]) when is_atom(H), H /= nil -> + do_concat([atom_to_binary(H, utf8) | T]); +do_concat([<<"Elixir.", _/binary>>=H | T]) -> do_concat(T, H); -do_concat([<<"Elixir">>=H|T]) -> +do_concat([<<"Elixir">>=H | T]) -> do_concat(T, H); do_concat(T) -> do_concat(T, <<"Elixir">>). -do_concat([nil|T], Acc) -> +do_concat([nil | T], Acc) -> do_concat(T, Acc); -do_concat([H|T], Acc) when is_atom(H) -> +do_concat([H | T], Acc) when is_atom(H) -> do_concat(T, <>); -do_concat([H|T], Acc) when is_binary(H) -> +do_concat([H | T], Acc) when is_binary(H) -> do_concat(T, <>); do_concat([], Acc) -> Acc. @@ -137,8 +152,49 @@ lookup(Else, Dict, Counter) -> %% Errors format_error({unloaded_module, Module}) -> - io_lib:format("module ~ts is not loaded and could not be found", [elixir_aliases:inspect(Module)]); + io_lib:format("module ~ts is not loaded and could not be found", [inspect(Module)]); format_error({scheduled_module, Module}) -> - io_lib:format("module ~ts is not loaded but was defined. This happens because you are trying to use a module in the same context it is defined. Try defining the module outside the context that requires it.", - [inspect(Module)]). \ No newline at end of file + io_lib:format( + "module ~ts is not loaded but was defined. This happens when you depend on " + "a module in the same context it is defined. For example:\n" + "\n" + " defmodule MyApp do\n" + " defmodule Mod do\n" + " end\n" + "\n" + " use Mod\n" + " end\n" + "\n" + "Try defining the module outside the context that uses it:\n" + "\n" + " defmodule MyApp.Mod do\n" + " end\n" + "\n" + " defmodule MyApp do\n" + " use MyApp.Mod\n" + " end\n" + "\n" + "If the module is defined at the top-level and you are trying to " + "use it at the top-level, such is not supported by Elixir", + [inspect(Module)]); + +format_error({circular_module, Module}) -> + io_lib:format( + "you are trying to use the module ~ts which is currently being defined.\n" + "\n" + "This may happen if you accidentally override the module you want to use. For example:\n" + "\n" + " defmodule MyApp do\n" + " defmodule Supervisor do\n" + " use Supervisor\n" + " end\n" + " end\n" + "\n" + "In the example above, the new Supervisor conflicts with Elixir's. " + "This may be fixed by using the fully qualified name on definition:\n" + "\n" + " defmodule MyApp.Supervisor do\n" + " use Supervisor\n" + " end\n", + [inspect(Module)]). diff --git a/lib/elixir/src/elixir_bitstring.erl b/lib/elixir/src/elixir_bitstring.erl index fadb4fec843..8daa0acc4a7 100644 --- a/lib/elixir/src/elixir_bitstring.erl +++ b/lib/elixir/src/elixir_bitstring.erl @@ -1,223 +1,264 @@ -module(elixir_bitstring). --export([translate/3, expand/3, has_size/1]). +-export([expand/4, format_error/1]). +-import(elixir_errors, [form_error/4]). -include("elixir.hrl"). 
-%% Expansion - -expand(Meta, Args, E) -> - case ?m(E, context) of +expand(Meta, Args, E, RequireSize) -> + case ?key(E, context) of match -> - {EArgs, EA} = expand_bitstr(fun elixir_exp:expand/2, Args, [], E), + {EArgs, EA} = expand(Meta, fun elixir_expand:expand/2, Args, [], E, RequireSize), {{'<<>>', Meta, EArgs}, EA}; _ -> - {EArgs, {EC, EV}} = expand_bitstr(fun elixir_exp:expand_arg/2, Args, [], {E, E}), + {EArgs, {EC, EV}} = expand(Meta, fun elixir_expand:expand_arg/2, Args, [], {E, E}, RequireSize), {{'<<>>', Meta, EArgs}, elixir_env:mergea(EV, EC)} end. -expand_bitstr(_Fun, [], Acc, E) -> +expand(_BitstrMeta, _Fun, [], Acc, E, _RequireSize) -> {lists:reverse(Acc), E}; -expand_bitstr(Fun, [{'::',Meta,[Left,Right]}|T], Acc, E) -> - {ELeft, EL} = Fun(Left, E), +expand(BitstrMeta, Fun, [{'::', Meta, [Left, Right]} | T], Acc, E, RequireSize) -> + {ELeft, EL} = expand_expr(Meta, Left, Fun, E), %% Variables defined outside the binary can be accounted %% on subparts, however we can't assign new variables. - case E of - {ER, _} -> ok; %% expand_arg, no assigns - _ -> ER = E#{context := nil} %% expand_each, revert assigns - end, - - ERight = expand_bit_info(Meta, Right, ER), - expand_bitstr(Fun, T, [{'::',Meta,[ELeft,ERight]}|Acc], EL); + {ER, MatchSize} = + case E of + {EExtracted, _} -> {EExtracted, false}; %% expand_arg, no assigns + _ -> {E#{context := nil}, T /= []} %% expand, revert assigns + end, -expand_bitstr(Fun, [H|T], Acc, E) -> - {Expr, ES} = Fun(H, E), - expand_bitstr(Fun, T, [Expr|Acc], ES). + ERight = expand_specs(expr_type(ELeft), Meta, Right, ER, RequireSize or MatchSize), + expand(BitstrMeta, Fun, T, [{'::', Meta, [ELeft, ERight]} | Acc], EL, RequireSize); +expand(BitstrMeta, Fun, [{_, Meta, _} = H | T], Acc, E, RequireSize) -> + {Expr, ES} = expand_expr(Meta, H, Fun, E), + expand(BitstrMeta, Fun, T, [wrap_expr(Expr) | Acc], ES, RequireSize); +expand(Meta, Fun, [H | T], Acc, E, RequireSize) -> + {Expr, ES} = expand_expr(Meta, H, Fun, E), + expand(Meta, Fun, T, [wrap_expr(Expr) | Acc], ES, RequireSize). + +wrap_expr(Expr) -> + case expr_type(Expr) of + bitstring -> + {'::', [], [Expr, {bitstring, [], []}]}; + binary -> + {'::', [], [Expr, {binary, [], []}]}; + float -> + {'::', [], [Expr, {float, [], []}]}; + _ -> + {'::', [], [Expr, {integer, [], []}]} + end. -%% Expand bit info +expr_type(Integer) when is_integer(Integer) -> integer; +expr_type(Float) when is_float(Float) -> float; +expr_type(Binary) when is_binary(Binary) -> binary; +expr_type({'<<>>', _, _}) -> bitstring; +expr_type(_) -> default. -expand_bit_info(Meta, Info, E) when is_list(Info) -> - expand_bit_info(Meta, Info, default, [], E); +%% Expands the expression of a bitstring, that is, the LHS of :: or +%% an argument of the bitstring (such as "foo" in "<>"). -expand_bit_info(Meta, Info, E) -> - expand_bit_info(Meta, [Info], E). +expand_expr(_, {{'.', M1, ['Elixir.Kernel', to_string]}, M2, [Arg]}, Fun, E) -> + case Fun(Arg, E) of + {EBin, EE} when is_binary(EBin) -> {EBin, EE}; + {EArg, EE} -> {{{'.', M1, ['Elixir.String.Chars', to_string]}, M2, [EArg]}, EE} + end; +expand_expr(Meta, Component, Fun, E) -> + case Fun(Component, E) of + {EComponent, _} when is_list(EComponent); is_atom(EComponent) -> + ErrorE = env_for_error(E), + form_error(Meta, ?key(ErrorE, file), ?MODULE, {invalid_literal, EComponent}); + {_, _} = Expanded -> + Expanded + end. 
-expand_bit_info(Meta, [{Expr, ExprMeta, Args}|T], Size, Types, E) when is_atom(Expr) -> - ListArgs = if is_atom(Args) -> []; is_list(Args) -> Args end, - case expand_bit_type_or_size(Expr, ListArgs) of - type -> - {EArgs, EE} = elixir_exp:expand_args(ListArgs, E), - expand_bit_info(Meta, T, Size, [{Expr, [], EArgs}|Types], EE); - size -> - case Size of +env_for_error({E, _}) -> E; +env_for_error(E) -> E. + +%% Expands and normalizes types of a bitstring. + +expand_specs(ExprType, Meta, Info, E, RequireSize) -> + Default = + #{size => default, + unit => default, + sign => default, + type => default, + endianess => default}, + #{size := Size, unit := Unit, type := Type, endianess := Endianess, sign := Sign} = + expand_each_spec(Meta, unpack_specs(Info, []), Default, E), + MergedType = type(Meta, ExprType, Type, E), + validate_size_required(Meta, RequireSize, ExprType, MergedType, Size, E), + SizeAndUnit = size_and_unit(Meta, ExprType, Size, Unit, E), + [H | T] = build_spec(Meta, Size, Unit, MergedType, Endianess, Sign, SizeAndUnit, E), + lists:foldl(fun(I, Acc) -> {'-', Meta, [Acc, I]} end, H, T). + +type(_, default, default, _) -> + integer; +type(_, ExprType, default, _) -> + ExprType; +type(_, binary, Type, _) when Type == binary; Type == bitstring; Type == utf8; Type == utf16; Type == utf32 -> + Type; +type(_, bitstring, Type, _) when Type == binary; Type == bitstring -> + Type; +type(_, integer, Type, _) when Type == integer; Type == float; Type == utf8; Type == utf16; Type == utf32 -> + Type; +type(_, float, Type, _) when Type == float -> + Type; +type(_, default, Type, _) -> + Type; +type(Meta, Other, Value, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {bittype_mismatch, Value, Other, type}). + +expand_each_spec(Meta, [{Expr, _, Args} = H | T], Map, E) when is_atom(Expr) -> + case validate_spec(Expr, Args) of + {Key, Arg} -> + {Value, EE} = expand_spec_arg(Arg, E), + validate_spec_arg(Meta, Key, Value, EE), + + case maps:get(Key, Map) of default -> ok; - _ -> elixir_errors:compile_error(Meta, ?m(E, file), "duplicated size definition in bitstring") + Value -> ok; + Other -> form_error(Meta, ?key(E, file), ?MODULE, {bittype_mismatch, Value, Other, Key}) end, - {EArgs, EE} = elixir_exp:expand_args(ListArgs, E), - expand_bit_info(Meta, T, {Expr, [], EArgs}, Types, EE); + + expand_each_spec(Meta, T, maps:put(Key, Value, Map), EE); none -> - handle_unknown_bit_info(Meta, {Expr, ExprMeta, ListArgs}, T, Size, Types, E) + case 'Elixir.Macro':expand(H, elixir_env:linify({?line(Meta), E})) of + H -> + form_error(Meta, ?key(E, file), ?MODULE, {undefined_bittype, H}); + NewTypes -> + expand_each_spec(Meta, unpack_specs(NewTypes, []) ++ T, Map, E) + end end; - -expand_bit_info(Meta, [Int|T], Size, Types, E) when is_integer(Int) -> - expand_bit_info(Meta, [{size, [], [Int]}|T], Size, Types, E); - -expand_bit_info(Meta, [Expr|_], _Size, _Types, E) -> - elixir_errors:compile_error(Meta, ?m(E, file), - "unknown bitstring specifier ~ts", ['Elixir.Kernel':inspect(Expr)]); - -expand_bit_info(_Meta, [], Size, Types, _) -> - case Size of - default -> lists:reverse(Types); - _ -> [Size|lists:reverse(Types)] - end. 
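The expand_specs/5 clause above rebuilds the expanded specifiers into the usual a-b-c AST shape by folding with '-'. A minimal standalone sketch of that folding step (the module name is invented):

    -module(spec_fold_sketch).
    -export([join/1]).

    %% join([{integer, [], []}, {size, [], [8]}]) returns
    %%   {'-', [], [{integer, [], []}, {size, [], [8]}]}
    %% which is the AST for "integer-size(8)"; longer lists associate to the
    %% left, so [a, b, c] becomes the AST for "a-b-c".
    join([H | T]) ->
      lists:foldl(fun(I, Acc) -> {'-', [], [Acc, I]} end, H, T).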
- -expand_bit_type_or_size(binary, []) -> type; -expand_bit_type_or_size(integer, []) -> type; -expand_bit_type_or_size(float, []) -> type; -expand_bit_type_or_size(bitstring, []) -> type; -expand_bit_type_or_size(bytes, []) -> type; -expand_bit_type_or_size(bits, []) -> type; -expand_bit_type_or_size(utf8, []) -> type; -expand_bit_type_or_size(utf16, []) -> type; -expand_bit_type_or_size(utf32, []) -> type; -expand_bit_type_or_size(signed, []) -> type; -expand_bit_type_or_size(unsigned, []) -> type; -expand_bit_type_or_size(big, []) -> type; -expand_bit_type_or_size(little, []) -> type; -expand_bit_type_or_size(native, []) -> type; -expand_bit_type_or_size(unit, [_]) -> type; -expand_bit_type_or_size(size, [_]) -> size; -expand_bit_type_or_size(_, _) -> none. - -handle_unknown_bit_info(Meta, {_, ExprMeta, _} = Expr, T, Size, Types, E) -> - case 'Elixir.Macro':expand(Expr, elixir_env:linify({?line(ExprMeta), E})) of - Expr -> - elixir_errors:compile_error(ExprMeta, ?m(E, file), - "unknown bitstring specifier ~ts", ['Elixir.Macro':to_string(Expr)]); - Other -> - List = case is_list(Other) of true -> Other; false -> [Other] end, - expand_bit_info(Meta, List ++ T, Size, Types, E) - end. - -%% Translation - -has_size({bin, _, Elements}) -> - not lists:any(fun({bin_element, _Line, _Expr, Size, Types}) -> - (Types /= default) andalso (Size == default) andalso - lists:any(fun(X) -> lists:member(X, Types) end, - [bits, bytes, bitstring, binary]) - end, Elements). - -translate(Meta, Args, S) -> - case S#elixir_scope.context of - match -> - build_bitstr(fun elixir_translator:translate/2, Args, Meta, S); - _ -> - build_bitstr(fun(X, Acc) -> elixir_translator:translate_arg(X, Acc, S) end, Args, Meta, S) - end. - -build_bitstr(Fun, Exprs, Meta, S) -> - {Final, FinalS} = build_bitstr_each(Fun, Exprs, Meta, S, []), - {{bin, ?line(Meta), lists:reverse(Final)}, FinalS}. - -build_bitstr_each(_Fun, [], _Meta, S, Acc) -> - {Acc, S}; - -build_bitstr_each(Fun, [{'::',_,[H,V]}|T], Meta, S, Acc) -> - {Size, Types} = extract_bit_info(Meta, V, S#elixir_scope{context=nil}), - build_bitstr_each(Fun, T, Meta, S, Acc, H, Size, Types); - -build_bitstr_each(Fun, [H|T], Meta, S, Acc) -> - build_bitstr_each(Fun, T, Meta, S, Acc, H, default, default). - -build_bitstr_each(Fun, T, Meta, S, Acc, H, default, Types) when is_binary(H) -> - Element = - case types_allow_splice(Types, []) of - true -> - %% See explanation in elixir_utils:elixir_to_erl/1 to know - %% why we can simply convert the binary to a list. - {bin_element, ?line(Meta), {string, 0, binary_to_list(H)}, default, default}; - false -> - case types_require_conversion(Types) of - true -> - {bin_element, ?line(Meta), {string, 0, elixir_utils:characters_to_list(H)}, default, Types}; - false -> - elixir_errors:compile_error(Meta, S#elixir_scope.file, "invalid types for literal string in <<>>. 
" - "Accepted types are: little, big, utf8, utf16, utf32, bits, bytes, binary, bitstring") - end - end, - - build_bitstr_each(Fun, T, Meta, S, [Element|Acc]); - -build_bitstr_each(_Fun, _T, Meta, S, _Acc, H, _Size, _Types) when is_binary(H) -> - elixir_errors:compile_error(Meta, S#elixir_scope.file, "size is not supported for literal string in <<>>"); - -build_bitstr_each(_Fun, _T, Meta, S, _Acc, H, _Size, _Types) when is_list(H); is_atom(H) -> - elixir_errors:compile_error(Meta, S#elixir_scope.file, "invalid literal ~ts in <<>>", - ['Elixir.Macro':to_string(H)]); - -build_bitstr_each(Fun, T, Meta, S, Acc, H, Size, Types) -> - {Expr, NS} = Fun(H, S), - - case Expr of - {bin, _, Elements} -> - case (Size == default) andalso types_allow_splice(Types, Elements) of - true -> build_bitstr_each(Fun, T, Meta, NS, lists:reverse(Elements) ++ Acc); - false -> build_bitstr_each(Fun, T, Meta, NS, [{bin_element, ?line(Meta), Expr, Size, Types}|Acc]) - end; - _ -> - build_bitstr_each(Fun, T, Meta, NS, [{bin_element, ?line(Meta), Expr, Size, Types}|Acc]) - end. - -types_require_conversion([End|T]) when End == little; End == big -> types_require_conversion(T); -types_require_conversion([UTF|T]) when UTF == utf8; UTF == utf16; UTF == utf32 -> types_require_conversion(T); -types_require_conversion([]) -> true; -types_require_conversion(_) -> false. - -types_allow_splice([bytes], Elements) -> is_byte_size(Elements, 0); -types_allow_splice([binary], Elements) -> is_byte_size(Elements, 0); -types_allow_splice([bits], _) -> true; -types_allow_splice([bitstring], _) -> true; -types_allow_splice(default, _) -> true; -types_allow_splice(_, _) -> false. - -is_byte_size([Element|T], Acc) -> - case elem_size(Element) of - {unknown, Unit} when Unit rem 8 == 0 -> is_byte_size(T, Acc); - {unknown, _Unit} -> false; - {Size, Unit} -> is_byte_size(T, Size*Unit + Acc) +expand_each_spec(Meta, [Expr | _], _Map, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {undefined_bittype, Expr}); +expand_each_spec(_Meta, [], Map, _E) -> + Map. + +unpack_specs({'-', _, [H, T]}, Acc) -> + unpack_specs(H, unpack_specs(T, Acc)); +unpack_specs({'*', _, [{'_', _, Atom}, Unit]}, Acc) when is_atom(Atom) and is_integer(Unit) -> + [{unit, [], [Unit]} | Acc]; +unpack_specs({'*', _, [Size, Unit]}, Acc) when is_integer(Size) and is_integer(Unit) -> + [{size, [], [Size]}, {unit, [], [Unit]} | Acc]; +unpack_specs(Size, Acc) when is_integer(Size) -> + [{size, [], [Size]} | Acc]; +unpack_specs({Expr, Meta, Args}, Acc) when is_atom(Expr) -> + ListArgs = if is_atom(Args) -> []; is_list(Args) -> Args end, + [{Expr, Meta, ListArgs} | Acc]; +unpack_specs(Other, Acc) -> + [Other | Acc]. + +validate_spec(big, []) -> {endianess, big}; +validate_spec(little, []) -> {endianess, little}; +validate_spec(native, []) -> {endianess, native}; +validate_spec(size, [Size]) -> {size, Size}; +validate_spec(unit, [Unit]) -> {unit, Unit}; +validate_spec(integer, []) -> {type, integer}; +validate_spec(float, []) -> {type, float}; +validate_spec(binary, []) -> {type, binary}; +validate_spec(bytes, []) -> {type, binary}; +validate_spec(bitstring, []) -> {type, bitstring}; +validate_spec(bits, []) -> {type, bitstring}; +validate_spec(utf8, []) -> {type, utf8}; +validate_spec(utf16, []) -> {type, utf16}; +validate_spec(utf32, []) -> {type, utf32}; +validate_spec(signed, []) -> {sign, signed}; +validate_spec(unsigned, []) -> {sign, unsigned}; +validate_spec(_, _) -> none. 
+ +expand_spec_arg(Expr, E) when is_atom(Expr); is_integer(Expr) -> {Expr, E}; +expand_spec_arg(Expr, E) -> elixir_expand:expand(Expr, E). + +validate_spec_arg(Meta, size, Value, E) -> + case Value of + {Var, _, Context} when is_atom(Var) and is_atom(Context) -> ok; + _ when is_integer(Value) -> ok; + _ -> form_error(Meta, ?key(E, file), ?MODULE, {bad_size_argument, Value}) + end; +validate_spec_arg(Meta, unit, Value, E) when not is_integer(Value) -> + form_error(Meta, ?key(E, file), ?MODULE, {bad_unit_argument, Value}); +validate_spec_arg(_Meta, _Key, _Value, _E) -> + ok. + +validate_size_required(Meta, true, default, Type, default, E) when Type == binary; Type == bitstring -> + form_error(Meta, ?key(E, file), ?MODULE, unsized_binary); +validate_size_required(_, _, _, _, _, _) -> + ok. + +size_and_unit(Meta, bitstring, Size, Unit, E) when Size /= default; Unit /= default -> + form_error(Meta, ?key(E, file), ?MODULE, bittype_literal_bitstring); +size_and_unit(Meta, binary, Size, Unit, E) when Size /= default; Unit /= default -> + form_error(Meta, ?key(E, file), ?MODULE, bittype_literal_string); +size_and_unit(_Meta, _ExprType, Size, Unit, _E) -> + add_arg(unit, Unit, add_arg(size, Size, [])). + +add_arg(_Key, default, Spec) -> Spec; +add_arg(Key, Arg, Spec) -> [{Key, [], [Arg]} | Spec]. + +build_spec(Meta, Size, Unit, Type, Endianess, Sign, Spec, E) when Type == utf8; Type == utf16; Type == utf32 -> + if + Size /= default; Unit /= default -> + form_error(Meta, ?key(E, file), ?MODULE, bittype_utf); + Sign /= default -> + form_error(Meta, ?key(E, file), ?MODULE, bittype_signed); + true -> + add_spec(Type, add_spec(Endianess, Spec)) end; -is_byte_size([], Size) -> - Size rem 8 == 0. - -elem_size({bin_element, _, _, default, _}) -> {0, 0}; -elem_size({bin_element, _, _, {integer,_,Size}, Types}) -> {Size, unit_size(Types, 1)}; -elem_size({bin_element, _, _, _Size, Types}) -> {unknown, unit_size(Types, 1)}. -unit_size([binary|T], _) -> unit_size(T, 8); -unit_size([{unit, Size}|_], _) -> Size; -unit_size([_|T], Guess) -> unit_size(T, Guess); -unit_size([], Guess) -> Guess. +build_spec(Meta, _Size, Unit, Type, _Endianess, Sign, Spec, E) when Type == binary; Type == bitstring -> + if + Type == bitstring, Unit /= default, Unit /= 1 -> + form_error(Meta, ?key(E, file), ?MODULE, {bittype_mismatch, Unit, 1, unit}); + Sign /= default -> + form_error(Meta, ?key(E, file), ?MODULE, bittype_signed); + true -> + %% Endianess is supported but has no effect, so we just ignore it. + add_spec(Type, Spec) + end; -%% Extra bitstring specifiers +build_spec(Meta, Size, Unit, Type, Endianess, Sign, Spec, E) when Type == integer; Type == float -> + NumberSize = number_size(Size, Unit), + if + Type == float, is_integer(NumberSize), NumberSize /= 32, NumberSize /= 64 -> + form_error(Meta, ?key(E, file), ?MODULE, {bittype_float_size, NumberSize}); + Size == default, Unit /= default -> + form_error(Meta, ?key(E, file), ?MODULE, bittype_unit); + true -> + add_spec(Type, add_spec(Endianess, add_spec(Sign, Spec))) + end. -extract_bit_info(Meta, [{size, _, [Arg]}|T], S) -> - case elixir_translator:translate(Arg, S) of - {{Kind, _, _} = Size, _} when Kind == integer; Kind == var -> - {Size, extract_bit_type(Meta, T, S)}; - _ -> - elixir_errors:compile_error(Meta, S#elixir_scope.file, - "size in bitstring expects an integer or a variable as argument, got: ~ts", ['Elixir.Macro':to_string(Arg)]) - end; -extract_bit_info(Meta, T, S) -> - {default, extract_bit_type(Meta, T, S)}. 
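The build_spec/8 clause for numeric types above enforces that a float segment ends up 32 or 64 bits wide. A hedged, standalone restatement of that rule (names invented):

    -module(float_width_sketch).
    -export([valid_float_width/2]).

    %% Mirrors number_size/2 above: total width is size, or size * unit.
    number_size(Size, default) when is_integer(Size) -> Size;
    number_size(Size, Unit) when is_integer(Size)    -> Size * Unit;
    number_size(Size, _)                             -> Size.

    %% valid_float_width(16, default) => false  (<<1.0::float-size(16)>> is rejected)
    %% valid_float_width(32, 2)       => true   (32 * 2 = 64 bits)
    %% A non-integer size (a variable) is not checked at compile time.
    valid_float_width(Size, Unit) ->
      case number_size(Size, Unit) of
        N when is_integer(N) -> (N == 32) orelse (N == 64);
        _ -> true
      end.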
- -extract_bit_type(Meta, [{unit, _, [Arg]}|T], S) when is_integer(Arg) -> - [{unit, Arg}|extract_bit_type(Meta, T, S)]; -extract_bit_type(Meta, [{unit, _, [Arg]}|_], S) -> - elixir_errors:compile_error(Meta, S#elixir_scope.file, - "unit in bitstring expects an integer as argument, got: ~ts", ['Elixir.Macro':to_string(Arg)]); -extract_bit_type(Meta, [{Other, _, []}|T], S) -> - [Other|extract_bit_type(Meta, T, S)]; -extract_bit_type(_Meta, [], _S) -> - []. +number_size(Size, default) when is_integer(Size) -> Size; +number_size(Size, Unit) when is_integer(Size) -> Size * Unit; +number_size(Size, _) -> Size. + +add_spec(default, Spec) -> Spec; +add_spec(Key, Spec) -> [{Key, [], []} | Spec]. + +format_error(unsized_binary) -> + "a binary field without size is only allowed at the end of a binary pattern " + "and never allowed in binary generators"; +format_error(bittype_literal_bitstring) -> + "literal <<>> in bitstring supports only type specifiers, which must be one of: " + "binary or bitstring"; +format_error(bittype_literal_string) -> + "literal string in bitstring supports only endianess and type specifiers, which must be one of: " + "little, big, native, utf8, utf16, utf32, bits, bytes, binary or bitstring"; +format_error(bittype_utf) -> + "size and unit are not supported on utf types"; +format_error(bittype_signed) -> + "signed and unsigned specifiers are supported only on integer and float types"; +format_error(bittype_unit) -> + "integer and float types require a size specifier if the unit specifier is given"; +format_error({bittype_float_size, Other}) -> + io_lib:format("float requires size*unit to be 32 or 64 (default), got: ~p", [Other]); +format_error({invalid_literal, Literal}) -> + io_lib:format("invalid literal ~ts in <<>>", ['Elixir.Macro':to_string(Literal)]); +format_error({undefined_bittype, Expr}) -> + io_lib:format("unknown bitstring specifier: ~ts", ['Elixir.Macro':to_string(Expr)]); +format_error({bittype_mismatch, Val1, Val2, Where}) -> + io_lib:format("conflicting ~ts specification for bit field: \"~p\" and \"~p\"", [Where, Val1, Val2]); +format_error({bad_unit_argument, Unit}) -> + io_lib:format("unit in bitstring expects an integer as argument, got: ~ts", + ['Elixir.Macro':to_string(Unit)]); +format_error({bad_size_argument, Size}) -> + io_lib:format("size in bitstring expects an integer or a variable as argument, got: ~ts", + ['Elixir.Macro':to_string(Size)]). diff --git a/lib/elixir/src/elixir_bootstrap.erl b/lib/elixir/src/elixir_bootstrap.erl index 97cf07fb215..a1bfeba12cd 100644 --- a/lib/elixir/src/elixir_bootstrap.erl +++ b/lib/elixir/src/elixir_bootstrap.erl @@ -18,7 +18,7 @@ 'MACRO-defmacro'(Caller, Call, Expr) -> define(Caller, defmacro, Call, Expr). 'MACRO-defmacrop'(Caller, Call, Expr) -> define(Caller, defmacrop, Call, Expr). -'MACRO-defmodule'(_Caller, Alias, [{do,Block}]) -> +'MACRO-defmodule'(_Caller, Alias, [{do, Block}]) -> {Escaped, _} = elixir_quote:escape(Block, false), Args = [Alias, Escaped, [], env()], {{'.', [], [elixir_module, compile]}, [], Args}. @@ -27,25 +27,25 @@ []; '__info__'(macros) -> [{'@', 1}, - {def,1}, - {def,2}, - {defmacro,1}, - {defmacro,2}, - {defmacrop,2}, - {defmodule,2}, - {defp,2}]. - -define({Line,E}, Kind, Call, Expr) -> + {def, 1}, + {def, 2}, + {defmacro, 1}, + {defmacro, 2}, + {defmacrop, 2}, + {defmodule, 2}, + {defp, 2}]. 
+ +define({Line, E}, Kind, Call, Expr) -> {EscapedCall, UC} = elixir_quote:escape(Call, true), {EscapedExpr, UE} = elixir_quote:escape(Expr, true), - Args = [Line, Kind, not(UC or UE), EscapedCall, EscapedExpr, elixir_locals:cache_env(E)], + Args = [Kind, not(UC or UE), EscapedCall, EscapedExpr, elixir_locals:cache_env(E#{line := Line})], {{'.', [], [elixir_def, store_definition]}, [], Args}. unless_loaded(Fun, Args, Callback) -> case code:is_loaded(?kernel) of {_, _} -> apply(?kernel, Fun, Args); - false -> Callback() + false -> Callback() end. env() -> - {'__ENV__', [], nil}. \ No newline at end of file + {'__ENV__', [], nil}. diff --git a/lib/elixir/src/elixir_clauses.erl b/lib/elixir/src/elixir_clauses.erl index f5b3ef9e688..cef04270330 100644 --- a/lib/elixir/src/elixir_clauses.erl +++ b/lib/elixir/src/elixir_clauses.erl @@ -1,226 +1,255 @@ %% Handle code related to args, guard and -> matching for case, %% fn, receive and friends. try is handled in elixir_try. -module(elixir_clauses). --export([match/3, clause/7, clauses/4, guards/4, get_pairs/3, get_pairs/4, - extract_splat_guards/1, extract_guards/1]). +-export([match/3, clause/5, def/2, head/2, + 'case'/3, 'receive'/3, 'try'/3, 'cond'/3, + format_error/1]). +-import(elixir_errors, [form_error/4]). -include("elixir.hrl"). -%% Get pairs from a clause. - -get_pairs(Key, Clauses, As) -> - get_pairs(Key, Clauses, As, false). -get_pairs(Key, Clauses, As, AllowNil) -> - case lists:keyfind(Key, 1, Clauses) of - {Key, Pairs} when is_list(Pairs) -> - [{As, Meta, Left, Right} || {'->', Meta, [Left, Right]} <- Pairs]; - {Key, nil} when AllowNil -> - []; - false -> - [] - end. - -%% Translate matches - -match(Fun, Args, #elixir_scope{context=Context, match_vars=MatchVars, - backup_vars=BackupVars, vars=Vars} = S) when Context /= match -> - {Result, NewS} = match(Fun, Args, S#elixir_scope{context=match, - match_vars=ordsets:new(), backup_vars=Vars}), - {Result, NewS#elixir_scope{context=Context, - match_vars=MatchVars, backup_vars=BackupVars}}; -match(Fun, Args, S) -> Fun(Args, S). - -%% Translate clauses with args, guards and expressions - -clause(Line, Fun, Args, Expr, Guards, Return, S) when is_integer(Line) -> - {TArgs, SA} = match(Fun, Args, S#elixir_scope{extra_guards=[]}), - {TExpr, SE} = elixir_translator:translate_block(Expr, Return, SA#elixir_scope{extra_guards=nil}), - - Extra = SA#elixir_scope.extra_guards, - TGuards = guards(Line, Guards, Extra, SA), - {{clause, Line, TArgs, TGuards, unblock(TExpr)}, SE}. - -% Translate/Extract guards from the given expression. - -guards(Line, Guards, Extra, S) -> - SG = S#elixir_scope{context=guard, extra_guards=nil}, - - case Guards of - [] -> case Extra of [] -> []; _ -> [Extra] end; - _ -> [translate_guard(Line, Guard, Extra, SG) || Guard <- Guards] - end. - -translate_guard(Line, Guard, Extra, S) -> - [element(1, elixir_translator:translate(elixir_quote:linify(Line, Guard), S))|Extra]. - -extract_guards({'when', _, [Left, Right]}) -> {Left, extract_or_guards(Right)}; -extract_guards(Else) -> {Else, []}. - -extract_or_guards({'when', _, [Left, Right]}) -> [Left|extract_or_guards(Right)]; -extract_or_guards(Term) -> [Term]. - -% Extract guards when multiple left side args are allowed. - -extract_splat_guards([{'when', _, [_,_|_] = Args}]) -> - {Left, Right} = elixir_utils:split_last(Args), - {Left, extract_or_guards(Right)}; -extract_splat_guards(Else) -> - {Else, []}. - -% Function for translating macros with match style like case and receive. 
- -clauses(Meta, Clauses, Return, #elixir_scope{export_vars=CV} = S) -> - {TC, TS} = do_clauses(Meta, Clauses, Return, S#elixir_scope{export_vars=[]}), - {TC, TS#elixir_scope{export_vars=elixir_scope:merge_opt_vars(CV, TS#elixir_scope.export_vars)}}. - -do_clauses(_Meta, [], _Return, S) -> - {[], S}; - -do_clauses(Meta, DecoupledClauses, Return, S) -> - % Transform tree just passing the variables counter forward - % and storing variables defined inside each clause. - Transformer = fun(X, {SAcc, VAcc}) -> - {TX, TS} = each_clause(Meta, X, Return, SAcc), - {TX, {elixir_scope:mergec(S, TS), [TS#elixir_scope.export_vars|VAcc]}} +match(Fun, Expr, #{context := match} = E) -> + Fun(Expr, E); +match(Fun, Expr, #{context := Context, prematch_vars := nil, vars := Vars} = E) -> + {EExpr, EE} = Fun(Expr, E#{context := match, prematch_vars := Vars}), + {EExpr, EE#{context := Context, prematch_vars := nil}}. + +def({Meta, Args, Guards, Body}, E) -> + {EArgs, EA} = elixir_expand:expand(Args, E#{context := match}), + {EGuards, EG} = guard(Guards, EA#{context := guard}), + {EBody, _} = elixir_expand:expand(Body, EG#{context := ?key(E, context)}), + {Meta, EArgs, EGuards, EBody}. + +clause(Meta, Kind, Fun, {'->', ClauseMeta, [_, _]} = Clause, E) when is_function(Fun, 3) -> + clause(Meta, Kind, fun(X, Acc) -> Fun(ClauseMeta, X, Acc) end, Clause, E); +clause(_Meta, _Kind, Fun, {'->', Meta, [Left, Right]}, #{export_vars := ExportVars} = E) -> + {ELeft, EL} = Fun(Left, E), + {ERight, ER} = elixir_expand:expand(Right, EL#{export_vars := ExportVars}), + {{'->', Meta, [ELeft, ERight]}, ER}; +clause(Meta, Kind, _Fun, _, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {bad_or_missing_clauses, Kind}). + +head([{'when', Meta, [_ | _] = All}], E) -> + {Args, Guard} = elixir_utils:split_last(All), + {EArgs, EA} = match(fun elixir_expand:expand_args/2, Args, E), + {EGuard, EG} = guard(Guard, EA#{context := guard}), + {[{'when', Meta, EArgs ++ [EGuard]}], EG#{context := ?key(E, context)}}; +head(Args, E) -> + match(fun elixir_expand:expand_args/2, Args, E). + +guard({'when', Meta, [Left, Right]}, E) -> + {ELeft, EL} = guard(Left, E), + {ERight, ER} = guard(Right, EL), + {{'when', Meta, [ELeft, ERight]}, ER}; +guard(Other, E) -> + elixir_expand:expand(Other, E). + +%% Case + +'case'(Meta, [], E) -> + form_error(Meta, ?key(E, file), elixir_expand, {missing_option, 'case', [do]}); +'case'(Meta, Opts, E) when not is_list(Opts) -> + form_error(Meta, ?key(E, file), elixir_expand, {invalid_args, 'case'}); +'case'(Meta, Opts, E) -> + ok = assert_at_most_once('do', Opts, 0, fun(Key) -> + form_error(Meta, ?key(E, file), ?MODULE, {duplicated_clauses, 'case', Key}) + end), + EE = E#{export_vars := []}, + {EClauses, EVars} = lists:mapfoldl(fun(X, Acc) -> expand_case(Meta, X, Acc, EE) end, [], Opts), + {EClauses, elixir_env:mergev(EVars, E)}. + +expand_case(Meta, {'do', _} = Do, Acc, E) -> + Fun = expand_one(Meta, 'case', 'do', fun head/2), + expand_with_export(Meta, 'case', Fun, Do, Acc, E); +expand_case(Meta, {Key, _}, _Acc, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {unexpected_option, 'case', Key}). 
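'case'/3 above, and the 'cond', 'receive' and 'try' expansions that follow, all rely on the same "at most one clause of each kind" check (assert_at_most_once/4, defined near the end of this module). A standalone sketch of that check, with invented names and a plain error tuple in place of the error callback:

    -module(at_most_once_sketch).
    -export([check/2]).

    %% check('do', [{'do', x}, {'after', y}]) => ok
    %% check('do', [{'do', x}, {'do', y}])    => {error, {duplicated, 'do'}}
    check(Kind, Opts) -> check(Kind, Opts, 0).

    check(_Kind, [], _Count) -> ok;
    check(Kind, [{Kind, _} | _], 1) -> {error, {duplicated, Kind}};
    check(Kind, [{Kind, _} | Rest], Count) -> check(Kind, Rest, Count + 1);
    check(Kind, [_ | Rest], Count) -> check(Kind, Rest, Count).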
+ +%% Cond + +'cond'(Meta, [], E) -> + form_error(Meta, ?key(E, file), elixir_expand, {missing_option, 'cond', [do]}); +'cond'(Meta, Opts, E) when not is_list(Opts) -> + form_error(Meta, ?key(E, file), elixir_expand, {invalid_args, 'cond'}); +'cond'(Meta, Opts, E) -> + ok = assert_at_most_once('do', Opts, 0, fun(Key) -> + form_error(Meta, ?key(E, file), ?MODULE, {duplicated_clauses, 'cond', Key}) + end), + EE = E#{export_vars := []}, + {EClauses, EVars} = lists:mapfoldl(fun(X, Acc) -> expand_cond(Meta, X, Acc, EE) end, [], Opts), + {EClauses, elixir_env:mergev(EVars, E)}. + +expand_cond(Meta, {'do', _} = Do, Acc, E) -> + Fun = expand_one(Meta, 'cond', 'do', fun elixir_expand:expand_args/2), + expand_with_export(Meta, 'cond', Fun, Do, Acc, E); +expand_cond(Meta, {Key, _}, _Acc, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {unexpected_option, 'cond', Key}). + +%% Receive + +'receive'(Meta, [], E) -> + form_error(Meta, ?key(E, file), elixir_expand, {missing_option, 'receive', [do, 'after']}); +'receive'(Meta, Opts, E) when not is_list(Opts) -> + form_error(Meta, ?key(E, file), elixir_expand, {invalid_args, 'receive'}); +'receive'(Meta, Opts, E) -> + RaiseError = fun(Key) -> + form_error(Meta, ?key(E, file), ?MODULE, {duplicated_clauses, 'receive', Key}) end, - - {TClauses, {TS, ReverseCV}} = - lists:mapfoldl(Transformer, {S, []}, DecoupledClauses), - - % Now get all the variables defined inside each clause - CV = lists:reverse(ReverseCV), - AllVars = lists:foldl(fun elixir_scope:merge_vars/2, [], CV), - - % Create a new scope that contains a list of all variables - % defined inside all the clauses. It returns this new scope and - % a list of tuples where the first element is the variable name, - % the second one is the new pointer to the variable and the third - % is the old pointer. - {FinalVars, FS} = lists:mapfoldl(fun({Key, Val}, Acc) -> - normalize_vars(Key, Val, Acc) - end, TS, AllVars), - - % Expand all clauses by adding a match operation at the end - % that defines variables missing in one clause to the others. - expand_clauses(?line(Meta), TClauses, CV, FinalVars, [], FS). - -expand_clauses(Line, [Clause|T], [ClauseVars|V], FinalVars, Acc, S) -> - case generate_match_vars(FinalVars, ClauseVars, [], []) of - {[], []} -> - expand_clauses(Line, T, V, FinalVars, [Clause|Acc], S); - {Left, Right} -> - MatchExpr = generate_match(Line, Left, Right), - ClauseExprs = element(5, Clause), - [Final|RawClauseExprs] = lists:reverse(ClauseExprs), - - % If the last sentence has a match clause, we need to assign its value - % in the variable list. If not, we insert the variable list before the - % final clause in order to keep it tail call optimized. 
- {FinalClauseExprs, FS} = case has_match_tuple(Final) of - true -> - case Final of - {match, _, {var, _, UserVarName} = UserVar, _} when UserVarName /= '_' -> - {[UserVar,MatchExpr,Final|RawClauseExprs], S}; - _ -> - {VarName, _, SS} = elixir_scope:build_var('_', S), - StorageVar = {var, Line, VarName}, - StorageExpr = {match, Line, StorageVar, Final}, - {[StorageVar,MatchExpr,StorageExpr|RawClauseExprs], SS} - end; - false -> - {[Final,MatchExpr|RawClauseExprs], S} - end, - - FinalClause = setelement(5, Clause, lists:reverse(FinalClauseExprs)), - expand_clauses(Line, T, V, FinalVars, [FinalClause|Acc], FS) + ok = assert_at_most_once('do', Opts, 0, RaiseError), + ok = assert_at_most_once('after', Opts, 0, RaiseError), + EE = E#{export_vars := []}, + {EClauses, EVars} = lists:mapfoldl(fun(X, Acc) -> expand_receive(Meta, X, Acc, EE) end, [], Opts), + {EClauses, elixir_env:mergev(EVars, E)}. + +expand_receive(_Meta, {'do', nil} = Do, Acc, _E) -> + {Do, Acc}; +expand_receive(Meta, {'do', _} = Do, Acc, E) -> + Fun = expand_one(Meta, 'receive', 'do', fun head/2), + expand_with_export(Meta, 'receive', Fun, Do, Acc, E); +expand_receive(Meta, {'after', [_]} = After, Acc, E) -> + Fun = expand_one(Meta, 'receive', 'after', fun elixir_expand:expand_args/2), + expand_with_export(Meta, 'receive', Fun, After, Acc, E); +expand_receive(Meta, {'after', _}, _Acc, E) -> + form_error(Meta, ?key(E, file), ?MODULE, multiple_after_clauses_in_receive); +expand_receive(Meta, {Key, _}, _Acc, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {unexpected_option, 'receive', Key}). + +%% Try + +'try'(Meta, [], E) -> + form_error(Meta, ?key(E, file), elixir_expand, {missing_option, 'try', [do]}); +'try'(Meta, [{do, _}], E) -> + form_error(Meta, ?key(E, file), elixir_expand, {missing_option, 'try', ['catch', 'rescue', 'after', 'else']}); +'try'(Meta, Opts, E) when not is_list(Opts) -> + form_error(Meta, ?key(E, file), elixir_expand, {invalid_args, 'try'}); +'try'(Meta, Opts, E) -> + RaiseError = fun(Key) -> + form_error(Meta, ?key(E, file), ?MODULE, {duplicated_clauses, 'try', Key}) + end, + ok = assert_at_most_once('do', Opts, 0, RaiseError), + ok = assert_at_most_once('rescue', Opts, 0, RaiseError), + ok = assert_at_most_once('catch', Opts, 0, RaiseError), + ok = assert_at_most_once('else', Opts, 0, RaiseError), + ok = assert_at_most_once('after', Opts, 0, RaiseError), + {lists:map(fun(X) -> expand_try(Meta, X, E) end, Opts), E}. + +expand_try(_Meta, {'do', Expr}, E) -> + {EExpr, _} = elixir_expand:expand(Expr, E), + {'do', EExpr}; +expand_try(_Meta, {'after', Expr}, E) -> + {EExpr, _} = elixir_expand:expand(Expr, E), + {'after', EExpr}; +expand_try(Meta, {'else', _} = Else, E) -> + Fun = expand_one(Meta, 'try', 'else', fun head/2), + expand_without_export(Meta, 'try', Fun, Else, E); +expand_try(Meta, {'catch', _} = Catch, E) -> + expand_without_export(Meta, 'try', fun expand_catch/3, Catch, E); +expand_try(Meta, {'rescue', _} = Rescue, E) -> + expand_without_export(Meta, 'try', fun expand_rescue/3, Rescue, E); +expand_try(Meta, {Key, _}, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {unexpected_option, 'try', Key}). + +expand_catch(_Meta, [_] = Args, E) -> + head(Args, E); +expand_catch(_Meta, [_, _] = Args, E) -> + head(Args, E); +expand_catch(Meta, _, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {wrong_number_of_args_for_clause, "one or two args", 'try', 'catch'}). 
+ +expand_rescue(Meta, [Arg], E) -> + case expand_rescue(Arg, E) of + {EArg, EA} -> + {[EArg], EA}; + false -> + form_error(Meta, ?key(E, file), ?MODULE, invalid_rescue_clause) end; - -expand_clauses(_Line, [], [], _FinalVars, Acc, S) -> - {lists:reverse(Acc), S}. - -% Handle each key/value clause pair and translate them accordingly. - -each_clause(Export, {match, Meta, [Condition], Expr}, Return, S) -> - Fun = wrap_export_fun(Export, fun elixir_translator:translate_args/2), - {Arg, Guards} = extract_guards(Condition), - clause(?line(Meta), Fun, [Arg], Expr, Guards, Return, S); - -each_clause(Export, {expr, Meta, [Condition], Expr}, Return, S) -> - {TCondition, SC} = (wrap_export_fun(Export, fun elixir_translator:translate/2))(Condition, S), - {TExpr, SB} = elixir_translator:translate_block(Expr, Return, SC), - {{clause, ?line(Meta), [TCondition], [], unblock(TExpr)}, SB}. - -wrap_export_fun(Meta, Fun) -> - case lists:keyfind(export_head, 1, Meta) of - {export_head, true} -> - Fun; +expand_rescue(Meta, _, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {wrong_number_of_args_for_clause, "one arg", 'try', 'rescue'}). + +%% rescue var +expand_rescue({Name, _, Atom} = Var, E) when is_atom(Name), is_atom(Atom) -> + match(fun elixir_expand:expand/2, Var, E); + +%% rescue var in [Exprs] +expand_rescue({in, Meta, [Left, Right]}, E) -> + {ELeft, EL} = match(fun elixir_expand:expand/2, Left, E), + {ERight, ER} = elixir_expand:expand(Right, EL), + + case ELeft of + {Name, _, Atom} when is_atom(Name), is_atom(Atom) -> + case normalize_rescue(ERight) of + false -> false; + Other -> {{in, Meta, [ELeft, Other]}, ER} + end; _ -> - fun(Args, S) -> - {TArgs, TS} = Fun(Args, S), - {TArgs, TS#elixir_scope{export_vars = S#elixir_scope.export_vars}} - end - end. - -% Check if the given expression is a match tuple. -% This is a small optimization to allow us to change -% existing assignments instead of creating new ones every time. - -has_match_tuple({'receive', _, _, _, _}) -> - true; -has_match_tuple({'receive', _, _}) -> - true; -has_match_tuple({'case', _, _, _}) -> - true; -has_match_tuple({match, _, _, _}) -> - true; -has_match_tuple({'fun', _, {clauses, _}}) -> - false; -has_match_tuple(H) when is_tuple(H) -> - has_match_tuple(tuple_to_list(H)); -has_match_tuple(H) when is_list(H) -> - lists:any(fun has_match_tuple/1, H); -has_match_tuple(_) -> false. - -% Normalize the given var in between clauses -% by picking one value as reference and retriving -% its previous value. - -normalize_vars(Key, Value, #elixir_scope{vars=Vars,export_vars=ClauseVars} = S) -> - VS = S#elixir_scope{ - vars=orddict:store(Key, Value, Vars), - export_vars=orddict:store(Key, Value, ClauseVars) - }, - - Expr = case orddict:find(Key, Vars) of - {ok, {PreValue, _}} -> {var, 0, PreValue}; - error -> {atom, 0, nil} - end, - - {{Key, Value, Expr}, VS}. - -% Generate match vars by checking if they were updated -% or not and assigning the previous value. - -generate_match_vars([{Key, Value, Expr}|T], ClauseVars, Left, Right) -> - case orddict:find(Key, ClauseVars) of - {ok, Value} -> - generate_match_vars(T, ClauseVars, Left, Right); - {ok, Clause} -> - generate_match_vars(T, ClauseVars, - [{var, 0, element(1, Value)}|Left], - [{var, 0, element(1, Clause)}|Right]); - error -> - generate_match_vars(T, ClauseVars, - [{var, 0, element(1, Value)}|Left], [Expr|Right]) + false end; -generate_match_vars([], _ClauseVars, Left, Right) -> - {Left, Right}. 
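For reference, a standalone restatement (invented module name) of normalize_rescue/1 above: the right-hand side of "var in X" in a rescue clause must be an underscore, a single alias, or a list of aliases; anything else makes the clause invalid.

    -module(rescue_sketch).
    -export([normalize/1]).

    %% rescue e in _                 -> the underscore pattern is kept as-is
    %% rescue e in RuntimeError      -> a single alias becomes a one-element list
    %% rescue e in [Error1, Error2]  -> a list of aliases is accepted as-is
    %% anything else                 -> false, which triggers invalid_rescue_clause
    normalize({'_', _, Ctx} = Underscore) when is_atom(Ctx) -> Underscore;
    normalize(Alias) when is_atom(Alias) -> [Alias];
    normalize(Other) ->
      is_list(Other) andalso lists:all(fun is_atom/1, Other) andalso Other.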
- -generate_match(Line, [Left], [Right]) -> - {match, Line, Left, Right}; - -generate_match(Line, LeftVars, RightVars) -> - {match, Line, {tuple, Line, LeftVars}, {tuple, Line, RightVars}}. +%% rescue Error => _ in [Error] +expand_rescue(Arg, E) -> + expand_rescue({in, [], [{'_', [], ?key(E, module)}, Arg]}, E). + +normalize_rescue({'_', _, Atom} = N) when is_atom(Atom) -> N; +normalize_rescue(Atom) when is_atom(Atom) -> [Atom]; +normalize_rescue(Other) -> + is_list(Other) andalso lists:all(fun is_atom/1, Other) andalso Other. + +%% Expansion helpers + +%% Returns a function that expands arguments +%% considering we have at maximum one entry. +expand_one(Meta, Kind, Key, Fun) -> + fun + ([_] = Args, E) -> + Fun(Args, E); + (_, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {wrong_number_of_args_for_clause, "one arg", Kind, Key}) + end. -unblock({'block', _, Exprs}) -> Exprs; -unblock(Exprs) -> [Exprs]. +%% Expands all -> pairs in a given key keeping the overall vars. +expand_with_export(Meta, Kind, Fun, {Key, Clauses}, Acc, E) when is_list(Clauses) -> + Transformer = fun(Clause, Vars) -> + {EClause, EC} = clause(Meta, {Kind, Key}, Fun, Clause, E), + {EClause, elixir_env:merge_vars(Vars, ?key(EC, export_vars))} + end, + {EClauses, EVars} = lists:mapfoldl(Transformer, Acc, Clauses), + {{Key, EClauses}, EVars}; +expand_with_export(Meta, Kind, _Fun, {Key, _}, _Acc, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {bad_or_missing_clauses, {Kind, Key}}). + +%% Expands all -> pairs in a given key but do not keep the overall vars. +expand_without_export(Meta, Kind, Fun, {Key, Clauses}, E) when is_list(Clauses) -> + Transformer = fun(Clause) -> + {EClause, _} = clause(Meta, {Kind, Key}, Fun, Clause, E), + EClause + end, + {Key, lists:map(Transformer, Clauses)}; +expand_without_export(Meta, Kind, _Fun, {Key, _}, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {bad_or_missing_clauses, {Kind, Key}}). + +assert_at_most_once(_Kind, [], _Count, _Fun) -> ok; +assert_at_most_once(Kind, [{Kind, _} | _], 1, ErrorFun) -> + ErrorFun(Kind); +assert_at_most_once(Kind, [{Kind, _} | Rest], Count, Fun) -> + assert_at_most_once(Kind, Rest, Count + 1, Fun); +assert_at_most_once(Kind, [_ | Rest], Count, Fun) -> + assert_at_most_once(Kind, Rest, Count, Fun). + +format_error({bad_or_missing_clauses, {Kind, Key}}) -> + io_lib:format("expected -> clauses for :~ts in \"~ts\"", [Key, Kind]); +format_error({bad_or_missing_clauses, Kind}) -> + io_lib:format("expected -> clauses in \"~ts\"", [Kind]); + +format_error({duplicated_clauses, Kind, Key}) -> + io_lib:format("duplicated :~ts clauses given for \"~ts\"", [Key, Kind]); + +format_error({unexpected_option, Kind, Option}) -> + io_lib:format("unexpected option ~ts in \"~ts\"", ['Elixir.Macro':to_string(Option), Kind]); + +format_error({wrong_number_of_args_for_clause, Expected, Kind, Key}) -> + io_lib:format("expected ~ts for :~ts clauses (->) in \"~ts\"", [Expected, Key, Kind]); + +format_error(multiple_after_clauses_in_receive) -> + "expected a single -> clause for :after in \"receive\""; + +format_error(invalid_rescue_clause) -> + "invalid \"rescue\" clause. The clause should match on an alias, a variable " + "or be in the \"var in [alias]\" format". diff --git a/lib/elixir/src/elixir_code_server.erl b/lib/elixir/src/elixir_code_server.erl index 1a1f5e08bf5..1709b33e164 100644 --- a/lib/elixir/src/elixir_code_server.erl +++ b/lib/elixir/src/elixir_code_server.erl @@ -6,13 +6,10 @@ -define(timeout, 30000). 
-record(elixir_code_server, { - compilation_status=[], - argv=[], - loaded=[], - at_exit=[], - pool={[],0}, - compiler_options=[{docs,true},{debug_info,true},{warnings_as_errors,false}], - erl_compiler_options=nil + loaded=#{}, + mod_pool={[], 0}, + mod_ets=#{}, + compilation_status=#{} }). call(Args) -> @@ -27,114 +24,102 @@ start_link() -> gen_server:start_link({local, ?MODULE}, ?MODULE, ok, []). init(ok) -> - code:ensure_loaded('Elixir.Macro.Env'), - code:ensure_loaded('Elixir.Module.LocalsTracker'), - code:ensure_loaded('Elixir.Kernel.LexicalTracker'), + %% We attempt to load those modules here so throughout + %% the codebase we can avoid code:is_loaded/1 checks. + _ = code:ensure_loaded('Elixir.Macro.Env'), + + %% The table where we store module definitions + _ = ets:new(elixir_modules, [set, protected, named_table, {read_concurrency, true}]), + {ok, #elixir_code_server{}}. +handle_call({defmodule, Pid, Tuple}, _From, Config) -> + {Ref, New} = defmodule(Pid, Tuple, Config), + {reply, Ref, New}; + +handle_call({lookup, Module}, _From, Config) -> + {reply, ets:lookup(elixir_modules, Module), Config}; + +handle_call({undefmodule, Ref}, _From, Config) -> + {reply, ok, undefmodule(Ref, Config)}; + handle_call({acquire, Path}, From, Config) -> Current = Config#elixir_code_server.loaded, - case orddict:find(Path, Current) of + case maps:find(Path, Current) of {ok, true} -> {reply, loaded, Config}; {ok, {Ref, List}} when is_list(List), is_reference(Ref) -> - Queued = orddict:store(Path, {Ref, [From|List]}, Current), + Queued = maps:put(Path, {Ref, [From | List]}, Current), {reply, {queued, Ref}, Config#elixir_code_server{loaded=Queued}}; error -> - Queued = orddict:store(Path, {make_ref(), []}, Current), + Queued = maps:put(Path, {make_ref(), []}, Current), {reply, proceed, Config#elixir_code_server{loaded=Queued}} end; handle_call(loaded, _From, Config) -> - {reply, [F || {F, true} <- Config#elixir_code_server.loaded], Config}; - -handle_call(at_exit, _From, Config) -> - {reply, Config#elixir_code_server.at_exit, Config}; - -handle_call(flush_at_exit, _From, Config) -> - {reply, Config#elixir_code_server.at_exit, Config#elixir_code_server{at_exit=[]}}; - -handle_call(argv, _From, Config) -> - {reply, Config#elixir_code_server.argv, Config}; - -handle_call(compiler_options, _From, Config) -> - {reply, Config#elixir_code_server.compiler_options, Config}; + {reply, [F || {F, true} <- maps:to_list(Config#elixir_code_server.loaded)], Config}; handle_call({compilation_status, CompilerPid}, _From, Config) -> - CompilationStatusList = Config#elixir_code_server.compilation_status, - CompilationStatusListNew = orddict:erase(CompilerPid, CompilationStatusList), - CompilationStatus = orddict:fetch(CompilerPid, CompilationStatusList), - {reply, CompilationStatus, Config#elixir_code_server{compilation_status=CompilationStatusListNew}}; - -handle_call(retrieve_module_name, _From, Config) -> - case Config#elixir_code_server.pool of - {[H|T], Counter} -> - {reply, module_tuple(H), Config#elixir_code_server{pool={T,Counter}}}; + CompilationStatusList = Config#elixir_code_server.compilation_status, + CompilationStatusListNew = maps:remove(CompilerPid, CompilationStatusList), + CompilationStatus = maps:get(CompilerPid, CompilationStatusList), + {reply, CompilationStatus, + Config#elixir_code_server{compilation_status=CompilationStatusListNew}}; + +handle_call(retrieve_compiler_module, _From, Config) -> + case Config#elixir_code_server.mod_pool of + {[H | T], Counter} -> + {reply, module_tuple(H), 
Config#elixir_code_server{mod_pool={T, Counter}}}; {[], Counter} -> - {reply, module_tuple(Counter), Config#elixir_code_server{pool={[],Counter+1}}} - end; - -handle_call(erl_compiler_options, _From, Config) -> - case Config#elixir_code_server.erl_compiler_options of - nil -> - Opts = erl_compiler_options(), - {reply, Opts, Config#elixir_code_server{erl_compiler_options=Opts}}; - Opts -> - {reply, Opts, Config} + {reply, module_tuple(Counter), Config#elixir_code_server{mod_pool={[], Counter+1}}} end; handle_call(Request, _From, Config) -> {stop, {badcall, Request}, Config}. -handle_cast({at_exit, AtExit}, Config) -> - {noreply, Config#elixir_code_server{at_exit=[AtExit|Config#elixir_code_server.at_exit]}}; - -handle_cast({argv, Argv}, Config) -> - {noreply, Config#elixir_code_server{argv=Argv}}; - -handle_cast({compiler_options, Options}, Config) -> - Final = orddict:merge(fun(_,_,V) -> V end, Config#elixir_code_server.compiler_options, Options), - {noreply, Config#elixir_code_server{compiler_options=Final}}; - handle_cast({register_warning, CompilerPid}, Config) -> CompilationStatusCurrent = Config#elixir_code_server.compilation_status, - CompilationStatusNew = orddict:store(CompilerPid, error, CompilationStatusCurrent), - case orddict:find(warnings_as_errors, Config#elixir_code_server.compiler_options) of + CompilationStatusNew = maps:put(CompilerPid, error, CompilationStatusCurrent), + CompilerOptions = elixir_config:get(compiler_options), + case maps:find(warnings_as_errors, CompilerOptions) of {ok, true} -> {noreply, Config#elixir_code_server{compilation_status=CompilationStatusNew}}; _ -> {noreply, Config} end; handle_cast({reset_warnings, CompilerPid}, Config) -> CompilationStatusCurrent = Config#elixir_code_server.compilation_status, - CompilationStatusNew = orddict:store(CompilerPid, ok, CompilationStatusCurrent), + CompilationStatusNew = maps:put(CompilerPid, ok, CompilationStatusCurrent), {noreply, Config#elixir_code_server{compilation_status=CompilationStatusNew}}; handle_cast({loaded, Path}, Config) -> Current = Config#elixir_code_server.loaded, - case orddict:find(Path, Current) of + case maps:find(Path, Current) of {ok, true} -> {noreply, Config}; {ok, {Ref, List}} when is_list(List), is_reference(Ref) -> - [Pid ! {elixir_code_server, Ref, loaded} || {Pid, _Tag} <- lists:reverse(List)], - Done = orddict:store(Path, true, Current), + _ = [Pid ! {elixir_code_server, Ref, loaded} || {Pid, _Tag} <- lists:reverse(List)], + Done = maps:put(Path, true, Current), {noreply, Config#elixir_code_server{loaded=Done}}; error -> - Done = orddict:store(Path, true, Current), + Done = maps:put(Path, true, Current), {noreply, Config#elixir_code_server{loaded=Done}} end; handle_cast({unload_files, Files}, Config) -> Current = Config#elixir_code_server.loaded, - Unloaded = lists:foldl(fun(File, Acc) -> orddict:erase(File, Acc) end, Current, Files), + Unloaded = maps:without(Files, Current), {noreply, Config#elixir_code_server{loaded=Unloaded}}; -handle_cast({return_module_name, H}, #elixir_code_server{pool={T,Counter}} = Config) -> - {noreply, Config#elixir_code_server{pool={[H|T],Counter}}}; +handle_cast({return_compiler_module, H}, #elixir_code_server{mod_pool={T, Counter}} = Config) -> + {noreply, Config#elixir_code_server{mod_pool={[H | T], Counter}}}; handle_cast(Request, Config) -> {stop, {badcast, Request}, Config}. 
-handle_info(_Request, Config) -> +handle_info({'DOWN', Ref, process, _Pid, _Reason}, Config) -> + {noreply, undefmodule(Ref, Config)}; + +handle_info(_Msg, Config) -> {noreply, Config}. terminate(_Reason, _Config) -> @@ -146,22 +131,17 @@ code_change(_Old, Config, _Extra) -> module_tuple(I) -> {list_to_atom("elixir_compiler_" ++ integer_to_list(I)), I}. -erl_compiler_options() -> - Key = "ERL_COMPILER_OPTIONS", - case os:getenv(Key) of - false -> []; - Str when is_list(Str) -> - case erl_scan:string(Str) of - {ok,Tokens,_} -> - case erl_parse:parse_term(Tokens ++ [{dot, 1}]) of - {ok,List} when is_list(List) -> List; - {ok,Term} -> [Term]; - {error,_Reason} -> - io:format("Ignoring bad term in ~ts\n", [Key]), - [] - end; - {error, {_,_,_Reason}, _} -> - io:format("Ignoring bad term in ~ts\n", [Key]), - [] - end +defmodule(Pid, Tuple, #elixir_code_server{mod_ets=ModEts} = Config) -> + ets:insert(elixir_modules, Tuple), + Ref = erlang:monitor(process, Pid), + Mod = erlang:element(1, Tuple), + {Ref, Config#elixir_code_server{mod_ets=maps:put(Ref, Mod, ModEts)}}. + +undefmodule(Ref, #elixir_code_server{mod_ets=ModEts} = Config) -> + case maps:find(Ref, ModEts) of + {ok, Mod} -> + ets:delete(elixir_modules, Mod), + Config#elixir_code_server{mod_ets=maps:remove(Ref, ModEts)}; + error -> + Config end. diff --git a/lib/elixir/src/elixir_compiler.erl b/lib/elixir/src/elixir_compiler.erl index cd5f52700a2..8531339122c 100644 --- a/lib/elixir/src/elixir_compiler.erl +++ b/lib/elixir/src/elixir_compiler.erl @@ -1,93 +1,92 @@ +%% Elixir compiler front-end to the Erlang backend. -module(elixir_compiler). --export([get_opt/1, string/2, quoted/2, file/1, file_to_path/2]). --export([core/0, module/4, eval_forms/3]). +-export([get_opt/1, string/2, quoted/2, bootstrap/0, + file/1, file/2, file_to_path/2, eval_forms/3]). -include("elixir.hrl"). -%% Public API - get_opt(Key) -> - Dict = elixir_code_server:call(compiler_options), - case lists:keyfind(Key, 1, Dict) of - false -> false; - {Key, Value} -> Value + Map = elixir_config:get(compiler_options), + case maps:find(Key, Map) of + {ok, Value} -> Value; + error -> false end. -%% Compilation entry points. - string(Contents, File) when is_list(Contents), is_binary(File) -> + string(Contents, File, nil). +string(Contents, File, Dest) -> Forms = elixir:'string_to_quoted!'(Contents, 1, File, []), - quoted(Forms, File). + quoted(Forms, File, Dest). quoted(Forms, File) when is_binary(File) -> - Previous = get(elixir_compiled), + quoted(Forms, File, nil). +quoted(Forms, File, Dest) -> + Previous = get(elixir_module_binaries), try - put(elixir_compiled, []), - elixir_lexical:run(File, fun + put(elixir_module_binaries, []), + elixir_lexical:run(File, Dest, fun (Pid) -> - Env = elixir:env_for_eval([{line,1},{file,File}]), + Env = elixir:env_for_eval([{line, 1}, {file, File}]), eval_forms(Forms, [], Env#{lexical_tracker := Pid}) end), - lists:reverse(get(elixir_compiled)) + lists:reverse(get(elixir_module_binaries)) after - put(elixir_compiled, Previous) + put(elixir_module_binaries, Previous) end. file(Relative) when is_binary(Relative) -> + file(Relative, nil). +file(Relative, Dest) -> File = filename:absname(Relative), {ok, Bin} = file:read_file(File), - string(elixir_utils:characters_to_list(Bin), File). + string(elixir_utils:characters_to_list(Bin), File, case Dest of + nil -> Dest; + _ -> filename:absname(Dest) + end). -file_to_path(File, Path) when is_binary(File), is_binary(Path) -> - Lists = file(File), - [binary_to_path(X, Path) || X <- Lists], - Lists. 
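The defmodule/undefmodule pair above ties every in-progress module to the process defining it: the entry goes into a public ETS table, a monitor is taken on the owner, and the 'DOWN' message removes the entry again. A minimal sketch of that pattern with invented names (sketch_modules table, track/2 and handle_down/2):

    -module(monitor_cleanup_sketch).
    -export([init/0, track/2, handle_down/2]).

    init() ->
      ets:new(sketch_modules, [set, public, named_table]).

    %% Register the module entry and monitor the defining process.
    track(Pid, {Module, _Info} = Entry) ->
      true = ets:insert(sketch_modules, Entry),
      Ref = erlang:monitor(process, Pid),
      {Ref, Module}.

    %% Called for {'DOWN', Ref, process, _Pid, _Reason}: drop the entry owned
    %% by the dead process and forget the monitor reference.
    handle_down(Ref, RefToModule) ->
      case maps:find(Ref, RefToModule) of
        {ok, Module} ->
          true = ets:delete(sketch_modules, Module),
          maps:remove(Ref, RefToModule);
        error ->
          RefToModule
      end.

Here the RefToModule map plays the role of the mod_ets field in the #elixir_code_server record.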
+file_to_path(File, Dest) when is_binary(File), is_binary(Dest) -> + Comp = file(File, Dest), + Abs = filename:absname(Dest), + _ = [binary_to_path(X, Abs) || X <- Comp], + Comp. -%% Evaluation +%% Evaluates the given code through the Erlang compiler. +%% It may end-up evaluating the code if it is deemed a +%% more efficient strategy depending on the code snippet. eval_forms(Forms, Vars, E) -> - case (?m(E, module) == nil) andalso allows_fast_compilation(Forms) of - true -> eval_compilation(Forms, Vars, E); - false -> code_loading_compilation(Forms, Vars, E) + case (?key(E, module) == nil) andalso allows_fast_compilation(Forms) of + true -> + Binding = [{Key, Value} || {_Name, _Kind, Key, Value} <- Vars], + {Result, _Binding, EE, _S} = elixir:eval_forms(Forms, Binding, E), + {Result, EE}; + false -> + compile(Forms, Vars, E) end. -eval_compilation(Forms, Vars, E) -> - Binding = [{Key, Value} || {_Name, _Kind, Key, Value} <- Vars], - {Result, _Binding, EE, _S} = elixir:eval_forms(Forms, Binding, E), - {Result, EE}. - -code_loading_compilation(Forms, Vars, #{line := Line} = E) -> - Dict = [{{Name, Kind}, {Value, 0}} || {Name, Kind, Value, _} <- Vars], +compile(Forms, Vars, #{line := Line, file := File} = E) -> + Dict = [{{Name, Kind}, {Value, 0, true}} || {Name, Kind, Value, _} <- Vars], S = elixir_env:env_to_scope_with_vars(E, Dict), {Expr, EE, _S} = elixir:quoted_to_erl(Forms, E, S), - {Module, I} = retrieve_module_name(), - Fun = code_fun(?m(E, module)), - Form = code_mod(Fun, Expr, Line, ?m(E, file), Module, Vars), + {Module, I} = retrieve_compiler_module(), + Fun = code_fun(?key(E, module)), + Form = code_mod(Fun, Expr, Line, File, Module, Vars), Args = list_to_tuple([V || {_, _, _, V} <- Vars]), - %% Pass {native, false} to speed up bootstrap - %% process when native is set to true - AllOpts = elixir_code_server:call(erl_compiler_options), - FinalOpts = AllOpts -- [native, warn_missing_spec], - module(Form, ?m(E, file), FinalOpts, true, fun(_, Binary) -> - %% If we have labeled locals, anonymous functions - %% were created and therefore we cannot ditch the - %% module - Purgeable = - case beam_lib:chunks(Binary, [labeled_locals]) of - {ok, {_, [{labeled_locals, []}]}} -> true; - _ -> false - end, - dispatch_loaded(Module, Fun, Args, Purgeable, I, EE) - end). + {Module, Binary} = elixir_erl_compiler:noenv_forms(Form, File, [nowarn_nomatch]), + code:load_binary(Module, in_memory, Binary), + + Purgeable = beam_lib:chunks(Binary, [labeled_locals]) == + {ok, {Module, [{labeled_locals, []}]}}, + dispatch(Module, Fun, Args, Purgeable, I, EE). -dispatch_loaded(Module, Fun, Args, Purgeable, I, E) -> +dispatch(Module, Fun, Args, Purgeable, I, E) -> Res = Module:Fun(Args), code:delete(Module), if Purgeable -> code:purge(Module), - return_module_name(I); + return_compiler_module(I); true -> ok end, @@ -100,71 +99,41 @@ code_mod(Fun, Expr, Line, File, Module, Vars) when is_binary(File), is_integer(L Tuple = {tuple, Line, [{var, Line, K} || {_, _, K, _} <- Vars]}, Relative = elixir_utils:relative_to_cwd(File), - [ - {attribute, Line, file, {elixir_utils:characters_to_list(Relative), 1}}, - {attribute, Line, module, Module}, - {attribute, Line, export, [{Fun, 1}, {'__RELATIVE__', 0}]}, - {function, Line, Fun, 1, [ - {clause, Line, [Tuple], [], [Expr]} - ]}, - {function, Line, '__RELATIVE__', 0, [ - {clause, Line, [], [], [elixir_utils:elixir_to_erl(Relative)]} - ]} - ]. 
+ [{attribute, Line, file, {elixir_utils:characters_to_list(Relative), 1}}, + {attribute, Line, module, Module}, + {attribute, Line, compile, no_auto_import}, + {attribute, Line, export, [{Fun, 1}, {'__RELATIVE__', 0}]}, + {function, Line, Fun, 1, [ + {clause, Line, [Tuple], [], [Expr]} + ]}, + {function, Line, '__RELATIVE__', 0, [ + {clause, Line, [], [], [elixir_erl:elixir_to_erl(Relative)]} + ]}]. -retrieve_module_name() -> - elixir_code_server:call(retrieve_module_name). +retrieve_compiler_module() -> + elixir_code_server:call(retrieve_compiler_module). -return_module_name(I) -> - elixir_code_server:cast({return_module_name, I}). +return_compiler_module(I) -> + elixir_code_server:cast({return_compiler_module, I}). allows_fast_compilation({'__block__', _, Exprs}) -> lists:all(fun allows_fast_compilation/1, Exprs); -allows_fast_compilation({defmodule,_,_}) -> true; +allows_fast_compilation({defmodule, _, _}) -> true; allows_fast_compilation(_) -> false. -%% INTERNAL API - -%% Compile the module by forms based on the scope information -%% executes the callback in case of success. This automatically -%% handles errors and warnings. Used by this module and elixir_module. -module(Forms, File, Opts, Callback) -> - Final = - case (get_opt(debug_info) == true) orelse - lists:member(debug_info, Opts) of - true -> [debug_info] ++ elixir_code_server:call(erl_compiler_options); - false -> elixir_code_server:call(erl_compiler_options) - end, - module(Forms, File, Final, false, Callback). - -module(Forms, File, Options, Bootstrap, Callback) when - is_binary(File), is_list(Forms), is_list(Options), is_boolean(Bootstrap), is_function(Callback) -> - Listname = elixir_utils:characters_to_list(File), - - case compile:noenv_forms([no_auto_import()|Forms], [return,{source,Listname}|Options]) of - {ok, ModuleName, Binary, Warnings} -> - format_warnings(Bootstrap, Warnings), - code:load_binary(ModuleName, Listname, Binary), - Callback(ModuleName, Binary); - {error, Errors, Warnings} -> - format_warnings(Bootstrap, Warnings), - format_errors(Errors) - end. - -no_auto_import() -> - {attribute, 0, compile, no_auto_import}. - -%% CORE HANDLING +%% Bootstraper -core() -> - application:start(elixir), - elixir_code_server:cast({compiler_options, [{docs,false},{internal,true}]}), - [core_file(File) || File <- core_main()]. +bootstrap() -> + {ok, _} = application:ensure_all_started(elixir), + Update = fun(Old) -> maps:merge(Old, #{docs => false, internal => true, + relative_paths => false}) end, + _ = elixir_config:update(compiler_options, Update), + [bootstrap_file(File) || File <- bootstrap_main()]. -core_file(File) -> +bootstrap_file(File) -> try Lists = file(File), - [binary_to_path(X, "lib/elixir/ebin") || X <- Lists], + _ = [binary_to_path(X, "lib/elixir/ebin") || X <- Lists], io:format("Compiled ~ts~n", [File]) catch Kind:Reason -> @@ -172,7 +141,7 @@ core_file(File) -> erlang:halt(1) end. 
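allows_fast_compilation/1 above decides whether eval_forms/3 can skip generating a throwaway module: a file that is just a block of defmodule calls can be evaluated directly. A standalone mirror of that predicate (invented module name):

    -module(fast_compilation_sketch).
    -export([fast/1]).

    %% fast({defmodule, [], [...]})                   => true
    %% fast({'__block__', [], [{defmodule, [], []}]}) => true
    %% fast({import, [], [...]})                      => false (goes through compile/3)
    fast({'__block__', _, Exprs}) -> lists:all(fun fast/1, Exprs);
    fast({defmodule, _, _})       -> true;
    fast(_)                       -> false.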
-core_main() -> +bootstrap_main() -> [<<"lib/elixir/lib/kernel.ex">>, <<"lib/elixir/lib/macro/env.ex">>, <<"lib/elixir/lib/keyword.ex">>, @@ -182,6 +151,8 @@ core_main() -> <<"lib/elixir/lib/code.ex">>, <<"lib/elixir/lib/module/locals_tracker.ex">>, <<"lib/elixir/lib/kernel/typespec.ex">>, + <<"lib/elixir/lib/kernel/utils.ex">>, + <<"lib/elixir/lib/behaviour.ex">>, <<"lib/elixir/lib/exception.ex">>, <<"lib/elixir/lib/protocol.ex">>, <<"lib/elixir/lib/stream/reducers.ex">>, @@ -203,22 +174,7 @@ core_main() -> binary_to_path({ModuleName, Binary}, CompilePath) -> Path = filename:join(CompilePath, atom_to_list(ModuleName) ++ ".beam"), - ok = file:write_file(Path, Binary), - Path. - -%% ERROR HANDLING - -format_errors([]) -> - exit({nocompile, "compilation failed but no error was raised"}); - -format_errors(Errors) -> - lists:foreach(fun ({File, Each}) -> - BinFile = elixir_utils:characters_to_binary(File), - lists:foreach(fun (Error) -> elixir_errors:handle_file_error(BinFile, Error) end, Each) - end, Errors). - -format_warnings(Bootstrap, Warnings) -> - lists:foreach(fun ({File, Each}) -> - BinFile = elixir_utils:characters_to_binary(File), - lists:foreach(fun (Warning) -> elixir_errors:handle_file_warning(Bootstrap, BinFile, Warning) end, Each) - end, Warnings). + case file:write_file(Path, Binary) of + ok -> Path; + {error, Reason} -> error('Elixir.File.Error':exception([{action, "write to"}, {path, Path}, {reason, Reason}])) + end. diff --git a/lib/elixir/src/elixir_config.erl b/lib/elixir/src/elixir_config.erl new file mode 100644 index 00000000000..9a878806cad --- /dev/null +++ b/lib/elixir/src/elixir_config.erl @@ -0,0 +1,65 @@ +-module(elixir_config). +-compile({no_auto_import, [get/1]}). +-export([new/1, delete/1, put/2, get/1, update/2, get_and_put/2]). +-export([start_link/0, init/1, handle_call/3, handle_cast/2, + handle_info/2, code_change/3, terminate/2]). +-behaviour(gen_server). + +%% public api + +new(Opts) -> + Tab = ets:new(?MODULE, [named_table, public, {read_concurrency, true}]), + true = ets:insert_new(?MODULE, Opts), + Tab. + +delete(?MODULE) -> + ets:delete(?MODULE). + +put(Key, Value) -> + gen_server:call(?MODULE, {put, Key, Value}). + +get(Key) -> + case ets:lookup(?MODULE, Key) of + [{_, Value}] -> Value; + [] -> nil + end. + +update(Key, Fun) -> + gen_server:call(?MODULE, {update, Key, Fun}). + +get_and_put(Key, Value) -> + gen_server:call(?MODULE, {get_and_put, Key, Value}). + +start_link() -> + gen_server:start_link({local, ?MODULE}, ?MODULE, ?MODULE, []). + +%% gen_server api + +init(Tab) -> + %% Ets table must be writable + public = ets:info(Tab, protection), + {ok, Tab}. + +handle_call({put, Key, Value}, _From, Tab) -> + ets:insert(Tab, {Key, Value}), + {reply, ok, Tab}; +handle_call({update, Key, Fun}, _From, Tab) -> + Value = Fun(get(Key)), + ets:insert(Tab, {Key, Value}), + {reply, Value, Tab}; +handle_call({get_and_put, Key, Value}, _From, Tab) -> + OldValue = get(Key), + ets:insert(Tab, {Key, Value}), + {reply, OldValue, Tab}. + +handle_cast(Cast, Tab) -> + {stop, {bad_cast, Cast}, Tab}. + +handle_info(_Msg, Tab) -> + {noreply, Tab}. + +code_change(_OldVsn, Tab, _Extra) -> + {ok, Tab}. + +terminate(_Reason, _Tab) -> + ok. diff --git a/lib/elixir/src/elixir_counter.erl b/lib/elixir/src/elixir_counter.erl deleted file mode 100644 index 35cfd160338..00000000000 --- a/lib/elixir/src/elixir_counter.erl +++ /dev/null @@ -1,38 +0,0 @@ --module(elixir_counter). 
--export([start_link/0, init/1, handle_call/3, handle_cast/2, - handle_info/2, terminate/2, code_change/3, next/0]). --behaviour(gen_server). - --define(timeout, 1000). %% 1 second --define(limit, 4294967296). %% 2^32 - -next() -> - gen_server:call(?MODULE, next, ?timeout). - -start_link() -> - gen_server:start_link({local, ?MODULE}, ?MODULE, 0, []). - -init(Counter) -> - {ok, Counter}. - -handle_call(next, _From, Counter) -> - {reply, Counter, bump(Counter)}; -handle_call(Request, _From, Counter) -> - {stop, {badcall, Request}, Counter}. - -handle_cast(Request, Counter) -> - {stop, {badcast, Request}, Counter}. - -handle_info(_Request, Counter) -> - {noreply, Counter}. - -terminate(_Reason, _Counter) -> - ok. - -code_change(_Old, Counter, _Extra) -> - {ok, Counter}. - -bump(Counter) when Counter < ?limit -> - Counter + 1; -bump(_Counter) -> - 0. diff --git a/lib/elixir/src/elixir_def.erl b/lib/elixir/src/elixir_def.erl index e7c134e851a..94e1980d9ee 100644 --- a/lib/elixir/src/elixir_def.erl +++ b/lib/elixir/src/elixir_def.erl @@ -1,61 +1,94 @@ % Holds the logic responsible for function definitions (def(p) and defmacro(p)). -module(elixir_def). --export([table/1, clauses_table/1, setup/1, - cleanup/1, reset_last/1, lookup_definition/2, - delete_definition/2, store_definition/6, unwrap_definitions/1, - store_each/8, format_error/1]). +-export([setup/1, reset_last/1, local_for/4, + take_definition/2, store_definition/5, store_definition/9, + fetch_definitions/2, format_error/1]). -include("elixir.hrl"). - --define(attr, '__def_table'). --define(clauses_attr, '__clauses_table'). - -%% Table management functions. Called internally. - -table(Module) -> - ets:lookup_element(Module, ?attr, 2). - -clauses_table(Module) -> - ets:lookup_element(Module, ?clauses_attr, 2). +-define(last_def, {elixir, last_def}). setup(Module) -> - ets:insert(Module, {?attr, ets:new(Module, [set, public])}), - ets:insert(Module, {?clauses_attr, ets:new(Module, [bag, public])}), reset_last(Module), ok. -cleanup(Module) -> - ets:delete(table(Module)), - ets:delete(clauses_table(Module)). - -%% Reset the last item. Useful when evaling code. reset_last(Module) -> - ets:insert(table(Module), {last, []}). + ets:insert(elixir_module:data_table(Module), {?last_def, []}). + +local_for(Module, Name, Arity, Kinds) -> + Tuple = {Name, Arity}, + + try + Table = elixir_module:defs_table(Module), + {ets:lookup(Table, {def, Tuple}), ets:lookup(Table, {clauses, Tuple})} + of + {[{_, Kind, Meta, File, _, _}], Clauses} -> + case (Kinds == all) orelse (lists:member(Kind, Kinds)) of + true -> elixir_erl:definition_to_anonymous(File, Module, Tuple, Kind, Meta, + [Clause || {_, Clause} <- Clauses]); + false -> false + end; + {[], _} -> + false + catch + _:_ -> false + end. + +%% Take a definition out of the table -%% Looks up a definition from the database. -lookup_definition(Module, Tuple) -> - case ets:lookup(table(Module), Tuple) of +take_definition(Module, Tuple) -> + Table = elixir_module:defs_table(Module), + case ets:take(Table, {def, Tuple}) of [Result] -> - CTable = clauses_table(Module), - {Result, [Clause || {_, Clause} <- ets:lookup(CTable, Tuple)]}; - _ -> + {Result, [Clause || {_, Clause} <- ets:take(Table, {clauses, Tuple})]}; + [] -> false end. -delete_definition(Module, Tuple) -> - ets:delete(table(Module), Tuple), - ets:delete(clauses_table(Module), Tuple). 
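A hedged usage sketch for the elixir_config module introduced earlier in this patch (the demo module and call sequence are invented, and it assumes no elixir_config server is already registered): reads go straight to the ETS table, while put/update/get_and_put are serialized through the gen_server.

    -module(elixir_config_demo).
    -export([demo/0]).

    demo() ->
      _Tab = elixir_config:new([{compiler_options, #{docs => true}}]),
      {ok, _Pid} = elixir_config:start_link(),
      #{docs := true} = elixir_config:get(compiler_options),        %% plain ETS read
      Updated = elixir_config:update(compiler_options,
                                     fun(Old) -> Old#{docs => false} end),
      #{docs := false} = Updated,
      Previous = elixir_config:get_and_put(compiler_options, #{}),  %% swap, return old value
      #{docs := false} = Previous,
      elixir_config:delete(elixir_config),
      ok.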
+%% Fetch all available definitions + +fetch_definitions(File, Module) -> + Table = elixir_module:defs_table(Module), + Entries = ets:match(Table, {{def, '$1'}, '_', '_', '_', '_', '_'}), + {All, Private} = fetch_definition(lists:sort(Entries), File, Module, Table, [], []), + Unreachable = elixir_locals:warn_unused_local(File, Module, Private), + elixir_locals:ensure_no_import_conflict(File, Module, All), + {All, Unreachable}. + +fetch_definition([[Tuple] | T], File, Module, Table, All, Private) -> + [{_, Kind, Meta, _, Check, {Defaults, _, _}}] = ets:lookup(Table, {def, Tuple}), + + try ets:lookup_element(Table, {clauses, Tuple}, 2) of + Clauses -> + Unwrapped = + {Tuple, Kind, Meta, Clauses}, + NewPrivate = + case (Kind == defp) orelse (Kind == defmacrop) of + true -> + WarnMeta = case Check of true -> Meta; false -> false end, + [{Tuple, Kind, WarnMeta, Defaults} | Private]; + false -> + Private + end, + fetch_definition(T, File, Module, Table, [Unwrapped | All], NewPrivate) + catch + error:badarg -> + warn_bodyless_function(Check, Meta, File, Module, Kind, Tuple), + fetch_definition(T, File, Module, Table, All, Private) + end; + +fetch_definition([], _File, _Module, _Table, All, Private) -> + {All, Private}. -% Invoked by the wrap definition with the function abstract tree. -% Each function is then added to the function table. +%% Section for storing definitions -store_definition(Line, Kind, CheckClauses, Call, Body, Pos) -> - E = (elixir_locals:get_cached_env(Pos))#{line := Line}, - {NameAndArgs, Guards} = elixir_clauses:extract_guards(Call), +store_definition(Kind, CheckClauses, Call, Body, Pos) -> + #{line := Line} = E = elixir_locals:get_cached_env(Pos), + {NameAndArgs, Guards} = elixir_utils:extract_guards(Call), {Name, Args} = case NameAndArgs of {N, _, A} when is_atom(N), is_atom(A) -> {N, []}; {N, _, A} when is_atom(N), is_list(A) -> {N, A}; - _ -> elixir_errors:form_error(Line, ?m(E, file), ?MODULE, {invalid_def, Kind, NameAndArgs}) + _ -> elixir_errors:form_error([{line, Line}], ?key(E, file), ?MODULE, + {invalid_def, Kind, NameAndArgs}) end, %% Now that we have verified the call format, @@ -64,300 +97,231 @@ store_definition(Line, Kind, CheckClauses, Call, Body, Pos) -> DoCheckClauses = (not lists:keymember(context, 1, Meta)) andalso (CheckClauses), %% Check if there is a file information in the definition. - %% If so, we assume this come from another source and we need - %% to linify taking into account keep line numbers. - {File, Key} = case lists:keyfind(file, 1, Meta) of - {file, Bin} when is_binary(Bin) -> {Bin, keep}; - _ -> {nil, line} - end, + %% If so, we assume this come from another source and + %% we need to linify taking into account keep line numbers. + %% + %% Line and File will always point to the caller. __ENV__.line + %% will always point to the quoted one and __ENV__.file will + %% always point to the one at @file or the quoted one. 
+ {Location, Key} = + case elixir_utils:meta_location(Meta) of + {_, _} = Keep -> {Keep, keep}; + nil -> {nil, line} + end, + Arity = length(Args), LinifyArgs = elixir_quote:linify(Line, Key, Args), LinifyGuards = elixir_quote:linify(Line, Key, Guards), LinifyBody = elixir_quote:linify(Line, Key, Body), + Generated = case DoCheckClauses of true -> []; false -> ?generated([]) end, + + {EL, MetaLocation} = + case retrieve_location(Location, ?key(E, module)) of + {F, L} -> + {E#{file := F}, [{line, Line}, {location, {F, L}} | Generated]}; + nil -> + {E, [{line, Line} | Generated]} + end, - assert_no_aliases_name(Line, Name, Args, E), - store_definition(Line, Kind, DoCheckClauses, Name, - LinifyArgs, LinifyGuards, LinifyBody, File, E). + assert_no_aliases_name(MetaLocation, Name, Args, EL), + assert_valid_name(MetaLocation, Kind, Name, Args, EL), + store_definition(MetaLocation, Kind, DoCheckClauses, Name, Arity, + LinifyArgs, LinifyGuards, LinifyBody, ?key(E, file), EL). -store_definition(Line, Kind, CheckClauses, Name, Args, Guards, Body, MetaFile, #{module := Module} = ER) -> - Arity = length(Args), +store_definition(Meta, Kind, CheckClauses, Name, Arity, DefaultsArgs, Guards, Body, File, ER) -> + Module = ?key(ER, module), Tuple = {Name, Arity}, E = ER#{function := Tuple}, - elixir_locals:record_definition(Tuple, Kind, Module), - Location = retrieve_location(Line, MetaFile, Module), - {Function, Defaults, Super} = translate_definition(Kind, Line, Module, Name, Args, Guards, Body, E), + elixir_locals:record_definition(Tuple, Kind, Module), + {Args, Defaults} = unpack_defaults(Kind, Meta, Name, DefaultsArgs, E), + Clauses = [elixir_clauses:def(Clause, E) || + Clause <- def_to_clauses(Kind, Meta, Args, Guards, Body, E)], DefaultsLength = length(Defaults), elixir_locals:record_defaults(Tuple, Kind, Module, DefaultsLength), - - File = ?m(E, file), - Table = table(Module), - CTable = clauses_table(Module), - - compile_super(Module, Super, E), - check_previous_defaults(Table, Line, Name, Arity, Kind, DefaultsLength, E), - - store_each(CheckClauses, Kind, File, Location, - Table, CTable, DefaultsLength, Function), - [store_each(false, Kind, File, Location, Table, CTable, 0, - default_function_for(Kind, Name, Default)) || Default <- Defaults], - - make_struct_available(Kind, Module, Name, Args), - {Name, Arity}. - -%% @on_definition - -run_on_definition_callbacks(Kind, Line, Module, Name, Args, Guards, Expr, E) -> - case elixir_compiler:get_opt(internal) of - true -> - ok; - _ -> - Env = elixir_env:linify({Line, E}), - Callbacks = 'Elixir.Module':get_attribute(Module, on_definition), - [Mod:Fun(Env, Kind, Name, Args, Guards, Expr) || {Mod, Fun} <- Callbacks] - end. - -make_struct_available(def, Module, '__struct__', []) -> - case erlang:get(elixir_compiler_pid) of - undefined -> ok; - Pid -> Pid ! {struct_available, Module} - end; -make_struct_available(_, _, _, _) -> - ok. - -%% Retrieve location from meta file or @file, otherwise nil - -retrieve_location(Line, File, Module) -> - case get_location_attribute(Module) of - nil when not is_binary(File) -> + check_previous_defaults(Meta, Module, Name, Arity, Kind, DefaultsLength, E), + run_on_definition_callbacks(Kind, Module, Name, DefaultsArgs, Guards, Body, E), + + store_definition(CheckClauses, Kind, Meta, Name, Arity, File, + Module, DefaultsLength, Clauses), + [store_definition(false, Kind, Meta, Name, length(DefaultArgs), File, + Module, 0, [Default]) || {_, DefaultArgs, _, _} = Default <- Defaults], + Tuple. 
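take_definition/2 and fetch_definitions/2 above, together with the store_definition/9 clause further down, agree on the defs-table layout: one {def, {Name, Arity}} row carrying kind, metadata, file, the check flag and a defaults bookkeeping triple, plus one {clauses, {Name, Arity}} row per stored clause. A rough picture of the rows as Elixir terms; the field values are illustrative and real clause bodies hold fully expanded Elixir AST:

    # {{:def, tuple}, kind, meta, file, check?, {max_defaults, has_body?, last_defaults}}
    def_row = {{:def, {:id, 1}}, :def, [line: 10], "lib/example.ex", true, {0, true, 0}}

    # one row per clause: {{:clauses, tuple}, {meta, args, guards, body}}
    clause_row = {{:clauses, {:id, 1}}, {[line: 10], [{:x, [], nil}], [], {:x, [], nil}}}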
+ +retrieve_location(Location, Module) -> + case ets:take(elixir_module:data_table(Module), file) of + [] when is_tuple(Location) -> + {File, Line} = Location, + {elixir_utils:relative_to_cwd(File), Line}; + [] -> nil; - nil -> - {normalize_location(File), Line}; - X when is_binary(X) -> + [{file, File, _, _}] when is_binary(File) -> 'Elixir.Module':delete_attribute(Module, file), - {normalize_location(X), 0}; - {X, L} when is_binary(X) andalso is_integer(L) -> + {elixir_utils:relative_to_cwd(File), 0}; + [{file, {File, Line}, _, _}] when is_binary(File) andalso is_integer(Line) -> 'Elixir.Module':delete_attribute(Module, file), - {normalize_location(X), L} + {elixir_utils:relative_to_cwd(File), Line} end. -get_location_attribute(Module) -> - case elixir_compiler:get_opt(internal) of - true -> nil; - false -> 'Elixir.Module':get_attribute(Module, file) - end. - -normalize_location(X) -> - elixir_utils:characters_to_list(elixir_utils:relative_to_cwd(X)). - -%% Compile super - -compile_super(Module, true, #{function := Function}) -> - elixir_def_overridable:store(Module, Function, true); -compile_super(_Module, _, _E) -> ok. - -%% Translate the given call and expression given -%% and then store it in memory. - -translate_definition(Kind, Line, Module, Name, Args, Guards, Body, E) when is_integer(Line) -> - Arity = length(Args), - - {EArgs, EGuards, EBody, _} = elixir_exp_clauses:def(fun elixir_def_defaults:expand/2, - Args, Guards, expr_from_body(Line, Body), E), - - Body == nil andalso check_args_for_bodyless_clause(Line, EArgs, E), - - S = elixir_env:env_to_scope(E), - {Unpacked, Defaults} = elixir_def_defaults:unpack(Kind, Name, EArgs, S), - {Clauses, Super} = translate_clause(Body, Line, Kind, Unpacked, EGuards, EBody, S), - - run_on_definition_callbacks(Kind, Line, Module, Name, EArgs, EGuards, EBody, E), - Function = {function, Line, Name, Arity, Clauses}, - {Function, Defaults, Super}. - -translate_clause(nil, _Line, _Kind, _Args, [], _Body, _S) -> - {[], false}; -translate_clause(nil, Line, Kind, _Args, _Guards, _Body, #elixir_scope{file=File}) -> - elixir_errors:form_error(Line, File, ?MODULE, {missing_do, Kind}); -translate_clause(_, Line, Kind, Args, Guards, Body, S) -> - {TClause, TS} = elixir_clauses:clause(Line, - fun elixir_translator:translate_args/2, Args, Body, Guards, true, S), - - FClause = case is_macro(Kind) of - true -> - FArgs = {var, Line, '_@CALLER'}, - MClause = setelement(3, TClause, [FArgs|element(3, TClause)]), - - case TS#elixir_scope.caller of - true -> - FBody = {'match', Line, - {'var', Line, '__CALLER__'}, - elixir_utils:erl_call(Line, elixir_env, linify, [{var, Line, '_@CALLER'}]) - }, - setelement(5, MClause, [FBody|element(5, TClause)]); - false -> - MClause - end; - false -> - TClause - end, +def_to_clauses(_Kind, Meta, Args, [], nil, E) -> + check_args_for_bodyless_clause(Meta, Args, E), + []; +def_to_clauses(Kind, Meta, _Args, _Guards, nil, E) -> + elixir_errors:form_error(Meta, ?key(E, file), elixir_expand, {missing_option, Kind, [do]}); +def_to_clauses(_Kind, Meta, Args, Guards, [{do, Body}], _E) -> + [{Meta, Args, Guards, Body}]; +def_to_clauses(_Kind, Meta, Args, Guards, Body, _E) -> + [{Meta, Args, Guards, {'try', Meta, [Body]}}]. + +run_on_definition_callbacks(Kind, Module, Name, Args, Guards, Body, E) -> + Callbacks = ets:lookup_element(elixir_module:data_table(Module), on_definition, 2), + _ = [Mod:Fun(E, Kind, Name, Args, Guards, Body) || {Mod, Fun} <- Callbacks], + ok. - {[FClause], TS#elixir_scope.super}. 
- -expr_from_body(_Line, nil) -> nil; -expr_from_body(_Line, [{do, Expr}]) -> Expr; -expr_from_body(Line, Else) -> {'try', [{line,Line}], [Else]}. - -is_macro(defmacro) -> true; -is_macro(defmacrop) -> true; -is_macro(_) -> false. - -% Unwrap the functions stored in the functions table. -% It returns a list of all functions to be exported, plus the macros, -% and the body of all functions. -unwrap_definitions(Module) -> - Table = table(Module), - CTable = clauses_table(Module), - ets:delete(Table, last), - unwrap_definition(ets:tab2list(Table), CTable, [], [], [], [], [], [], []). - -unwrap_definition([Fun|T], CTable, All, Exports, Private, Def, Defmacro, Functions, Tail) -> - Tuple = element(1, Fun), - Clauses = [Clause || {_, Clause} <- ets:lookup(CTable, Tuple)], - - {NewFun, NewExports, NewPrivate, NewDef, NewDefmacro} = - case Clauses of - [] -> {false, Exports, Private, Def, Defmacro}; - _ -> unwrap_definition(element(2, Fun), Tuple, Fun, Exports, Private, Def, Defmacro) +store_definition(Check, Kind, Meta, Name, Arity, File, Module, Defaults, Clauses) -> + Data = elixir_module:data_table(Module), + Defs = elixir_module:defs_table(Module), + + Tuple = {Name, Arity}, + HasBody = Clauses =/= [], + + MaxDefaults = + case ets:take(Defs, {def, Tuple}) of + [{_, StoredKind, StoredMeta, StoredFile, StoredCheck, + {StoredDefaults, LastHasBody, LastDefaults}}] -> + check_valid_kind(Meta, File, Name, Arity, Kind, StoredKind), + (Check and StoredCheck) andalso + check_valid_clause(Meta, File, Name, Arity, Kind, Data, StoredMeta, StoredFile), + check_valid_defaults(Meta, File, Name, Arity, Kind, Defaults, StoredDefaults, LastDefaults, LastHasBody), + {max(Defaults, StoredDefaults), HasBody, Defaults}; + [] -> + {Defaults, HasBody, Defaults} end, - {NewFunctions, NewTail} = case NewFun of - false -> - NewAll = All, - {Functions, Tail}; - _ -> - NewAll = [Tuple|All], - function_for_stored_definition(NewFun, Clauses, Functions, Tail) - end, - - unwrap_definition(T, CTable, NewAll, NewExports, NewPrivate, - NewDef, NewDefmacro, NewFunctions, NewTail); -unwrap_definition([], _CTable, All, Exports, Private, Def, Defmacro, Functions, Tail) -> - {All, Exports, Private, ordsets:from_list(Def), - ordsets:from_list(Defmacro), lists:reverse(Tail ++ Functions)}. - -unwrap_definition(def, Tuple, Fun, Exports, Private, Def, Defmacro) -> - {Fun, [Tuple|Exports], Private, [Tuple|Def], Defmacro}; -unwrap_definition(defmacro, {Name, Arity} = Tuple, Fun, Exports, Private, Def, Defmacro) -> - Macro = {elixir_utils:macro_name(Name), Arity + 1}, - {setelement(1, Fun, Macro), [Macro|Exports], Private, Def, [Tuple|Defmacro]}; -unwrap_definition(defp, Tuple, Fun, Exports, Private, Def, Defmacro) -> - %% {Name, Arity}, Kind, Line, Check, Defaults - Info = {Tuple, defp, element(3, Fun), element(5, Fun), element(7, Fun)}, - {Fun, Exports, [Info|Private], Def, Defmacro}; -unwrap_definition(defmacrop, Tuple, Fun, Exports, Private, Def, Defmacro) -> - %% {Name, Arity}, Kind, Line, Check, Defaults - Info = {Tuple, defmacrop, element(3, Fun), element(5, Fun), element(7, Fun)}, - {false, Exports, [Info|Private], Def, Defmacro}. - -%% Helpers - -function_for_stored_definition({{Name,Arity}, _, Line, _, _, nil, _}, Clauses, Functions, Tail) -> - {[{function, Line, Name, Arity, Clauses}|Functions], Tail}; - -function_for_stored_definition({{Name,Arity}, _, Line, _, _, Location, _}, Clauses, Functions, Tail) -> - {Functions, [ - {function, Line, Name, Arity, Clauses}, - {attribute, Line, file, Location} | Tail - ]}. 
- -default_function_for(Kind, Name, {clause, Line, Args, _Guards, _Exprs} = Clause) - when Kind == defmacro; Kind == defmacrop -> - {function, Line, Name, length(Args) - 1, [Clause]}; - -default_function_for(_, Name, {clause, Line, Args, _Guards, _Exprs} = Clause) -> - {function, Line, Name, length(Args), [Clause]}. - -%% Store each definition in the table. -%% This function also checks and emit warnings in case -%% the kind, of the visibility of the function changes. - -store_each(Check, Kind, File, Location, Table, CTable, Defaults, {function, Line, Name, Arity, Clauses}) -> - Tuple = {Name, Arity}, - case ets:lookup(Table, Tuple) of - [{Tuple, StoredKind, StoredLine, StoredFile, StoredCheck, StoredLocation, StoredDefaults}] -> - FinalLine = StoredLine, - FinalLocation = StoredLocation, - FinalDefaults = max(Defaults, StoredDefaults), - check_valid_kind(Line, File, Name, Arity, Kind, StoredKind), - (Check and StoredCheck) andalso - check_valid_clause(Line, File, Name, Arity, Kind, Table, StoredLine, StoredFile), - check_valid_defaults(Line, File, Name, Arity, Kind, Defaults, StoredDefaults); - [] -> - FinalLine = Line, - FinalLocation = Location, - FinalDefaults = Defaults - end, - Check andalso ets:insert(Table, {last, {Name, Arity}}), - ets:insert(CTable, [{Tuple, Clause} || Clause <- Clauses ]), - ets:insert(Table, {Tuple, Kind, FinalLine, File, Check, FinalLocation, FinalDefaults}). + Check andalso ets:insert(Data, {?last_def, Tuple}), + ets:insert(Defs, [{{clauses, Tuple}, Clause} || Clause <- Clauses]), + ets:insert(Defs, {{def, Tuple}, Kind, Meta, File, Check, MaxDefaults}). + +%% Handling of defaults + +unpack_defaults(Kind, Meta, Name, Args, E) -> + Expanded = expand_defaults(Args, E#{context := nil}), + unpack_defaults(Kind, Meta, Name, Expanded, [], []). + +unpack_defaults(Kind, Meta, Name, [{'\\\\', DefaultMeta, [Expr, _]} | T] = List, Acc, Clauses) -> + Base = match_defaults(Acc, length(Acc), []), + {Args, Invoke} = extract_defaults(List, length(Base), [], []), + Clause = {Meta, Base ++ Args, [], {super, DefaultMeta, Base ++ Invoke}}, + unpack_defaults(Kind, Meta, Name, T, [Expr | Acc], [Clause | Clauses]); +unpack_defaults(Kind, Meta, Name, [H | T], Acc, Clauses) -> + unpack_defaults(Kind, Meta, Name, T, [H | Acc], Clauses); +unpack_defaults(_Kind, _Meta, _Name, [], Acc, Clauses) -> + {lists:reverse(Acc), lists:reverse(Clauses)}. + +expand_defaults([{'\\\\', Meta, [Expr, Default]} | Args], E) -> + {ExpandedDefault, _} = elixir_expand:expand(Default, E), + [{'\\\\', Meta, [Expr, ExpandedDefault]} | expand_defaults(Args, E)]; +expand_defaults([Arg | Args], E) -> + [Arg | expand_defaults(Args, E)]; +expand_defaults([], _E) -> + []. + +extract_defaults([{'\\\\', _, [_Expr, Default]} | T], Counter, NewArgs, NewInvoke) -> + extract_defaults(T, Counter, NewArgs, [Default | NewInvoke]); +extract_defaults([_ | T], Counter, NewArgs, NewInvoke) -> + H = default_var(Counter), + extract_defaults(T, Counter + 1, [H | NewArgs], [H | NewInvoke]); +extract_defaults([], _Counter, NewArgs, NewInvoke) -> + {lists:reverse(NewArgs), lists:reverse(NewInvoke)}. + +match_defaults([], 0, Acc) -> + Acc; +match_defaults([_ | T], Counter, Acc) -> + NewCounter = Counter - 1, + match_defaults(T, NewCounter, [default_var(NewCounter) | Acc]). + +default_var(Counter) -> + {list_to_atom([$x | integer_to_list(Counter)]), [{generated, true}], ?var_context}. 
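unpack_defaults above rewrites every \\ marker into an extra stub clause: the non-default arguments become generated x0, x1, ... variables, the default expressions are expanded once, and each stub forwards to the full-arity definition through super. Conceptually, for a definition such as the one below (module name and the expansion shown in comments are an approximation for illustration, not literal compiler output):

    defmodule Example do
      def join(list, sep \\ ", ", trailer \\ "") do
        Enum.join(list, sep) <> trailer
      end

      # the defaults above roughly produce these extra clauses:
      #
      #   def join(x0), do: join(x0, ", ", "")
      #   def join(x0, x1), do: join(x0, x1, "")
      #
      # which is why Example.join(~w(a b)) and Example.join(~w(a b), "-") both work.
    end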
%% Validations -check_valid_kind(_Line, _File, _Name, _Arity, Kind, Kind) -> []; -check_valid_kind(Line, File, Name, Arity, Kind, StoredKind) -> - elixir_errors:form_error(Line, File, ?MODULE, +check_valid_kind(_Meta, _File, _Name, _Arity, Kind, Kind) -> []; +check_valid_kind(Meta, File, Name, Arity, Kind, StoredKind) -> + elixir_errors:form_error(Meta, File, ?MODULE, {changed_kind, {Name, Arity, StoredKind, Kind}}). -check_valid_clause(Line, File, Name, Arity, Kind, Table, StoredLine, StoredFile) -> - case ets:lookup_element(Table, last, 2) of - {Name,Arity} -> []; +check_valid_clause(Meta, File, Name, Arity, Kind, Data, StoredMeta, StoredFile) -> + case ets:lookup_element(Data, ?last_def, 2) of + {Name, Arity} -> []; [] -> []; _ -> Relative = elixir_utils:relative_to_cwd(StoredFile), - elixir_errors:handle_file_warning(File, {Line, ?MODULE, - {ungrouped_clause, {Kind, Name, Arity, StoredLine, Relative}}}) + elixir_errors:form_warn(Meta, File, ?MODULE, + {ungrouped_clause, {Kind, Name, Arity, ?line(StoredMeta), Relative}}) end. -check_valid_defaults(_Line, _File, _Name, _Arity, _Kind, 0, _) -> []; -check_valid_defaults(Line, File, Name, Arity, Kind, _, 0) -> - elixir_errors:handle_file_warning(File, {Line, ?MODULE, {out_of_order_defaults, {Kind, Name, Arity}}}); -check_valid_defaults(Line, File, Name, Arity, Kind, _, _) -> - elixir_errors:form_error(Line, File, ?MODULE, {clauses_with_defaults, {Kind, Name, Arity}}). +% Clause with defaults after clause with defaults +check_valid_defaults(Meta, File, Name, Arity, Kind, Defaults, StoredDefaults, _, _) when Defaults > 0, StoredDefaults > 0 -> + elixir_errors:form_error(Meta, File, ?MODULE, {clauses_with_defaults, {Kind, Name, Arity}}); +% Clause with defaults after clause(s) without defaults +check_valid_defaults(Meta, File, Name, Arity, Kind, Defaults, 0, 0, _) when Defaults > 0 -> + elixir_errors:form_warn(Meta, File, ?MODULE, {clauses_with_defaults, {Kind, Name, Arity}}); +% Clause without defaults directly after clause with defaults (body less does not count) +check_valid_defaults(Meta, File, Name, Arity, Kind, 0, _, LastDefaults, true) when LastDefaults > 0 -> + elixir_errors:form_warn(Meta, File, ?MODULE, {clauses_with_defaults, {Kind, Name, Arity}}); +% Clause without defaults +check_valid_defaults(_Meta, _File, _Name, _Arity, _Kind, 0, _, _, _) -> []. + +warn_bodyless_function(Check, _Meta, _File, Module, _Kind, _Tuple) + when Check == false; Module == 'Elixir.Module' -> + ok; +warn_bodyless_function(_Check, Meta, File, _Module, Kind, Tuple) -> + elixir_errors:form_warn(Meta, File, ?MODULE, {bodyless_clause, Kind, Tuple}), + ok. -check_previous_defaults(Table, Line, Name, Arity, Kind, Defaults, E) -> - Matches = ets:match(Table, {{Name, '$2'}, '$1', '_', '_', '_', '_', '$3'}), - [ begin - elixir_errors:form_error(Line, ?m(E, file), ?MODULE, - {defs_with_defaults, Name, {Kind, Arity}, {K, A}}) - end || [K, A, D] <- Matches, A /= Arity, D /= 0, defaults_conflict(A, D, Arity, Defaults)]. +check_args_for_bodyless_clause(Meta, Args, E) -> + [begin + elixir_errors:form_error(Meta, ?key(E, file), ?MODULE, invalid_args_for_bodyless_clause) + end || Arg <- Args, invalid_arg(Arg)]. + +invalid_arg({Name, _, Kind}) when is_atom(Name), is_atom(Kind) -> false; +invalid_arg(_) -> true. 
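check_args_for_bodyless_clause/3 only accepts plain variables in a function head that has no body; defaults are fine because unpack_defaults has already stripped the \\ markers by the time it runs, but any literal or pattern triggers invalid_args_for_bodyless_clause. In user terms (Heads is a made-up module; the quoted message is abbreviated from the format_error clause below):

    defmodule Heads do
      # accepted: a header with only variables and defaults
      def scale(value, factor \\ 2)
      def scale(value, factor), do: value * factor

      # rejected if uncommented: a pattern in a bodyless head
      # def scale(%{} = value, factor)
      # => "only variables and \\ are allowed as arguments in definition header"
    end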
+ +check_previous_defaults(Meta, Module, Name, Arity, Kind, Defaults, E) -> + Matches = ets:match(elixir_module:defs_table(Module), + {{def, {Name, '$2'}}, '$1', '_', '_', '_', {'$3', '_', '_'}}), + [begin + elixir_errors:form_error(Meta, ?key(E, file), ?MODULE, + {defs_with_defaults, Name, {Kind, Arity}, {K, A}}) + end || [K, A, D] <- Matches, A /= Arity, D /= 0, defaults_conflict(A, D, Arity, Defaults)]. defaults_conflict(A, D, Arity, Defaults) -> ((Arity >= (A - D)) andalso (Arity < A)) orelse ((A >= (Arity - Defaults)) andalso (A < Arity)). -check_args_for_bodyless_clause(Line, Args, E) -> - [ begin - elixir_errors:form_error(Line, ?m(E, file), ?MODULE, invalid_args_for_bodyless_clause) - end || Arg <- Args, invalid_arg(Arg) ]. - -invalid_arg({Name, _, Kind}) when is_atom(Name), is_atom(Kind) -> - false; -invalid_arg({'\\\\', _, [{Name, _, Kind}, _]}) when is_atom(Name), is_atom(Kind) -> - false; -invalid_arg(_) -> - true. - -assert_no_aliases_name(Line, '__aliases__', [Atom], #{file := File}) when is_atom(Atom) -> - elixir_errors:form_error(Line, File, ?MODULE, {no_alias, Atom}); - +assert_no_aliases_name(Meta, '__aliases__', [Atom], #{file := File}) when is_atom(Atom) -> + elixir_errors:form_error(Meta, File, ?MODULE, {no_alias, Atom}); assert_no_aliases_name(_Meta, _Aliases, _Args, _S) -> ok. +assert_valid_name(Meta, Kind, '__info__', [_], #{file := File, module := Module}) when Module /= 'Elixir.Module' -> + elixir_errors:form_error(Meta, File, ?MODULE, {'__info__', Kind}); +assert_valid_name(Meta, Kind, 'module_info', [], #{file := File}) -> + elixir_errors:form_error(Meta, File, ?MODULE, {module_info, Kind, 0}); +assert_valid_name(Meta, Kind, 'module_info', [_], #{file := File}) -> + elixir_errors:form_error(Meta, File, ?MODULE, {module_info, Kind, 1}); +assert_valid_name(Meta, Kind, is_record, [_, _], #{file := File}) when Kind == defp; Kind == def -> + elixir_errors:form_error(Meta, File, ?MODULE, {is_record, Kind}); +assert_valid_name(_Meta, _Kind, _Name, _Args, _S) -> + ok. + %% Format errors -format_error({no_module,{Kind,Name,Arity}}) -> +format_error({bodyless_clause, Kind, {Name, Arity}}) -> + io_lib:format("implementation not provided for predefined ~ts ~ts/~B", [Kind, Name, Arity]); + +format_error({no_module, {Kind, Name, Arity}}) -> io_lib:format("cannot define function outside module, invalid scope for ~ts ~ts/~B", [Kind, Name, Arity]); format_error({defs_with_defaults, Name, {Kind, Arity}, {K, A}}) when Arity > A -> @@ -368,18 +332,26 @@ format_error({defs_with_defaults, Name, {Kind, Arity}, {K, A}}) when Arity < A - io_lib:format("~ts ~ts/~B conflicts with defaults from ~ts ~ts/~B", [Kind, Name, Arity, K, Name, A]); -format_error({clauses_with_defaults,{Kind,Name,Arity}}) -> - io_lib:format("~ts ~ts/~B has default values and multiple clauses, " - "define a function head with the defaults", [Kind, Name, Arity]); - -format_error({out_of_order_defaults,{Kind,Name,Arity}}) -> - io_lib:format("clause with defaults should be the first clause in ~ts ~ts/~B", [Kind, Name, Arity]); - -format_error({ungrouped_clause,{Kind,Name,Arity,OrigLine,OrigFile}}) -> +format_error({clauses_with_defaults, {Kind, Name, Arity}}) -> + io_lib:format("" + "definitions with multiple clauses and default values require a header. Instead of:\n" + "\n" + " def foo(:first_clause, b \\\\ :default) do ... end\n" + " def foo(:second_clause, b) do ... end\n" + "\n" + "one should write:\n" + "\n" + " def foo(a, b \\\\ :default)\n" + " def foo(:first_clause, b) do ... 
end\n" + " def foo(:second_clause, b) do ... end\n" + "\n" + "~ts ~ts/~B has multiple clauses and defines defaults in one or more clauses", [Kind, Name, Arity]); + +format_error({ungrouped_clause, {Kind, Name, Arity, OrigLine, OrigFile}}) -> io_lib:format("clauses for the same ~ts should be grouped together, ~ts ~ts/~B was previously defined (~ts:~B)", [Kind, Kind, Name, Arity, OrigFile, OrigLine]); -format_error({changed_kind,{Name,Arity,Previous,Current}}) -> +format_error({changed_kind, {Name, Arity, Previous, Current}}) -> io_lib:format("~ts ~ts/~B already defined as ~ts", [Current, Name, Arity, Previous]); format_error({no_alias, Atom}) -> @@ -389,7 +361,18 @@ format_error({invalid_def, Kind, NameAndArgs}) -> io_lib:format("invalid syntax in ~ts ~ts", [Kind, 'Elixir.Macro':to_string(NameAndArgs)]); format_error(invalid_args_for_bodyless_clause) -> - "can use only variables and \\\\ as arguments of bodyless clause"; + "only variables and \\\\ are allowed as arguments in definition header.\n" + "\n" + "If you did not intend to define a header, make sure your function " + "definition has the proper syntax by wrapping the arguments in parentheses " + "and ensuring there is no space between the function name and arguments"; + +format_error({'__info__', Kind}) -> + io_lib:format("cannot define ~ts __info__/1 as it is automatically defined by Elixir", [Kind]); + +format_error({module_info, Kind, Arity}) -> + io_lib:format("cannot define ~ts module_info/~B as it is automatically defined by Erlang", [Kind, Arity]); -format_error({missing_do, Kind}) -> - io_lib:format("missing do keyword in ~ts", [Kind]). +format_error({is_record, Kind}) -> + io_lib:format("cannot define ~ts is_record/2 due to compatibility " + "issues with the Erlang compiler (it is a known limitation)", [Kind]). diff --git a/lib/elixir/src/elixir_def_defaults.erl b/lib/elixir/src/elixir_def_defaults.erl deleted file mode 100644 index 0b0a2683c55..00000000000 --- a/lib/elixir/src/elixir_def_defaults.erl +++ /dev/null @@ -1,73 +0,0 @@ -% Handle default clauses for function definitions. --module(elixir_def_defaults). --export([expand/2, unpack/4]). --include("elixir.hrl"). - -expand(Args, E) -> - lists:mapfoldl(fun - ({'\\\\', Meta, [Left, Right]}, Acc) -> - {ELeft, EL} = elixir_exp:expand(Left, Acc), - {ERight, _} = elixir_exp:expand(Right, Acc#{context := nil}), - {{'\\\\', Meta, [ELeft, ERight]}, EL}; - (Left, Acc) -> - elixir_exp:expand(Left, Acc) - end, E, Args). - -unpack(Kind, Name, Args, S) -> - unpack_each(Kind, Name, Args, [], [], S). - -%% Helpers - -%% Unpack default from given args. -%% Returns the given arguments without their default -%% clauses and a list of clauses for the default calls. -unpack_each(Kind, Name, [{'\\\\', DefMeta, [Expr, _]}|T] = List, Acc, Clauses, S) -> - Base = wrap_kind(Kind, build_match(Acc, [])), - {Args, Invoke} = extract_defaults(List, length(Base), [], []), - - {DefArgs, SA} = elixir_clauses:match(fun elixir_translator:translate_args/2, Base ++ Args, S), - {DefInvoke, _} = elixir_translator:translate_args(Base ++ Invoke, SA), - - Line = ?line(DefMeta), - - Call = {call, Line, - {atom, Line, name_for_kind(Kind, Name)}, - DefInvoke - }, - - Clause = {clause, Line, DefArgs, [], [Call]}, - unpack_each(Kind, Name, T, [Expr|Acc], [Clause|Clauses], S); - -unpack_each(Kind, Name, [H|T], Acc, Clauses, S) -> - unpack_each(Kind, Name, T, [H|Acc], Clauses, S); - -unpack_each(_Kind, _Name, [], Acc, Clauses, _S) -> - {lists:reverse(Acc), lists:reverse(Clauses)}. 
- -% Extract default values from args following the current default clause. - -extract_defaults([{'\\\\', _, [_Expr, Default]}|T], Counter, NewArgs, NewInvoke) -> - extract_defaults(T, Counter, NewArgs, [Default|NewInvoke]); - -extract_defaults([_|T], Counter, NewArgs, NewInvoke) -> - H = {elixir_utils:atom_concat(["x", Counter]), [], nil}, - extract_defaults(T, Counter + 1, [H|NewArgs], [H|NewInvoke]); - -extract_defaults([], _Counter, NewArgs, NewInvoke) -> - {lists:reverse(NewArgs), lists:reverse(NewInvoke)}. - -% Build matches for all the previous argument until the current default clause. - -build_match([], Acc) -> Acc; - -build_match([_|T], Acc) -> - Var = {elixir_utils:atom_concat(["x", length(T)]), [], nil}, - build_match(T, [Var|Acc]). - -% Given the invoked function name based on the kind - -wrap_kind(Kind, Args) when Kind == defmacro; Kind == defmacrop -> [{c, [], nil}|Args]; -wrap_kind(_Kind, Args) -> Args. - -name_for_kind(Kind, Name) when Kind == defmacro; Kind == defmacrop -> elixir_utils:macro_name(Name); -name_for_kind(_Kind, Name) -> Name. \ No newline at end of file diff --git a/lib/elixir/src/elixir_def_overridable.erl b/lib/elixir/src/elixir_def_overridable.erl deleted file mode 100644 index f0cf2b1a7a6..00000000000 --- a/lib/elixir/src/elixir_def_overridable.erl +++ /dev/null @@ -1,75 +0,0 @@ -% Holds the logic responsible for defining overridable functions and handling super. --module(elixir_def_overridable). --export([store_pending/1, ensure_defined/4, - name/2, store/3, format_error/1]). --include("elixir.hrl"). - -overridable(Module) -> - ets:lookup_element(elixir_module:data_table(Module), '__overridable', 2). - -overridable(Module, Value) -> - ets:insert(elixir_module:data_table(Module), {'__overridable', Value}). - -%% Check if an overridable function is defined. - -ensure_defined(Meta, Module, Tuple, S) -> - Overridable = overridable(Module), - case orddict:find(Tuple, Overridable) of - {ok, {_, _, _, _}} -> ok; - _ -> elixir_errors:form_error(Meta, S#elixir_scope.file, ?MODULE, {no_super, Module, Tuple}) - end. - -%% Gets the name based on the function and stored overridables - -name(Module, Function) -> - name(Module, Function, overridable(Module)). - -name(_Module, {Name, _} = Function, Overridable) -> - {Count, _, _, _} = orddict:fetch(Function, Overridable), - elixir_utils:atom_concat([Name, " (overridable ", Count, ")"]). - -%% Store - -store(Module, Function, GenerateName) -> - Overridable = overridable(Module), - case orddict:fetch(Function, Overridable) of - {_Count, _Clause, _Neighbours, true} -> ok; - {Count, Clause, Neighbours, false} -> - overridable(Module, orddict:store(Function, {Count, Clause, Neighbours, true}, Overridable)), - {{{Name, Arity}, Kind, Line, File, _Check, Location, Defaults}, Clauses} = Clause, - - {FinalKind, FinalName} = case GenerateName of - true -> {defp, name(Module, Function, Overridable)}; - false -> {Kind, Name} - end, - - case code:is_loaded('Elixir.Module.LocalsTracker') of - {_, _} -> - 'Elixir.Module.LocalsTracker':reattach(Module, Kind, {Name, Arity}, Neighbours); - _ -> - ok - end, - - Def = {function, Line, FinalName, Arity, Clauses}, - elixir_def:store_each(false, FinalKind, File, Location, - elixir_def:table(Module), elixir_def:clauses_table(Module), Defaults, Def) - end. - -%% Store pending declarations that were not manually made concrete. - -store_pending(Module) -> - [store(Module, X, false) || {X, {_, _, _, false}} <- overridable(Module), - not 'Elixir.Module':'defines?'(Module, X)]. 
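The deleted elixir_def_overridable module was the machinery behind defoverridable and super, including the no_super error formatted just below; that user-facing behavior is standard Elixir and is worth keeping in mind when reading the surrounding changes. A small reminder, with made-up module names:

    defmodule Default do
      defmacro __using__(_opts) do
        quote do
          def hello(name), do: "hello " <> name
          defoverridable hello: 1
        end
      end
    end

    defmodule Loud do
      use Default

      # overrides hello/1 but still reaches the original definition through super/1
      def hello(name), do: String.upcase(super(name)) <> "!"
    end

    Loud.hello("ana")   # => "HELLO ANA!"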
- -%% Error handling - -format_error({no_super, Module, {Name, Arity}}) -> - Bins = [format_fa(X) || {X, {_, _, _, _}} <- overridable(Module)], - Joined = 'Elixir.Enum':join(Bins, <<", ">>), - io_lib:format("no super defined for ~ts/~B in module ~ts. Overridable functions available are: ~ts", - [Name, Arity, elixir_aliases:inspect(Module), Joined]). - -format_fa({Name, Arity}) -> - A = atom_to_binary(Name, utf8), - B = integer_to_binary(Arity), - << A/binary, $/, B/binary >>. \ No newline at end of file diff --git a/lib/elixir/src/elixir_dispatch.erl b/lib/elixir/src/elixir_dispatch.erl index e4cbac4610f..b4b45da6d9c 100644 --- a/lib/elixir/src/elixir_dispatch.erl +++ b/lib/elixir/src/elixir_dispatch.erl @@ -4,24 +4,12 @@ -module(elixir_dispatch). -export([dispatch_import/5, dispatch_require/6, require_function/5, import_function/4, - expand_import/5, expand_require/5, + expand_import/6, expand_require/5, default_functions/0, default_macros/0, default_requires/0, find_import/4, format_error/1]). -include("elixir.hrl"). -import(ordsets, [is_element/2]). - --define(atom, 'Elixir.Atom'). --define(float, 'Elixir.Float'). --define(io, 'Elixir.IO'). --define(integer, 'Elixir.Integer'). -define(kernel, 'Elixir.Kernel'). --define(list, 'Elixir.List'). --define(map, 'Elixir.Map'). --define(node, 'Elixir.Node'). --define(process, 'Elixir.Process'). --define(string, 'Elixir.String'). --define(system, 'Elixir.System'). --define(tuple, 'Elixir.Tuple'). default_functions() -> [{?kernel, elixir_imported_functions()}]. @@ -30,17 +18,18 @@ default_macros() -> default_requires() -> ['Elixir.Kernel', 'Elixir.Kernel.Typespec']. +%% This is used by elixir_quote. Note we don't record the +%% import locally because at that point there is no +%% ambiguity. find_import(Meta, Name, Arity, E) -> Tuple = {Name, Arity}, case find_dispatch(Meta, Tuple, [], E) of {function, Receiver} -> - elixir_lexical:record_import(Receiver, ?m(E, lexical_tracker)), - elixir_locals:record_import(Tuple, Receiver, ?m(E, module), ?m(E, function)), + elixir_lexical:record_import(Receiver, Name, Arity, ?key(E, function), ?line(Meta), ?key(E, lexical_tracker)), Receiver; {macro, Receiver} -> - elixir_lexical:record_import(Receiver, ?m(E, lexical_tracker)), - elixir_locals:record_import(Tuple, Receiver, ?m(E, module), ?m(E, function)), + elixir_lexical:record_import(Receiver, Name, Arity, nil, ?line(Meta), ?key(E, lexical_tracker)), Receiver; _ -> false @@ -52,8 +41,8 @@ import_function(Meta, Name, Arity, E) -> Tuple = {Name, Arity}, case find_dispatch(Meta, Tuple, [], E) of {function, Receiver} -> - elixir_lexical:record_import(Receiver, ?m(E, lexical_tracker)), - elixir_locals:record_import(Tuple, Receiver, ?m(E, module), ?m(E, function)), + elixir_lexical:record_import(Receiver, Name, Arity, ?key(E, function), ?line(Meta), ?key(E, lexical_tracker)), + elixir_locals:record_import(Tuple, Receiver, ?key(E, module), ?key(E, function)), remote_function(Meta, Receiver, Name, Arity, E); {macro, _Receiver} -> false; @@ -63,22 +52,24 @@ import_function(Meta, Name, Arity, E) -> case elixir_import:special_form(Name, Arity) of true -> false; false -> - elixir_locals:record_local(Tuple, ?m(E, module), ?m(E, function)), + elixir_locals:record_local(Tuple, ?key(E, module), ?key(E, function)), {local, Name, Arity} end end. 
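import_function/4 and find_import/4 now hand the lexical tracker the imported name, arity, call line and calling function instead of only the receiver module, which is the data behind unused-import warnings. From the Elixir side the behavior looks like this (module names made up, warning wording paraphrased):

    defmodule UsesImport do
      import Integer, only: [digits: 1]

      # digits/1 resolves to the imported Integer.digits/1 at compile time
      def split(n), do: digits(n)
    end

    defmodule UnusedImport do
      import Integer, only: [digits: 1]

      # nothing here calls digits/1, so compiling this module
      # emits an unused import warning for Integer
      def noop, do: :ok
    end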
require_function(Meta, Receiver, Name, Arity, E) -> - case is_element({Name, Arity}, get_optional_macros(Receiver)) of + Required = is_element(Receiver, ?key(E, requires)), + case is_element({Name, Arity}, get_macros(Receiver, Required)) of true -> false; - false -> remote_function(Meta, Receiver, Name, Arity, E) + false -> + elixir_lexical:record_remote(Receiver, ?key(E, function), ?key(E, lexical_tracker)), + remote_function(Meta, Receiver, Name, Arity, E) end. remote_function(Meta, Receiver, Name, Arity, E) -> check_deprecation(Meta, Receiver, Name, Arity, E), - elixir_lexical:record_remote(Receiver, ?m(E, lexical_tracker)), - case inline(Receiver, Name, Arity) of + case elixir_rewrite:inline(Receiver, Name, Arity) of {AR, AN} -> {remote, AR, AN, Arity}; false -> {remote, Receiver, Name, Arity} end. @@ -87,11 +78,11 @@ remote_function(Meta, Receiver, Name, Arity, E) -> dispatch_import(Meta, Name, Args, E, Callback) -> Arity = length(Args), - case expand_import(Meta, {Name, Arity}, Args, E, []) of + case expand_import(Meta, {Name, Arity}, Args, E, [], false) of {ok, Receiver, Quoted} -> expand_quoted(Meta, Receiver, Name, Arity, Quoted, E); {ok, Receiver, NewName, NewArgs} -> - elixir_exp:expand({{'.', [], [Receiver, NewName]}, Meta, NewArgs}, E); + elixir_expand:expand({{'.', [], [Receiver, NewName]}, Meta, NewArgs}, E); error -> Callback() end. @@ -99,9 +90,9 @@ dispatch_import(Meta, Name, Args, E, Callback) -> dispatch_require(Meta, Receiver, Name, Args, E, Callback) when is_atom(Receiver) -> Arity = length(Args), - case rewrite(Receiver, Name, Args, Arity) of - {ok, AR, AN, AA} -> - Callback(AR, AN, AA); + case elixir_rewrite:inline(Receiver, Name, Arity) of + {AR, AN} -> + Callback(AR, AN, Args); false -> case expand_require(Meta, Receiver, {Name, Arity}, Args, E) of {ok, Receiver, Quoted} -> expand_quoted(Meta, Receiver, Name, Arity, Quoted, E); @@ -114,57 +105,56 @@ dispatch_require(_Meta, Receiver, Name, Args, _E, Callback) -> %% Macros expansion -expand_import(Meta, {Name, Arity} = Tuple, Args, E, Extra) -> - Module = ?m(E, module), +expand_import(Meta, {Name, Arity} = Tuple, Args, E, Extra, External) -> + Module = ?key(E, module), + Function = ?key(E, function), Dispatch = find_dispatch(Meta, Tuple, Extra, E), - Function = ?m(E, function), - Local = (Function /= nil) andalso (Function /= Tuple) andalso - elixir_locals:macro_for(Module, Name, Arity), case Dispatch of - %% In case it is an import, we dispatch the import. {import, _} -> do_expand_import(Meta, Tuple, Args, Module, E, Dispatch); - - %% There is a local and an import. This is a conflict unless - %% the receiver is the same as module (happens on bootstrap). - {_, Receiver} when Local /= false, Receiver /= Module -> - Error = {macro_conflict, {Receiver, Name, Arity}}, - elixir_errors:form_error(Meta, ?m(E, file), ?MODULE, Error); - - %% There is no local. Dispatch the import. - _ when Local == false -> - do_expand_import(Meta, Tuple, Args, Module, E, Dispatch); - - %% Dispatch to the local. _ -> - elixir_locals:record_local(Tuple, Module, Function), - {ok, Module, expand_macro_fun(Meta, Local(), Module, Name, Args, E)} + AllowLocals = External orelse ((Function /= nil) andalso (Function /= Tuple)), + Local = AllowLocals andalso + elixir_def:local_for(Module, Name, Arity, [defmacro, defmacrop]), + + case Dispatch of + %% There is a local and an import. This is a conflict unless + %% the receiver is the same as module (happens on bootstrap). 
+ {_, Receiver} when Local /= false, Receiver /= Module -> + Error = {macro_conflict, {Receiver, Name, Arity}}, + elixir_errors:form_error(Meta, ?key(E, file), ?MODULE, Error); + + %% There is no local. Dispatch the import. + _ when Local == false -> + do_expand_import(Meta, Tuple, Args, Module, E, Dispatch); + + %% Dispatch to the local. + _ -> + elixir_locals:record_local(Tuple, Module, Function), + {ok, Module, expand_macro_fun(Meta, Local, Module, Name, Args, E)} + end end. do_expand_import(Meta, {Name, Arity} = Tuple, Args, Module, E, Result) -> case Result of {function, Receiver} -> - elixir_lexical:record_import(Receiver, ?m(E, lexical_tracker)), - elixir_locals:record_import(Tuple, Receiver, Module, ?m(E, function)), - - case rewrite(Receiver, Name, Args, Arity) of - {ok, _, _, _} = Res -> Res; - false -> {ok, Receiver, Name, Args} - end; + elixir_lexical:record_import(Receiver, Name, Arity, ?key(E, function), ?line(Meta), ?key(E, lexical_tracker)), + elixir_locals:record_import(Tuple, Receiver, Module, ?key(E, function)), + {ok, Receiver, Name, Args}; {macro, Receiver} -> check_deprecation(Meta, Receiver, Name, Arity, E), - elixir_lexical:record_import(Receiver, ?m(E, lexical_tracker)), - elixir_locals:record_import(Tuple, Receiver, Module, ?m(E, function)), + elixir_lexical:record_import(Receiver, Name, Arity, nil, ?line(Meta), ?key(E, lexical_tracker)), + elixir_locals:record_import(Tuple, Receiver, Module, ?key(E, function)), {ok, Receiver, expand_macro_named(Meta, Receiver, Name, Arity, Args, E)}; {import, Receiver} -> - case expand_require([{require,false}|Meta], Receiver, Tuple, Args, E) of + case expand_require([{required, true} | Meta], Receiver, Tuple, Args, E) of {ok, _, _} = Response -> Response; error -> {ok, Receiver, Name, Args} end; false when Module == ?kernel -> - case rewrite(Module, Name, Args, Arity) of - {ok, _, _, _} = Res -> Res; + case elixir_rewrite:inline(Module, Name, Arity) of + {AR, AN} -> {ok, AR, AN, Args}; false -> error end; false -> @@ -173,19 +163,15 @@ do_expand_import(Meta, {Name, Arity} = Tuple, Args, Module, E, Result) -> expand_require(Meta, Receiver, {Name, Arity} = Tuple, Args, E) -> check_deprecation(Meta, Receiver, Name, Arity, E), - Module = ?m(E, module), + Required = (Receiver == ?key(E, module)) orelse is_element(Receiver, ?key(E, requires)) orelse required(Meta), - case is_element(Tuple, get_optional_macros(Receiver)) of + case is_element(Tuple, get_macros(Receiver, Required)) of + true when Required -> + elixir_lexical:record_remote(Receiver, Name, Arity, nil, ?line(Meta), ?key(E, lexical_tracker)), + {ok, Receiver, expand_macro_named(Meta, Receiver, Name, Arity, Args, E)}; true -> - Requires = ?m(E, requires), - case (Receiver == Module) orelse is_element(Receiver, Requires) orelse skip_require(Meta) of - true -> - elixir_lexical:record_remote(Receiver, ?m(E, lexical_tracker)), - {ok, Receiver, expand_macro_named(Meta, Receiver, Name, Arity, Args, E)}; - false -> - Info = {unrequired_module, {Receiver, Name, length(Args), Requires}}, - elixir_errors:form_error(Meta, ?m(E, file), ?MODULE, Info) - end; + Info = {unrequired_module, {Receiver, Name, length(Args), ?key(E, requires)}}, + elixir_errors:form_error(Meta, ?key(E, file), ?MODULE, Info); false -> error end. 
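expand_require/5 above only expands a remote macro when the receiver is the current module, is already in the requires list, or the call carries the required flag in its metadata; when the macro exists but none of those hold, it raises unrequired_module. In everyday Elixir terms (error text abbreviated):

    # Integer.is_even/1 is a macro, so without a require the compiler refuses to expand it:
    #
    #   Integer.is_even(10)
    #   # => ** (CompileError) you must require Integer before invoking
    #   #    the macro Integer.is_even/1
    #
    # with the require in scope it expands as expected:
    require Integer
    Integer.is_even(10)   # => true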
@@ -197,13 +183,14 @@ expand_macro_fun(Meta, Fun, Receiver, Name, Args, E) -> EArg = {Line, E}, try - apply(Fun, [EArg|Args]) + apply(Fun, [EArg | Args]) catch Kind:Reason -> + Stacktrace = erlang:get_stacktrace(), Arity = length(Args), MFA = {Receiver, elixir_utils:macro_name(Name), Arity+1}, Info = [{Receiver, Name, Arity, [{file, "expanding macro"}]}, caller(Line, E)], - erlang:raise(Kind, Reason, prune_stacktrace(erlang:get_stacktrace(), MFA, Info, EArg)) + erlang:raise(Kind, Reason, prune_stacktrace(Stacktrace, MFA, Info, {ok, EArg})) end. expand_macro_named(Meta, Receiver, Name, Arity, Args, E) -> @@ -214,42 +201,35 @@ expand_macro_named(Meta, Receiver, Name, Arity, Args, E) -> expand_quoted(Meta, Receiver, Name, Arity, Quoted, E) -> Line = ?line(Meta), - Next = elixir_counter:next(), + Next = erlang:unique_integer(), try - elixir_exp:expand( + elixir_expand:expand( elixir_quote:linify_with_context_counter(Line, {Receiver, Next}, Quoted), E) catch Kind:Reason -> + Stacktrace = erlang:get_stacktrace(), MFA = {Receiver, elixir_utils:macro_name(Name), Arity+1}, Info = [{Receiver, Name, Arity, [{file, "expanding macro"}]}, caller(Line, E)], - erlang:raise(Kind, Reason, prune_stacktrace(erlang:get_stacktrace(), MFA, Info, nil)) + erlang:raise(Kind, Reason, prune_stacktrace(Stacktrace, MFA, Info, error)) end. -caller(Line, #{module := nil} = E) -> - {elixir_compiler, '__FILE__', 2, location(Line, E)}; -caller(Line, #{module := Module, function := nil} = E) -> - {Module, '__MODULE__', 0, location(Line, E)}; -caller(Line, #{module := Module, function := {Name, Arity}} = E) -> - {Module, Name, Arity, location(Line, E)}. - -location(Line, E) -> - [{file, elixir_utils:characters_to_list(elixir_utils:relative_to_cwd(?m(E, file)))}, - {line, Line}]. +caller(Line, E) -> + elixir_utils:caller(Line, ?key(E, file), ?key(E, module), ?key(E, function)). %% Helpers -skip_require(Meta) -> - lists:keyfind(require, 1, Meta) == {require, false}. +required(Meta) -> + lists:keyfind(required, 1, Meta) == {required, true}. find_dispatch(Meta, Tuple, Extra, E) -> case is_import(Meta) of {import, _} = Import -> Import; false -> - Funs = ?m(E, functions), - Macs = Extra ++ ?m(E, macros), + Funs = ?key(E, functions), + Macs = Extra ++ ?key(E, macros), FunMatch = find_dispatch(Tuple, Funs), MacMatch = find_dispatch(Tuple, Macs), @@ -259,9 +239,9 @@ find_dispatch(Meta, Tuple, Extra, E) -> {[], []} -> false; _ -> {Name, Arity} = Tuple, - [First, Second|_] = FunMatch ++ MacMatch, + [First, Second | _] = FunMatch ++ MacMatch, Error = {ambiguous_call, {First, Second, Name, Arity}}, - elixir_errors:form_error(Meta, ?m(E, file), ?MODULE, Error) + elixir_errors:form_error(Meta, ?key(E, file), ?MODULE, Error) end end. @@ -273,24 +253,22 @@ is_import(Meta) -> {import, _} = Import -> case lists:keyfind(context, 1, Meta) of {context, _} -> Import; - false -> - false + false -> false end; - false -> - false + false -> false end. 
% %% We've reached the macro wrapper fun, skip it with the rest -prune_stacktrace([{_, _, [E|_], _}|_], _MFA, Info, E) -> +prune_stacktrace([{_, _, [E | _], _} | _], _MFA, Info, {ok, E}) -> Info; %% We've reached the invoked macro, skip it -prune_stacktrace([{M, F, A, _}|_], {M, F, A}, Info, _E) -> +prune_stacktrace([{M, F, A, _} | _], {M, F, A}, Info, _E) -> Info; %% We've reached the elixir_dispatch internals, skip it with the rest -prune_stacktrace([{Mod, _, _, _}|_], _MFA, Info, _E) when Mod == elixir_dispatch; Mod == elixir_exp -> +prune_stacktrace([{Mod, _, _, _} | _], _MFA, Info, _E) when Mod == elixir_dispatch; Mod == elixir_exp -> Info; -prune_stacktrace([H|T], MFA, Info, E) -> - [H|prune_stacktrace(T, MFA, Info, E)]; +prune_stacktrace([H | T], MFA, Info, E) -> + [H | prune_stacktrace(T, MFA, Info, E)]; prune_stacktrace([], _MFA, Info, _E) -> Info. @@ -311,9 +289,20 @@ format_error({ambiguous_call, {Mod1, Mod2, Name, Arity}}) -> %% INTROSPECTION %% Do not try to get macros from Erlang. Speeds up compilation a bit. -get_optional_macros(erlang) -> []; +get_macros(erlang, _) -> []; + +get_macros(Receiver, false) -> + case code:is_loaded(Receiver) of + {file, _} -> + try + Receiver:'__info__'(macros) + catch + error:undef -> [] + end; + false -> [] + end; -get_optional_macros(Receiver) -> +get_macros(Receiver, true) -> case code:ensure_loaded(Receiver) of {module, Receiver} -> try @@ -338,152 +327,6 @@ elixir_imported_macros() -> error:undef -> [] end. -rewrite(?atom, to_string, [Arg], _) -> - {ok, erlang, atom_to_binary, [Arg, utf8]}; -rewrite(?kernel, elem, [Tuple, Index], _) -> - {ok, erlang, element, [increment(Index), Tuple]}; -rewrite(?kernel, put_elem, [Tuple, Index, Value], _) -> - {ok, erlang, setelement, [increment(Index), Tuple, Value]}; -rewrite(?map, 'has_key?', [Map, Key], _) -> - {ok, maps, is_key, [Key, Map]}; -rewrite(?map, fetch, [Map, Key], _) -> - {ok, maps, find, [Key, Map]}; -rewrite(?map, put, [Map, Key, Value], _) -> - {ok, maps, put, [Key, Value, Map]}; -rewrite(?map, delete, [Map, Key], _) -> - {ok, maps, remove, [Key, Map]}; -rewrite(?process, monitor, [Arg], _) -> - {ok, erlang, monitor, [process, Arg]}; -rewrite(?string, to_atom, [Arg], _) -> - {ok, erlang, binary_to_atom, [Arg, utf8]}; -rewrite(?string, to_existing_atom, [Arg], _) -> - {ok, erlang, binary_to_existing_atom, [Arg, utf8]}; -rewrite(?tuple, insert_at, [Tuple, Index, Term], _) -> - {ok, erlang, insert_element, [increment(Index), Tuple, Term]}; -rewrite(?tuple, delete_at, [Tuple, Index], _) -> - {ok, erlang, delete_element, [increment(Index), Tuple]}; -rewrite(?tuple, duplicate, [Data, Size], _) -> - {ok, erlang, make_tuple, [Size, Data]}; - -rewrite(Receiver, Name, Args, Arity) -> - case inline(Receiver, Name, Arity) of - {AR, AN} -> {ok, AR, AN, Args}; - false -> false - end. - -increment(Number) when is_number(Number) -> - Number + 1; -increment(Other) -> - {{'.', [], [erlang, '+']}, [], [Other, 1]}. 
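The rewrite/4 clauses deleted just above and the inline/3 table deleted below move into the new elixir_rewrite module referenced throughout this file; their user-visible effect is unchanged, including the index adjustment that the deleted increment/1 performed for tuple access. Seen from Elixir, as plain expressions that are safe to evaluate:

    tuple = {:a, :b, :c}

    # Kernel.elem/2 is zero-based and compiles down to the one-based :erlang.element/2,
    # so these are equivalent:
    elem(tuple, 0)                        # => :a
    :erlang.element(0 + 1, tuple)         # => :a

    # put_elem/3 maps onto :erlang.setelement/3 with the same +1 on the index:
    put_elem(tuple, 2, :z)                # => {:a, :b, :z}
    :erlang.setelement(2 + 1, tuple, :z)  # => {:a, :b, :z}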
- -inline(?atom, to_char_list, 1) -> {erlang, atom_to_list}; -inline(?io, iodata_length, 1) -> {erlang, iolist_size}; -inline(?io, iodata_to_binary, 1) -> {erlang, iolist_to_binary}; -inline(?integer, to_string, 1) -> {erlang, integer_to_binary}; -inline(?integer, to_string, 2) -> {erlang, integer_to_binary}; -inline(?integer, to_char_list, 1) -> {erlang, integer_to_list}; -inline(?integer, to_char_list, 2) -> {erlang, integer_to_list}; -inline(?float, to_string, 1) -> {erlang, float_to_binary}; -inline(?float, to_char_list, 1) -> {erlang, float_to_list}; -inline(?list, to_atom, 1) -> {erlang, list_to_atom}; -inline(?list, to_existing_atom, 1) -> {erlang, list_to_existing_atom}; -inline(?list, to_float, 1) -> {erlang, list_to_float}; -inline(?list, to_integer, 1) -> {erlang, list_to_integer}; -inline(?list, to_integer, 2) -> {erlang, list_to_integer}; -inline(?list, to_tuple, 1) -> {erlang, list_to_tuple}; - -inline(?kernel, '+', 2) -> {erlang, '+'}; -inline(?kernel, '-', 2) -> {erlang, '-'}; -inline(?kernel, '+', 1) -> {erlang, '+'}; -inline(?kernel, '-', 1) -> {erlang, '-'}; -inline(?kernel, '*', 2) -> {erlang, '*'}; -inline(?kernel, '/', 2) -> {erlang, '/'}; -inline(?kernel, '++', 2) -> {erlang, '++'}; -inline(?kernel, '--', 2) -> {erlang, '--'}; -inline(?kernel, 'not', 1) -> {erlang, 'not'}; -inline(?kernel, '<', 2) -> {erlang, '<'}; -inline(?kernel, '>', 2) -> {erlang, '>'}; -inline(?kernel, '<=', 2) -> {erlang, '=<'}; -inline(?kernel, '>=', 2) -> {erlang, '>='}; -inline(?kernel, '==', 2) -> {erlang, '=='}; -inline(?kernel, '!=', 2) -> {erlang, '/='}; -inline(?kernel, '===', 2) -> {erlang, '=:='}; -inline(?kernel, '!==', 2) -> {erlang, '=/='}; -inline(?kernel, abs, 1) -> {erlang, abs}; -inline(?kernel, apply, 2) -> {erlang, apply}; -inline(?kernel, apply, 3) -> {erlang, apply}; -inline(?kernel, binary_part, 3) -> {erlang, binary_part}; -inline(?kernel, bit_size, 1) -> {erlang, bit_size}; -inline(?kernel, byte_size, 1) -> {erlang, byte_size}; -inline(?kernel, 'div', 2) -> {erlang, 'div'}; -inline(?kernel, exit, 1) -> {erlang, exit}; -inline(?kernel, hd, 1) -> {erlang, hd}; -inline(?kernel, is_atom, 1) -> {erlang, is_atom}; -inline(?kernel, is_binary, 1) -> {erlang, is_binary}; -inline(?kernel, is_bitstring, 1) -> {erlang, is_bitstring}; -inline(?kernel, is_boolean, 1) -> {erlang, is_boolean}; -inline(?kernel, is_float, 1) -> {erlang, is_float}; -inline(?kernel, is_function, 1) -> {erlang, is_function}; -inline(?kernel, is_function, 2) -> {erlang, is_function}; -inline(?kernel, is_integer, 1) -> {erlang, is_integer}; -inline(?kernel, is_list, 1) -> {erlang, is_list}; -inline(?kernel, is_map, 1) -> {erlang, is_map}; -inline(?kernel, is_number, 1) -> {erlang, is_number}; -inline(?kernel, is_pid, 1) -> {erlang, is_pid}; -inline(?kernel, is_port, 1) -> {erlang, is_port}; -inline(?kernel, is_reference, 1) -> {erlang, is_reference}; -inline(?kernel, is_tuple, 1) -> {erlang, is_tuple}; -inline(?kernel, length, 1) -> {erlang, length}; -inline(?kernel, make_ref, 0) -> {erlang, make_ref}; -inline(?kernel, map_size, 1) -> {erlang, map_size}; -inline(?kernel, max, 2) -> {erlang, max}; -inline(?kernel, min, 2) -> {erlang, min}; -inline(?kernel, node, 0) -> {erlang, node}; -inline(?kernel, node, 1) -> {erlang, node}; -inline(?kernel, 'rem', 2) -> {erlang, 'rem'}; -inline(?kernel, round, 1) -> {erlang, round}; -inline(?kernel, self, 0) -> {erlang, self}; -inline(?kernel, send, 2) -> {erlang, send}; -inline(?kernel, spawn, 1) -> {erlang, spawn}; -inline(?kernel, spawn, 3) -> {erlang, spawn}; 
-inline(?kernel, spawn_link, 1) -> {erlang, spawn_link}; -inline(?kernel, spawn_link, 3) -> {erlang, spawn_link}; -inline(?kernel, spawn_monitor, 1) -> {erlang, spawn_monitor}; -inline(?kernel, spawn_monitor, 3) -> {erlang, spawn_monitor}; -inline(?kernel, throw, 1) -> {erlang, throw}; -inline(?kernel, tl, 1) -> {erlang, tl}; -inline(?kernel, trunc, 1) -> {erlang, trunc}; -inline(?kernel, tuple_size, 1) -> {erlang, tuple_size}; - -inline(?map, keys, 1) -> {maps, keys}; -inline(?map, merge, 2) -> {maps, merge}; -inline(?map, size, 1) -> {maps, size}; -inline(?map, values, 1) -> {maps, values}; -inline(?map, to_list, 1) -> {maps, to_list}; - -inline(?node, spawn, 2) -> {erlang, spawn}; -inline(?node, spawn, 3) -> {erlang, spawn_opt}; -inline(?node, spawn, 4) -> {erlang, spawn}; -inline(?node, spawn, 5) -> {erlang, spawn_opt}; -inline(?node, spawn_link, 2) -> {erlang, spawn_link}; -inline(?node, spawn_link, 4) -> {erlang, spawn_link}; - -inline(?process, exit, 2) -> {erlang, exit}; -inline(?process, spawn, 2) -> {erlang, spawn_opt}; -inline(?process, spawn, 4) -> {erlang, spawn_opt}; -inline(?process, demonitor, 1) -> {erlang, demonitor}; -inline(?process, demonitor, 2) -> {erlang, demonitor}; -inline(?process, link, 1) -> {erlang, link}; -inline(?process, unlink, 1) -> {erlang, unlink}; - -inline(?string, to_float, 1) -> {erlang, binary_to_float}; -inline(?string, to_integer, 1) -> {erlang, binary_to_integer}; -inline(?string, to_integer, 2) -> {erlang, binary_to_integer}; -inline(?system, stacktrace, 0) -> {erlang, get_stacktrace}; -inline(?tuple, to_list, 1) -> {erlang, tuple_to_list}; - -inline(_, _, _) -> false. - check_deprecation(Meta, Receiver, Name, Arity, #{file := File}) -> case deprecation(Receiver, Name, Arity) of false -> ok; @@ -507,15 +350,73 @@ deprecation_message(Warning, Message) -> Message -> Warning ++ ", " ++ Message end. -deprecation('Elixir.Mix.Generator', 'from_file', _) -> - "instead pass [from_file: file] to embed_text/2 and embed_template/2 macros. 
" - "Note that [from_file: file] expects paths relative to the current working " - "directory and not to the current file"; -deprecation('Elixir.EEx.TransformerEngine', '__using__', _) -> - "check EEx.SmartEngine for how to build custom engines"; -deprecation('Elixir.EEx.AssignsEngine', '__using__', _) -> - "check EEx.SmartEngine for how to build custom engines"; -deprecation('Elixir.Kernel', 'xor', _) -> - true; %% Remember to remove xor operator from tokenizer +%% Modules +deprecation('Elixir.Dict', _, _) -> + "use the Map module for working with maps or the Keyword module for working with keyword lists"; +deprecation('Elixir.GenEvent', _, _) -> + "use one of the alternatives described in the documentation for the GenEvent module"; +deprecation('Elixir.HashDict', _, _) -> + "use maps and the Map module instead"; +deprecation('Elixir.HashSet', _, _) -> + "use the MapSet module instead"; +deprecation('Elixir.Set', _, _) -> + "use the MapSet module for working with sets"; + +%% Single functions +deprecation('Elixir.Atom', to_char_list, 1) -> + "use Atom.to_charlist/1"; +deprecation('Elixir.Enum', filter_map, 3) -> + "use Enum.filter/2 + Enum.map/2 or for comprehensions"; +deprecation('Elixir.Enum', uniq, 2) -> + "use Enum.uniq_by/2"; +deprecation('Elixir.Float', to_char_list, 1) -> + "use Float.to_charlist/1"; +deprecation('Elixir.Float', to_char_list, 2) -> + "use :erlang.float_to_list/2"; +deprecation('Elixir.Float', to_string, 2) -> + "use :erlang.float_to_binary/2"; +deprecation('Elixir.Integer', to_char_list, 1) -> + "use Integer.to_charlist/1"; +deprecation('Elixir.Integer', to_char_list, 2) -> + "use Integer.to_charlist/2"; +deprecation('Elixir.Kernel', to_char_list, 1) -> + "use Kernel.to_charlist/1"; +deprecation('Elixir.Keyword', size, 1) -> + "use Kernel.length/1"; +deprecation('Elixir.List.Chars', to_char_list, 1) -> + "use List.Chars.to_charlist/1"; +deprecation('Elixir.Map', size, 1) -> + "use Kernel.map_size/1"; +deprecation('Elixir.Stream', filter_map, 3) -> + "use Stream.filter/2 + Stream.map/2"; +deprecation('Elixir.Stream', uniq, 2) -> + "use Stream.uniq_by/2"; +deprecation('Elixir.String', ljust, 2) -> + "use String.pad_trailing/2"; +deprecation('Elixir.String', ljust, 3) -> + "use String.pad_trailing/3 with a binary padding"; +deprecation('Elixir.String', lstrip, 1) -> + "use String.trim_leading/1"; +deprecation('Elixir.String', lstrip, 2) -> + "use String.trim_leading/2 with a binary as second argument"; +deprecation('Elixir.String', rjust, 2) -> + "use String.pad_leading/2"; +deprecation('Elixir.String', rjust, 3) -> + "use String.pad_leading/3 with a binary padding"; +deprecation('Elixir.String', rstrip, 1) -> + "use String.trim_trailing/1"; +deprecation('Elixir.String', rstrip, 2) -> + "use String.trim_trailing/2 with a binary as second argument"; +deprecation('Elixir.String', strip, 1) -> + "use String.trim/1"; +deprecation('Elixir.String', strip, 2) -> + "use String.trim/2 with a binary second argument"; +deprecation('Elixir.String', to_char_list, 1) -> + "use String.to_charlist/1"; +deprecation('Elixir.String', 'valid_character?', 1) -> + "use String.valid?/1"; +deprecation('Elixir.Task', find, 2) -> + "match on the message directly"; + deprecation(_, _, _) -> false. 
diff --git a/lib/elixir/src/elixir_env.erl b/lib/elixir/src/elixir_env.erl index fc32e1b0599..477fc7fbf0d 100644 --- a/lib/elixir/src/elixir_env.erl +++ b/lib/elixir/src/elixir_env.erl @@ -9,29 +9,29 @@ new() -> file => <<"nofile">>, %% the current filename line => 1, %% the current line function => nil, %% the current function - context => nil, %% can be match_vars, guards or nil + context => nil, %% can be match, guard or nil requires => [], %% a set with modules required - aliases => [], %% an orddict with aliases by new -> old names + aliases => [], %% a list of aliases by new -> old names functions => [], %% a list with functions imported from module macros => [], %% a list with macros imported from module macro_aliases => [], %% keep aliases defined inside a macro context_modules => [], %% modules defined in the current context + lexical_tracker => nil, %% holds the lexical tracker PID vars => [], %% a set of defined variables export_vars => nil, %% a set of variables to be exported in some constructs - lexical_tracker => nil, %% holds the lexical tracker pid - local => nil}. %% the module to delegate local functions to + prematch_vars => nil}. %% a set of variables defined before the current match linify({Line, Env}) -> Env#{line := Line}. -env_to_scope(#{module := Module, file := File, function := Function, context := Context}) -> - #elixir_scope{module=Module, file=File, function=Function, context=Context}. +env_to_scope(#{file := File, context := Context}) -> + #elixir_erl{file=File, context=Context}. env_to_scope_with_vars(Env, Vars) -> - (env_to_scope(Env))#elixir_scope{ - vars=orddict:from_list(Vars), - counter=[{'_',length(Vars)}] - }. + Map = maps:from_list(Vars), + (env_to_scope(Env))#elixir_erl{ + vars=Map, counter=#{'_' => map_size(Map)} + }. %% SCOPE MERGING @@ -39,21 +39,21 @@ env_to_scope_with_vars(Env, Vars) -> %% with their variables merged. mergev(E1, E2) when is_list(E1) -> E2#{ - vars := merge_vars(E1, ?m(E2, vars)), - export_vars := merge_opt_vars(E1, ?m(E2, export_vars)) - }; + vars := merge_vars(E1, ?key(E2, vars)), + export_vars := merge_opt_vars(E1, ?key(E2, export_vars)) + }; mergev(E1, E2) -> E2#{ - vars := merge_vars(?m(E1, vars), ?m(E2, vars)), - export_vars := merge_opt_vars(?m(E1, export_vars), ?m(E2, export_vars)) - }. + vars := merge_vars(?key(E1, vars), ?key(E2, vars)), + export_vars := merge_opt_vars(?key(E1, export_vars), ?key(E2, export_vars)) + }. %% Receives two scopes and return the later scope %% keeping the variables from the first (imports %% and everything else are passed forward). mergea(E1, E2) -> - E2#{vars := ?m(E1, vars)}. + E2#{vars := ?key(E1, vars)}. merge_vars(V1, V2) -> ordsets:union(V1, V2). diff --git a/lib/elixir/src/elixir_erl.erl b/lib/elixir/src/elixir_erl.erl new file mode 100644 index 00000000000..48abf0de608 --- /dev/null +++ b/lib/elixir/src/elixir_erl.erl @@ -0,0 +1,485 @@ +%% Compiler backend to Erlang. +-module(elixir_erl). +-export([elixir_to_erl/1, definition_to_anonymous/6, compile/1, + get_ann/1, remote/4, add_beam_chunks/2, debug_info/4, + definition_scope/5]). +-include("elixir.hrl"). + +%% TODO: Remove extra chunk functionality when OTP 20+. + +add_beam_chunks(Bin, []) when is_binary(Bin) -> + Bin; +add_beam_chunks(Bin, NewChunks) when is_binary(Bin), is_list(NewChunks) -> + {ok, _, OldChunks} = beam_lib:all_chunks(Bin), + Chunks = [{binary_to_list(K), V} || {K, V} <- NewChunks] ++ OldChunks, + {ok, NewBin} = beam_lib:build_module(Chunks), + NewBin. 
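add_beam_chunks/2 above is a stopgap (per the TODO, only needed until OTP 20+ can be assumed) that reopens a compiled module with beam_lib and appends extra chunks. A sketch of the same round trip driven from Elixir; the Scratch module and the 'Demo' chunk name are invented for the example:

    {:module, _name, original, _} =
      defmodule Scratch do
        def ping, do: :pong
      end

    # mirror add_beam_chunks/2: take the existing chunks and rebuild with one more
    {:ok, _module, chunks} = :beam_lib.all_chunks(original)
    {:ok, extended} = :beam_lib.build_module([{'Demo', "hello"} | chunks])

    # the extra chunk can be read back by its four-character id
    {:ok, {_module, [{'Demo', data}]}} = :beam_lib.chunks(extended, ['Demo'])
    data   # => "hello"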
+ +%% debug_info callback + +debug_info(elixir_v1, _Module, none, _Opts) -> + {error, missing}; +debug_info(elixir_v1, _Module, {elixir_v1, Map, _Specs}, _Opts) -> + {ok, Map}; +debug_info(erlang_v1, _Module, {elixir_v1, Map, Specs}, _Opts) -> + {Prefix, Forms, _, _} = dynamic_form(Map), + {ok, Prefix ++ Specs ++ Forms}; +debug_info(core_v1, _Module, {elixir_v1, Map, Specs}, Opts) -> + {Prefix, Forms, _, _} = dynamic_form(Map), + #{compile_opts := CompileOpts} = Map, + + %% Do not rely on elixir_erl_compiler because we don't + %% warnings nor the other functionality provided there. + try compile:noenv_forms(Prefix ++ Specs ++ Forms, [core, return | CompileOpts] ++ Opts) of + {ok, _, Core, _} -> {ok, Core}; + _What -> {error, failed_conversion} + catch + error:_ -> {error, failed_conversion} + end; +debug_info(_, _, _, _) -> + {error, unknown_format}. + +%% Builds Erlang AST annotation. + +get_ann(Opts) when is_list(Opts) -> + get_ann(Opts, false, 0). + +get_ann([{generated, true} | T], _, Line) -> get_ann(T, true, Line); +get_ann([{line, Line} | T], Gen, _) -> get_ann(T, Gen, Line); +get_ann([_ | T], Gen, Line) -> get_ann(T, Gen, Line); +%% TODO: Remove next clause when we no longer support Erlang 18. +get_ann([], _, Line) when Line < 0 -> Line; +get_ann([], Gen, Line) -> erl_anno:set_generated(Gen, Line). + +%% Builds a remote call annotation. + +remote(Ann, Module, Function, Args) when is_atom(Module), is_atom(Function), is_list(Args) -> + {call, Ann, + {remote, Ann, {atom, Ann, Module}, {atom, Ann, Function}}, + Args + }. + +%% Converts an Elixir definition to an anonymous function. + +definition_to_anonymous(File, Module, {Name, Arity}, Kind, Meta, Clauses) -> + ErlClauses = [translate_clause(Kind, Name, Arity, Clause, File) || Clause <- Clauses], + Fun = {'fun', ?ann(Meta), {clauses, ErlClauses}}, + LocalHandler = fun(LocalName, LocalArgs) -> invoke_local(Module, LocalName, LocalArgs) end, + {value, Result, _Binding} = erl_eval:expr(Fun, [], {value, LocalHandler}), + Result. + +invoke_local(Module, RawName, Args) -> + %% If we have a macro, its arity in the table is + %% actually one less than in the function call + {Name, Arity} = case atom_to_list(RawName) of + "MACRO-" ++ Rest -> {list_to_atom(Rest), length(Args) - 1}; + _ -> {RawName, length(Args)} + end, + + case elixir_def:local_for(Module, Name, Arity, all) of + false -> + {current_stacktrace, [_ | T]} = erlang:process_info(self(), current_stacktrace), + erlang:raise(error, undef, [{Module, Name, Arity, []} | T]); + Fun -> + apply(Fun, Args) + end. + +%% Converts Elixir quoted literals to Erlang AST. + +elixir_to_erl(Tree) when is_tuple(Tree) -> + {tuple, 0, [elixir_to_erl(X) || X <- tuple_to_list(Tree)]}; +elixir_to_erl([]) -> + {nil, 0}; +elixir_to_erl(<<>>) -> + {bin, 0, []}; +elixir_to_erl(Tree) when is_list(Tree) -> + elixir_to_erl_cons1(Tree, []); +elixir_to_erl(Tree) when is_atom(Tree) -> + {atom, 0, Tree}; +elixir_to_erl(Tree) when is_integer(Tree) -> + {integer, 0, Tree}; +elixir_to_erl(Tree) when is_float(Tree) -> + {float, 0, Tree}; +elixir_to_erl(Tree) when is_binary(Tree) -> + %% Note that our binaries are UTF-8 encoded and we are converting + %% to a list using binary_to_list. The reason for this is that Erlang + %% considers a string in a binary to be encoded in latin1, so the bytes + %% are not changed in any fashion. 
+ {bin, 0, [{bin_element, 0, {string, 0, binary_to_list(Tree)}, default, default}]}; +elixir_to_erl(Function) when is_function(Function) -> + case (erlang:fun_info(Function, type) == {type, external}) andalso + (erlang:fun_info(Function, env) == {env, []}) of + true -> + {module, Module} = erlang:fun_info(Function, module), + {name, Name} = erlang:fun_info(Function, name), + {arity, Arity} = erlang:fun_info(Function, arity), + + {'fun', 0, {function, + {atom, 0, Module}, + {atom, 0, Name}, + {integer, 0, Arity}}}; + false -> + error(badarg) + end; +elixir_to_erl(PidOrRef) when is_pid(PidOrRef); is_reference(PidOrRef) -> + elixir_erl:remote(0, erlang, binary_to_term, + [elixir_erl:elixir_to_erl(term_to_binary(PidOrRef))]); +elixir_to_erl(_Other) -> + error(badarg). + +elixir_to_erl_cons1([H | T], Acc) -> elixir_to_erl_cons1(T, [H | Acc]); +elixir_to_erl_cons1(Other, Acc) -> elixir_to_erl_cons2(Acc, elixir_to_erl(Other)). + +elixir_to_erl_cons2([H | T], Acc) -> + elixir_to_erl_cons2(T, {cons, 0, elixir_to_erl(H), Acc}); +elixir_to_erl_cons2([], Acc) -> + Acc. + +%% Returns a definition scope for translation. + +definition_scope(Meta, Kind, Name, Arity, File) -> + %% TODO: We only need to do this dance because some + %% warnings are raised in elixir_erl_pass. Once we remove + %% all warnings from the Erlang pass, we can remove the + %% file field from #elixir_erl and clean up the code. + case lists:keyfind(location, 1, Meta) of + {location, {F, _}} -> #elixir_erl{def = {Kind, Name, Arity}, file = F}; + false -> #elixir_erl{def = {Kind, Name, Arity}, file = File} + end. + +%% Compilation hook. + +compile(#{module := Module} = Map) -> + Data = elixir_module:data_table(Module), + {Prefix, Forms, Defmacro, Unreachable} = dynamic_form(Map), + Specs = + case elixir_compiler:get_opt(internal) of + true -> []; + false -> specs_form(Data, Defmacro, Unreachable, types_form(Data, [])) + end, + load_form(Map, Data, Prefix, Forms, Specs). + +% Definitions + +split_definition([{Tuple, def, Meta, Clauses} | T], File, Unreachable, + Def, Defmacro, Exports, Functions) -> + {_, _, N, A, _} = Function = translate_definition(def, Meta, File, Tuple, Clauses), + split_definition(T, File, Unreachable, [Tuple | Def], Defmacro, [{N, A} | Exports], + add_definition(Meta, Function, Functions)); + +split_definition([{Tuple, defp, Meta, Clauses} | T], File, Unreachable, + Def, Defmacro, Exports, Functions) -> + Function = translate_definition(defp, Meta, File, Tuple, Clauses), + case lists:member(Tuple, Unreachable) of + false -> + split_definition(T, File, Unreachable, Def, Defmacro, Exports, + add_definition(Meta, Function, Functions)); + true -> + split_definition(T, File, Unreachable, Def, Defmacro, Exports, Functions) + end; + +split_definition([{Tuple, defmacro, Meta, Clauses} | T], File, Unreachable, + Def, Defmacro, Exports, Functions) -> + {_, _, N, A, _} = Function = translate_definition(defmacro, Meta, File, Tuple, Clauses), + split_definition(T, File, Unreachable, Def, [Tuple | Defmacro], [{N, A} | Exports], + add_definition(Meta, Function, Functions)); + +split_definition([{_, defmacrop, _Meta, _Clauses} | T], File, Unreachable, + Def, Defmacro, Exports, Functions) -> + split_definition(T, File, Unreachable, Def, Defmacro, Exports, Functions); + +split_definition([], _File, _Unreachable, Def, Defmacro, Exports, {Head, Tail}) -> + {Def, Defmacro, Exports, Head ++ Tail}. 
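For illustration only (not part of the patch): the elixir_to_erl/1 clauses above map literal Elixir terms to Erlang abstract format at line 0, for example:

    elixir_erl:elixir_to_erl(ok).
    %% => {atom, 0, ok}
    elixir_erl:elixir_to_erl([1, 2]).
    %% => {cons, 0, {integer, 0, 1}, {cons, 0, {integer, 0, 2}, {nil, 0}}}
    elixir_erl:elixir_to_erl(<<"hi">>).
    %% => {bin, 0, [{bin_element, 0, {string, 0, "hi"}, default, default}]}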
+ +add_definition(Meta, Body, {Head, Tail}) -> + case lists:keyfind(location, 1, Meta) of + {location, {F, L}} -> + %% Erlang's epp attempts to perform offsetting when generated is set to true + %% and that causes cover to fail when processing modules. Therefore we never + %% pass the generated annotation forward for file attributes. The function + %% will still be marked as generated though if that's the case. + FileMeta = erl_anno:set_generated(false, ?ann(Meta)), + Attr = {attribute, FileMeta, file, {elixir_utils:characters_to_list(F), L}}, + {Head, [Attr, Body | Tail]}; + false -> + {[Body | Head], Tail} + end. + +translate_definition(Kind, Meta, File, {Name, Arity}, Clauses) -> + ErlClauses = [translate_clause(Kind, Name, Arity, Clause, File) || Clause <- Clauses], + + case is_macro(Kind) of + true -> {function, ?ann(Meta), elixir_utils:macro_name(Name), Arity + 1, ErlClauses}; + false -> {function, ?ann(Meta), Name, Arity, ErlClauses} + end. + +translate_clause(Kind, Name, Arity, {Meta, Args, Guards, Body}, File) -> + S = definition_scope(Meta, Kind, Name, Arity, File), + + {TClause, TS} = elixir_erl_clauses:clause(Meta, + fun elixir_erl_pass:translate_args/2, Args, Body, Guards, S), + + case is_macro(Kind) of + true -> + Ann = ?ann(Meta), + FArgs = {var, Ann, '_@CALLER'}, + MClause = setelement(3, TClause, [FArgs | element(3, TClause)]), + + case TS#elixir_erl.caller of + true -> + FBody = {'match', Ann, + {'var', Ann, '__CALLER__'}, + elixir_erl:remote(Ann, elixir_env, linify, [{var, Ann, '_@CALLER'}]) + }, + setelement(5, MClause, [FBody | element(5, TClause)]); + false -> + MClause + end; + false -> + TClause + end. + +is_macro(defmacro) -> true; +is_macro(defmacrop) -> true; +is_macro(_) -> false. + +% Functions + +dynamic_form(#{module := Module, line := Line, file := File, attributes := Attributes, + definitions := Definitions, unreachable := Unreachable}) -> + {Def, Defmacro, Exports, Functions} = + split_definition(Definitions, File, Unreachable, [], [], [], {[], []}), + + Location = {elixir_utils:characters_to_list(elixir_utils:relative_to_cwd(File)), Line}, + Prefix = [{attribute, Line, file, Location}, + {attribute, Line, module, Module}, + {attribute, Line, compile, no_auto_import}], + + Forms0 = functions_form(Line, Module, Def, Defmacro, Exports, Functions), + Forms1 = attributes_form(Line, Attributes, Forms0), + {Prefix, Forms1, Defmacro, Unreachable}. + +functions_form(Line, Module, Def, Defmacro, Exports, Body) -> + {Spec, Info} = add_info_function(Line, Module, Def, Defmacro), + [{attribute, Line, export, lists:sort([{'__info__', 1} | Exports])}, Spec, Info | Body]. + +add_info_function(Line, Module, Def, Defmacro) -> + AllowedArgs = + lists:map(fun(Atom) -> {atom, Line, Atom} end, + [attributes, compile, exports, functions, macros, md5, module]), + + Spec = + {attribute, Line, spec, {{'__info__', 1}, + [{type, Line, 'fun', [ + {type, Line, product, [ + {type, Line, union, AllowedArgs} + ]}, + {type, Line, union, [ + {type, Line, atom, []}, + {type, Line, list, [ + {type, Line, union, [ + {type, Line, tuple, [ + {type, Line, atom, []}, + {type, Line, any, []} + ]}, + {type, Line, tuple, [ + {type, Line, atom, []}, + {type, Line, byte, []}, + {type, Line, integer, []} + ]} + ]} + ]} + ]} + ]}] + }}, + + Info = + {function, 0, '__info__', 1, [ + functions_info(Def), + macros_info(Defmacro), + others_info(Module) + ]}, + + {Spec, Info}. + +functions_info(Def) -> + {clause, 0, [{atom, 0, functions}], [], [elixir_erl:elixir_to_erl(lists:sort(Def))]}. 
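For illustration only (not part of the patch): the generated '__info__'/1 answers exactly the atoms listed in the spec above and delegates attributes/compile/exports/md5/module to erlang:get_module_info/2. With a hypothetical compiled module 'Elixir.Sample' defining hello/0 and hello/1 and no public macros:

    'Elixir.Sample':'__info__'(functions).
    %% => [{hello, 0}, {hello, 1}]
    'Elixir.Sample':'__info__'(macros).
    %% => []
    'Elixir.Sample':'__info__'(module).
    %% => 'Elixir.Sample'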
+ +macros_info(Defmacro) -> + {clause, 0, [{atom, 0, macros}], [], [elixir_erl:elixir_to_erl(lists:sort(Defmacro))]}. + +others_info(Module) -> + Info = {call, 0, + {remote, 0, {atom, 0, erlang}, {atom, 0, get_module_info}}, + [{atom, 0, Module}, {var, 0, info}]}, + {clause, 0, [{var, 0, info}], [], [Info]}. + +% Types + +types_form(Data, Forms) -> + ExTypes = + take_type_spec(Data, type) ++ take_type_spec(Data, typep) ++ take_type_spec(Data, opaque), + + Types = + ['Elixir.Kernel.Typespec':translate_type(Kind, Expr, Caller) || {Kind, Expr, Caller} <- ExTypes], + + Fun = fun + ({{Kind, NameArity, Expr}, Line, true}, Acc) -> + [{attribute, Line, export_type, [NameArity]}, {attribute, Line, Kind, Expr} | Acc]; + ({{Kind, _NameArity, Expr}, Line, false}, Acc) -> + [{attribute, Line, Kind, Expr} | Acc] + end, + + lists:foldl(Fun, Forms, Types). + +% Specs + +specs_form(Data, Defmacro, Unreachable, Forms) -> + Specs = + ['Elixir.Kernel.Typespec':translate_spec(Kind, Expr, Caller) || + {Kind, Expr, Caller} <- take_type_spec(Data, spec)], + + Callbacks = + ['Elixir.Kernel.Typespec':translate_spec(Kind, Expr, Caller) || + {Kind, Expr, Caller} <- take_type_spec(Data, callback)], + + Macrocallbacks = + ['Elixir.Kernel.Typespec':translate_spec(Kind, Expr, Caller) || + {Kind, Expr, Caller} <- take_type_spec(Data, macrocallback)], + + Optional = lists:flatten(take_type_spec(Data, optional_callbacks)), + SpecsForms = specs_form(spec, Specs, Unreachable, [], Defmacro, Forms), + specs_form(callback, Callbacks ++ Macrocallbacks, [], Optional, + [NameArity || {{_, NameArity, _}, _} <- Macrocallbacks], SpecsForms). + +specs_form(_Kind, [], _Unreacheable, _Optional, _Macros, Forms) -> + Forms; +specs_form(Kind, Entries, Unreachable, Optional, Macros, Forms) -> + Map = + lists:foldl(fun({{_, NameArity, Spec}, Line}, Acc) -> + case lists:member(NameArity, Unreachable) of + false -> + case Acc of + #{NameArity := List} -> Acc#{NameArity := [{Spec, Line} | List]}; + #{} -> Acc#{NameArity => [{Spec, Line}]} + end; + true -> + Acc + end + end, #{}, Entries), + + maps:fold(fun(NameArity, ExprsLines, Acc) -> + {Exprs, Lines} = lists:unzip(lists:reverse(ExprsLines)), + Line = lists:min(Lines), + + {Key, Value} = + case lists:member(NameArity, Macros) of + true -> + {Name, Arity} = NameArity, + {{elixir_utils:macro_name(Name), Arity + 1}, + lists:map(fun spec_for_macro/1, Exprs)}; + false -> + {NameArity, Exprs} + end, + + case lists:member(NameArity, Optional) of + true -> + [{attribute, Line, Kind, {Key, Value}}, + {attribute, Line, optional_callbacks, [Key]} | Acc]; + false -> + [{attribute, Line, Kind, {Key, Value}} | Acc] + end + end, Forms, Map). + +spec_for_macro({type, Line, 'fun', [{type, _, product, Args} | T]}) -> + NewArgs = [{type, Line, term, []} | Args], + {type, Line, 'fun', [{type, Line, product, NewArgs} | T]}; +spec_for_macro(Else) -> + Else. + +take_type_spec(Data, Key) -> + case ets:take(Data, Key) of + [{Key, Value, _, _}] -> Value; + [] -> [] + end. + +% Attributes + +attributes_form(Line, Attributes, Forms) -> + Fun = fun({Key, Value}, Acc) -> + [{attribute, Line, Key, Value} | Acc] + end, + lists:foldl(Fun, Forms, Attributes). + +% Loading forms + +load_form(#{line := Line, file := File, compile_opts := Opts} = Map, Data, Prefix, Forms, Specs) -> + {ExtraChunks, CompileOpts} = extra_chunks(Data, Line, debug_opts(Map, Specs, Opts)), + {_, Binary} = elixir_erl_compiler:forms(Prefix ++ Specs ++ Forms, File, CompileOpts), + add_beam_chunks(Binary, ExtraChunks). 
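For illustration only (not part of the patch): spec_for_macro/1 above prepends a term() argument to a macro's spec, mirroring the extra '_@CALLER' argument that MACRO- definitions receive. For instance (line 10 chosen arbitrarily):

    spec_for_macro({type, 10, 'fun', [{type, 10, product, [{type, 10, integer, []}]},
                                      {type, 10, atom, []}]}).
    %% => {type, 10, 'fun', [{type, 10, product, [{type, 10, term, []},
    %%                                            {type, 10, integer, []}]},
    %%                       {type, 10, atom, []}]}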
+ +debug_opts(Map, Specs, Opts) -> + case {supports_debug_tuple(), include_debug_opts(Opts)} of + {true, true} -> [{debug_info, {?MODULE, {elixir_v1, Map, Specs}}}]; + {true, false} -> [{debug_info, {?MODULE, none}}]; + {false, true} -> [debug_info]; + {false, false} -> [] + end. + +include_debug_opts(Opts) -> + case proplists:get_value(debug_info, Opts) of + true -> true; + false -> false; + undefined -> elixir_compiler:get_opt(debug_info) + end. + +supports_debug_tuple() -> + case erlang:system_info(otp_release) of + "18" -> false; + "19" -> false; + _ -> true + end. + +extra_chunks(Data, Line, Opts) -> + Supported = supports_extra_chunks_option(), + case docs_chunk(Data, Line, elixir_compiler:get_opt(docs)) of + [] -> {[], Opts}; + Chunks when Supported -> {[], [{extra_chunks, Chunks} | Opts]}; + Chunks -> {Chunks, Opts} + end. + +supports_extra_chunks_option() -> + case erlang:system_info(otp_release) of + "18" -> false; + "19" -> false; + _ -> true + end. + +docs_chunk(Data, Line, true) -> + ChunkData = term_to_binary({elixir_docs_v1, [ + {docs, get_docs(Data)}, + {moduledoc, get_moduledoc(Line, Data)}, + {callback_docs, get_callback_docs(Data)}, + {type_docs, get_type_docs(Data)} + ]}, [compressed]), + [{<<"ExDc">>, ChunkData}]; +docs_chunk(_, _, _) -> + []. + +get_moduledoc(Line, Data) -> + case ets:lookup_element(Data, moduledoc, 2) of + nil -> {Line, nil}; + {DocLine, Doc} -> {DocLine, Doc} + end. + +get_docs(Data) -> + lists:usort(ets:select(Data, [{{{doc, '$1'}, '$2', '$3', '$4', '$5'}, + [], [{{'$1', '$2', '$3', '$4', '$5'}}]}])). + +get_callback_docs(Data) -> + lists:usort(ets:select(Data, [{{{callbackdoc, '$1'}, '$2', '$3', '$4'}, + [], [{{'$1', '$2', '$3', '$4'}}]}])). + +get_type_docs(Data) -> + lists:usort(ets:select(Data, [{{{typedoc, '$1'}, '$2', '$3', '$4'}, + [], [{{'$1', '$2', '$3', '$4'}}]}])). diff --git a/lib/elixir/src/elixir_erl_clauses.erl b/lib/elixir/src/elixir_erl_clauses.erl new file mode 100644 index 00000000000..86a6b085e60 --- /dev/null +++ b/lib/elixir/src/elixir_erl_clauses.erl @@ -0,0 +1,210 @@ +%% Handle code related to args, guard and -> matching for case, +%% fn, receive and friends. try is handled in elixir_erl_try. +-module(elixir_erl_clauses). +-export([match/3, clause/6, clauses/3, guards/3, get_clauses/3, get_clauses/4]). +-include("elixir.hrl"). + +%% Get clauses under the given key. + +get_clauses(Key, Keyword, As) -> + get_clauses(Key, Keyword, As, false). +get_clauses(Key, Keyword, As, AllowNil) -> + case lists:keyfind(Key, 1, Keyword) of + {Key, Clauses} when is_list(Clauses) -> + [{As, Meta, Left, Right} || {'->', Meta, [Left, Right]} <- Clauses]; + {Key, nil} when AllowNil -> + []; + false -> + [] + end. + +%% Translate matches + +match(Fun, Args, #elixir_erl{context=Context, match_vars=MatchVars, + backup_vars=BackupVars, vars=Vars} = S) when Context /= match -> + {Result, NewS} = match(Fun, Args, S#elixir_erl{context=match, + match_vars=#{}, backup_vars=Vars}), + {Result, NewS#elixir_erl{context=Context, + match_vars=MatchVars, backup_vars=BackupVars}}; +match(Fun, Args, S) -> Fun(Args, S). + +%% Translate clauses with args, guards and expressions + +clause(Meta, Fun, Args, Expr, Guards, S) when is_list(Meta) -> + {TArgs, SA} = match(Fun, Args, S#elixir_erl{extra_guards=[]}), + {TExpr, SE} = elixir_erl_pass:translate(Expr, + SA#elixir_erl{extra_guards=nil, export_vars=S#elixir_erl.export_vars}), + + Extra = SA#elixir_erl.extra_guards, + TGuards = guards(Guards, Extra, SA), + {{clause, ?ann(Meta), TArgs, TGuards, unblock(TExpr)}, SE}. 
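For illustration only (not part of the patch): get_clauses/3 above extracts the '->' clauses stored under a keyword key (do, else, 'catch', rescue, 'after') and tags each with the given kind, e.g.:

    get_clauses(do, [{do, [{'->', [], [[ok], 1]}]}], match).
    %% => [{match, [], [ok], 1}]
    get_clauses(else, [{do, []}], match).
    %% => []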
+ +% Translate/Extract guards from the given expression. + +guards(Guards, Extra, S) -> + SG = S#elixir_erl{context=guard, extra_guards=nil}, + + case Guards of + [] -> case Extra of [] -> []; _ -> [Extra] end; + _ -> [translate_guard(Guard, Extra, SG) || Guard <- Guards] + end. + +translate_guard(Guard, Extra, S) -> + [element(1, elixir_erl_pass:translate(Guard, S)) | Extra]. + +% Function for translating macros with match style like case and receive. + +clauses(Meta, Clauses, #elixir_erl{export_vars=CV} = S) -> + {TC, TS} = do_clauses(Meta, Clauses, S#elixir_erl{export_vars=#{}}), + {TC, TS#elixir_erl{export_vars=elixir_erl_var:merge_opt_vars(CV, TS#elixir_erl.export_vars)}}. + +do_clauses(_Meta, [], S) -> + {[], S}; + +do_clauses(Meta, DecoupledClauses, S) -> + % Transform tree just passing the variables counter forward + % and storing variables defined inside each clause. + Transformer = fun(X, {SAcc, VAcc}) -> + {TX, TS} = each_clause(X, SAcc), + {TX, {elixir_erl_var:mergec(S, TS), [TS#elixir_erl.export_vars | VAcc]}} + end, + + {TClauses, {TS, ReverseCV}} = + lists:mapfoldl(Transformer, {S, []}, DecoupledClauses), + + % Now get all the variables defined inside each clause + CV = lists:reverse(ReverseCV), + AllVars = lists:foldl(fun elixir_erl_var:merge_vars/2, #{}, CV), + + % Create a new scope that contains a list of all variables + % defined inside all the clauses. It returns this new scope and + % a list of tuples where the first element is the variable name, + % the second one is the new pointer to the variable and the third + % is the old pointer. + {FinalVars, FS} = lists:mapfoldl(fun({Key, Val}, Acc) -> + normalize_vars(Key, Val, Acc) + end, TS, maps:to_list(AllVars)), + + % Expand all clauses by adding a match operation at the end + % that defines variables missing in one clause to the others. + expand_clauses(?ann(Meta), TClauses, CV, FinalVars, [], FS). + +expand_clauses(Ann, [Clause | T], [ClauseVars | V], FinalVars, Acc, S) -> + case generate_match_vars(FinalVars, ClauseVars, [], []) of + {[], []} -> + expand_clauses(Ann, T, V, FinalVars, [Clause | Acc], S); + {Left, Right} -> + MatchExpr = generate_match(Ann, Left, Right), + ClauseExprs = element(5, Clause), + [Final | RawClauseExprs] = lists:reverse(ClauseExprs), + + % If the last sentence has a match clause, we need to assign its value + % in the variable list. If not, we insert the variable list before the + % final clause in order to keep it tail call optimized. + {FinalClauseExprs, FS} = case has_match_tuple(Final) of + true -> + case Final of + {match, _, {var, _, UserVarName} = UserVar, _} when UserVarName /= '_' -> + {[UserVar, MatchExpr, Final | RawClauseExprs], S}; + _ -> + {VarName, _, SS} = elixir_erl_var:build('_', S), + StorageVar = {var, Ann, VarName}, + StorageExpr = {match, Ann, StorageVar, Final}, + {[StorageVar, MatchExpr, StorageExpr | RawClauseExprs], SS} + end; + false -> + {[Final, MatchExpr | RawClauseExprs], S} + end, + + FinalClause = setelement(5, Clause, lists:reverse(FinalClauseExprs)), + expand_clauses(Ann, T, V, FinalVars, [FinalClause | Acc], FS) + end; + +expand_clauses(_Ann, [], [], _FinalVars, Acc, S) -> + {lists:reverse(Acc), S}. + +% Handle each key/value clause pair and translate them accordingly. 
+ +each_clause({match, Meta, [Condition], Expr}, S) -> + {Arg, Guards} = elixir_utils:extract_guards(Condition), + clause(Meta, fun elixir_erl_pass:translate_args/2, [Arg], Expr, Guards, S); + +each_clause({expr, Meta, [Condition], Expr}, S) -> + {TCondition, SC} = elixir_erl_pass:translate(Condition, S), + {TExpr, SB} = elixir_erl_pass:translate(Expr, SC#elixir_erl{export_vars = S#elixir_erl.export_vars}), + {{clause, ?ann(Meta), [TCondition], [], unblock(TExpr)}, SB}. + +% Check if the given expression is a match tuple. +% This is a small optimization to allow us to change +% existing assignments instead of creating new ones every time. + +has_match_tuple({'receive', _, _, _, _}) -> + true; +has_match_tuple({'receive', _, _}) -> + true; +has_match_tuple({'case', _, _, _}) -> + true; +has_match_tuple({match, _, _, _}) -> + true; +has_match_tuple({'fun', _, {clauses, _}}) -> + false; +has_match_tuple(H) when is_tuple(H) -> + has_match_tuple(tuple_to_list(H)); +has_match_tuple(H) when is_list(H) -> + lists:any(fun has_match_tuple/1, H); +has_match_tuple(_) -> false. + +% Normalize the given var between clauses +% by picking one value as reference and retrieving +% its previous value. + +normalize_vars(Key, {Ref, Counter, _Safe}, + #elixir_erl{vars=Vars, export_vars=ClauseVars} = S) -> + Expr = + case maps:find(Key, Vars) of + {ok, {PrevRef, _, _}} -> + {var, 0, PrevRef}; + error -> + {atom, 0, nil} + end, + + %% TODO: For v2.0, we will never export unsafe vars but + %% we need to consider if we want to raise or a emit a warning. + %% Such a warning should be applied consistently to the language + %% (for example, case/try/receive/fn/etc). + Value = {Ref, Counter, false}, + + VS = S#elixir_erl{ + vars=maps:put(Key, Value, Vars), + export_vars=maps:put(Key, Value, ClauseVars) + }, + + {{Key, Value, Expr}, VS}. + +% Generate match vars by checking if they were updated +% or not and assigning the previous value. + +generate_match_vars([{Key, {Value, _, _}, Expr} | T], ClauseVars, Left, Right) -> + case maps:find(Key, ClauseVars) of + {ok, {Value, _, _}} -> + generate_match_vars(T, ClauseVars, Left, Right); + {ok, {Clause, _, _}} -> + generate_match_vars(T, ClauseVars, + [{var, 0, Value} | Left], + [{var, 0, Clause} | Right]); + error -> + generate_match_vars(T, ClauseVars, + [{var, 0, Value} | Left], [Expr | Right]) + end; + +generate_match_vars([], _ClauseVars, Left, Right) -> + {Left, Right}. + +generate_match(Ann, [Left], [Right]) -> + {match, Ann, Left, Right}; + +generate_match(Ann, LeftVars, RightVars) -> + {match, Ann, {tuple, Ann, LeftVars}, {tuple, Ann, RightVars}}. + +unblock({'block', _, Exprs}) -> Exprs; +unblock(Exprs) -> [Exprs]. diff --git a/lib/elixir/src/elixir_erl_compiler.erl b/lib/elixir/src/elixir_erl_compiler.erl new file mode 100644 index 00000000000..42ed3554bbd --- /dev/null +++ b/lib/elixir/src/elixir_erl_compiler.erl @@ -0,0 +1,166 @@ +-module(elixir_erl_compiler). +-export([forms/3, noenv_forms/3]). + +forms(Forms, File, Opts) -> + compile(fun compile:forms/2, Forms, File, Opts). + +noenv_forms(Forms, File, Opts) -> + compile(fun compile:noenv_forms/2, Forms, File, Opts). + +compile(Fun, Forms, File, Opts) when is_list(Forms), is_list(Opts), is_binary(File) -> + Source = elixir_utils:characters_to_list(File), + case Fun(Forms, [return, {source, Source} | Opts]) of + {ok, Module, Binary, Warnings} -> + format_warnings(Opts, Warnings), + {Module, Binary}; + {error, Errors, Warnings} -> + format_warnings(Opts, Warnings), + format_errors(Errors) + end. 
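For illustration only (not part of the patch): forms/3 above compiles a list of Erlang abstract forms and returns {Module, Beam} on success, routing errors and warnings through elixir_errors. A minimal sketch with a hypothetical module and source name:

    Forms = [{attribute, 1, module, sample},
             {attribute, 1, export, [{id, 1}]},
             {function, 1, id, 1, [{clause, 1, [{var, 1, 'X'}], [], [{var, 1, 'X'}]}]}],
    {sample, Beam} = elixir_erl_compiler:forms(Forms, <<"sample.erl">>, []),
    {module, sample} = code:load_binary(sample, "sample.erl", Beam),
    1 = sample:id(1).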
+ +format_errors([]) -> + exit({nocompile, "compilation failed but no error was raised"}); +format_errors(Errors) -> + lists:foreach(fun ({File, Each}) -> + BinFile = elixir_utils:characters_to_binary(File), + lists:foreach(fun(Error) -> handle_file_error(BinFile, Error) end, Each) + end, Errors). + +format_warnings(Opts, Warnings) -> + NoWarnNoMatch = proplists:get_value(nowarn_nomatch, Opts, false), + lists:foreach(fun ({File, Each}) -> + BinFile = elixir_utils:characters_to_binary(File), + lists:foreach(fun(Warning) -> + handle_file_warning(NoWarnNoMatch, BinFile, Warning) + end, Each) + end, Warnings). + +%% Handle warnings from Erlang land + +%% Ignore nomatch warnings +handle_file_warning(true, _File, {_Line, sys_core_fold, nomatch_guard}) -> ok; +handle_file_warning(true, _File, {_Line, sys_core_fold, {nomatch_shadow, _}}) -> ok; + +%% Ignore always +handle_file_warning(_, _File, {_Line, sys_core_fold, useless_building}) -> ok; + +%% This is an Erlang bug, it considers {tuple, _}.call to always fail +handle_file_warning(_, _File, {_Line, v3_kernel, bad_call}) -> ok; + +%% We handle unused local warnings ourselves +handle_file_warning(_, _File, {_Line, erl_lint, {unused_function, _}}) -> ok; + +%% Ignore unused vars at "weird" lines (<= 0) +handle_file_warning(_, _File, {Line, erl_lint, {unused_var, _Var}}) when Line =< 0 -> ok; + +%% Ignore shadowed and exported vars as we guarantee no conflicts ourselves +handle_file_warning(_, _File, {_Line, erl_lint, {shadowed_var, _Var, _Where}}) -> ok; +handle_file_warning(_, _File, {_Line, erl_lint, {exported_var, _Var, _Where}}) -> ok; + +handle_file_warning(_, File, {Line, erl_lint, {undefined_behaviour, Module}}) -> + case elixir_compiler:get_opt(internal) of + true -> + ok; + false -> + elixir_errors:warn(Line, File, ["behaviour ", elixir_aliases:inspect(Module), " is undefined"]) + end; + +handle_file_warning(_, File, {Line, Module, Desc}) -> + Message = format_error(Module, Desc), + elixir_errors:warn(Line, File, Message). + +%% Handle warnings + +handle_file_error(File, {beam_validator, Rest}) -> + elixir_errors:form_error([{line, 0}], File, beam_validator, Rest); +handle_file_error(File, {Line, Module, Desc}) -> + Message = format_error(Module, Desc), + elixir_errors:compile_error([{line, Line}], File, Message). + +%% Custom formatting + +%% Normalize formatting of functions +format_error(erl_lint, {undefined_function, {F, A}}) -> + io_lib:format("undefined function ~ts/~B", [F, A]); + +%% Normalize formatting of specs +format_error(erl_lint, {spec_fun_undefined, {M, F, A}}) -> + io_lib:format("spec for undefined function ~ts.~ts/~B", [elixir_aliases:inspect(M), F, A]); + +%% TODO: Remove this clause when we depend only on Erlang 19. 
+format_error(erl_lint, {bittype_mismatch, Val1, Val2, Kind}) -> + Desc = "conflict in ~s specification for bit field: \"~p\" and \"~p\"", + io_lib:format(Desc, [Kind, Val1, Val2]); + +%% Make no_effect clauses pretty +format_error(sys_core_fold, {no_effect, {erlang, F, A}}) -> + {Fmt, Args} = case erl_internal:comp_op(F, A) of + true -> {"use of operator ~ts has no effect", [translate_comp_op(F)]}; + false -> + case erl_internal:bif(F, A) of + false -> {"the call to :erlang.~ts/~B has no effect", [F, A]}; + true -> {"the call to ~ts/~B has no effect", [F, A]} + end + end, + io_lib:format(Fmt, Args); + +%% Rewrite undefined behaviour to check for protocols +format_error(erl_lint, {undefined_behaviour_func, {Fun, Arity}, Module}) -> + {DefKind, Def, DefArity} = + case atom_to_list(Fun) of + "MACRO-" ++ Rest -> {macro, list_to_atom(Rest), Arity - 1}; + _ -> {function, Fun, Arity} + end, + + Kind = protocol_or_behaviour(Module), + Raw = "undefined ~ts ~ts ~ts/~B (for ~ts ~ts)", + io_lib:format(Raw, [Kind, DefKind, Def, DefArity, Kind, elixir_aliases:inspect(Module)]); + +%% Rewrite nomatch_guard to be more generic it can happen inside if, unless, etc +format_error(sys_core_fold, nomatch_guard) -> + "this check/guard will always yield the same result"; + +%% Properly format other unused vars +format_error(erl_lint, {unused_var, Var}) -> + ["variable \"", format_var(Var), "\" is unused"]; + +%% Properly format keys using inspect. +format_error(v3_core, {map_key_repeated, Key}) -> + io_lib:format("key ~ts will be overridden in map", ['Elixir.Kernel':inspect(Key)]); + +%% Handle literal eval failures +format_error(sys_core_fold, {eval_failure, Error}) -> + #{'__struct__' := Struct} = 'Elixir.Exception':normalize(error, Error), + ["this expression will fail with ", elixir_aliases:inspect(Struct)]; + +format_error([], Desc) -> + io_lib:format("~p", [Desc]); + +format_error(Module, Desc) -> + Module:format_error(Desc). + +%% Helpers + +format_var(Var) -> + lists:takewhile(fun(X) -> X /= $@ end, atom_to_list(Var)). + +protocol_or_behaviour(Module) -> + case is_protocol(Module) of + true -> protocol; + false -> behaviour + end. + +is_protocol(Module) -> + case code:ensure_loaded(Module) of + {module, _} -> + erlang:function_exported(Module, '__protocol__', 1) andalso + Module:'__protocol__'(module) == Module; + {error, _} -> + false + end. + +translate_comp_op('/=') -> '!='; +translate_comp_op('=<') -> '<='; +translate_comp_op('=:=') -> '==='; +translate_comp_op('=/=') -> '!=='; +translate_comp_op(Other) -> Other. diff --git a/lib/elixir/src/elixir_erl_for.erl b/lib/elixir/src/elixir_erl_for.erl new file mode 100644 index 00000000000..3aad6c36cce --- /dev/null +++ b/lib/elixir/src/elixir_erl_for.erl @@ -0,0 +1,285 @@ +-module(elixir_erl_for). +-export([translate/4]). +-include("elixir.hrl"). 
+ +translate(Meta, Args, Return, S) -> + {AccName, _, SA} = elixir_erl_var:build('_', S), + {VarName, _, SV} = elixir_erl_var:build('_', SA), + + Ann = ?ann(Meta), + Acc = {var, Ann, AccName}, + Var = {var, Ann, VarName}, + + {Cases, [{do, Expr} | Opts]} = elixir_utils:split_last(Args), + + {TInto, SI} = + case lists:keyfind(into, 1, Opts) of + {into, Into} -> elixir_erl_pass:translate(Into, SV); + false when Return -> {{nil, Ann}, SV}; + false -> {false, SV} + end, + + {TCases, SC} = translate_gen(Meta, Cases, [], SI), + {TExpr, SE} = elixir_erl_pass:translate(wrap_expr(Expr, TInto), SC), + SF = elixir_erl_var:mergec(SI, SE), + + case comprehension_expr(TInto, TExpr) of + {inline, TIntoExpr} -> + {build_inline(Ann, TCases, TIntoExpr, TInto, Var, Acc, SE), SF}; + {into, TIntoExpr} -> + build_into(Ann, TCases, TIntoExpr, TInto, Var, Acc, SF) + end. + +%% In case we have no return, we wrap the expression +%% in a block that returns nil. +wrap_expr(Expr, false) -> {'__block__', [], [Expr, nil]}; +wrap_expr(Expr, _) -> Expr. + +translate_gen(ForMeta, [{'<-', Meta, [Left, Right]} | T], Acc, S) -> + {TLeft, TRight, TFilters, TT, TS} = translate_gen(Meta, Left, Right, T, S), + TAcc = [{enum, Meta, TLeft, TRight, TFilters} | Acc], + translate_gen(ForMeta, TT, TAcc, TS); +translate_gen(ForMeta, [{'<<>>', _, [{'<-', Meta, [Left, Right]}]} | T], Acc, S) -> + {TLeft, TRight, TFilters, TT, TS} = translate_gen(Meta, Left, Right, T, S), + TAcc = [{bin, Meta, TLeft, TRight, TFilters} | Acc], + translate_gen(ForMeta, TT, TAcc, TS); +translate_gen(_ForMeta, [], Acc, S) -> + {lists:reverse(Acc), S}. + +translate_gen(_Meta, Left, Right, T, S) -> + {TRight, SR} = elixir_erl_pass:translate(Right, S), + {LeftArgs, LeftGuards} = elixir_utils:extract_guards(Left), + {TLeft, SL} = elixir_erl_clauses:match(fun elixir_erl_pass:translate/2, LeftArgs, + SR#elixir_erl{extra=pin_guard, extra_guards=[]}), + + TLeftGuards = elixir_erl_clauses:guards(LeftGuards, [], SL), + ExtraGuards = [{nil, X} || X <- SL#elixir_erl.extra_guards], + SF = SL#elixir_erl{extra=S#elixir_erl.extra, extra_guards=nil}, + + {TT, {TFilters, TS}} = translate_filters(T, SF), + + %% The list of guards is kept in reverse order + Guards = TFilters ++ translate_guards(TLeftGuards) ++ ExtraGuards, + {TLeft, TRight, Guards, TT, TS}. + +translate_guards([]) -> + []; +translate_guards([[Guards]]) -> + [{nil, Guards}]; +translate_guards([[Left], [Right] | Rest]) -> + translate_guards([[{op, element(2, Left), 'orelse', Left, Right}] | Rest]). + +translate_filters(T, S) -> + {Filters, Rest} = collect_filters(T, []), + {Rest, lists:mapfoldr(fun translate_filter/2, S, Filters)}. + +translate_filter(Filter, S) -> + {TFilter, TS} = elixir_erl_pass:translate(Filter, S), + case elixir_utils:returns_boolean(Filter) of + true -> + {{nil, TFilter}, TS}; + false -> + {Name, _, VS} = elixir_erl_var:build('_', TS), + {{{var, 0, Name}, TFilter}, VS} + end. + +collect_filters([{'<-', _, [_, _]} | _] = T, Acc) -> + {Acc, T}; +collect_filters([{'<<>>', _, [{'<-', _, [_, _]}]} | _] = T, Acc) -> + {Acc, T}; +collect_filters([H | T], Acc) -> + collect_filters(T, [H | Acc]); +collect_filters([], Acc) -> + {Acc, []}. + +build_inline(Ann, Clauses, Expr, Into, _Var, Acc, S) -> + case lists:all(fun(Clause) -> element(1, Clause) == bin end, Clauses) of + true -> build_comprehension(Ann, Clauses, Expr, Into); + false -> build_reduce(Clauses, Expr, Into, Acc, S) + end. 
+ +build_into(Ann, Clauses, Expr, {map, _, []} = Into, _Var, Acc, S) -> + {Key, SK} = build_var(Ann, S), + {Val, SV} = build_var(Ann, SK), + MapExpr = + {block, Ann, [ + {match, Ann, {tuple, Ann, [Key, Val]}, Expr}, + {call, Ann, {remote, Ann, {atom, Ann, maps}, {atom, Ann, put}}, [Key, Val, Acc]} + ]}, + {build_reduce_clause(Clauses, MapExpr, Into, Acc, SV), SV}; + +build_into(Ann, Clauses, Expr, Into, Fun, Acc, S) -> + {Kind, SK} = build_var(Ann, S), + {Reason, SR} = build_var(Ann, SK), + {Stack, ST} = build_var(Ann, SR), + {Done, SD} = build_var(Ann, ST), + + IntoExpr = {call, Ann, Fun, [Acc, pair(Ann, cont, Expr)]}, + MatchExpr = {match, Ann, + {tuple, Ann, [Acc, Fun]}, + elixir_erl:remote(Ann, 'Elixir.Collectable', into, [Into]) + }, + + TryExpr = + {'try', Ann, + [build_reduce_clause(Clauses, IntoExpr, Acc, Acc, SD)], + [{clause, Ann, + [Done], + [], + [{call, Ann, Fun, [Done, {atom, Ann, done}]}]}], + [{clause, Ann, + [{tuple, Ann, [Kind, Reason, {var, Ann, '_'}]}], + [], + [{match, Ann, Stack, elixir_erl:remote(Ann, erlang, get_stacktrace, [])}, + {call, Ann, Fun, [Acc, {atom, Ann, halt}]}, + elixir_erl:remote(Ann, erlang, raise, [Kind, Reason, Stack])]}], + []}, + + {{block, Ann, [MatchExpr, TryExpr]}, SD}. + +%% Helpers + +build_reduce(Clauses, Expr, false, Acc, S) -> + build_reduce_clause(Clauses, Expr, {nil, 0}, Acc, S); +build_reduce(Clauses, Expr, {nil, Ann} = Into, Acc, S) -> + ListExpr = {cons, Ann, Expr, Acc}, + elixir_erl:remote(Ann, lists, reverse, + [build_reduce_clause(Clauses, ListExpr, Into, Acc, S)]); +build_reduce(Clauses, Expr, {bin, _, _} = Into, Acc, S) -> + {bin, Ann, Elements} = Expr, + BinExpr = {bin, Ann, [{bin_element, Ann, Acc, default, [bitstring]} | Elements]}, + build_reduce_clause(Clauses, BinExpr, Into, Acc, S). + +build_reduce_clause([{enum, Meta, Left, Right, Filters} | T], Expr, Arg, Acc, S) -> + Ann = ?ann(Meta), + True = build_reduce_clause(T, Expr, Acc, Acc, S), + False = Acc, + Generated = erl_anno:set_generated(true, Ann), + + Clauses0 = + case is_var(Left) of + true -> []; + false -> + [{clause, Generated, + [{var, Ann, '_'}, Acc], [], + [False]}] + end, + + Clauses1 = + [{clause, Ann, + [Left, Acc], [], + [join_filters(Generated, Filters, True, False)]} | Clauses0], + + Args = [Right, Arg, {'fun', Ann, {clauses, Clauses1}}], + elixir_erl:remote(Ann, 'Elixir.Enum', reduce, Args); + +build_reduce_clause([{bin, Meta, Left, Right, Filters} | T], Expr, Arg, Acc, S) -> + Ann = ?ann(Meta), + Generated = erl_anno:set_generated(true, Ann), + {Tail, ST} = build_var(Ann, S), + {Fun, SF} = build_var(Ann, ST), + + True = build_reduce_clause(T, Expr, Acc, Acc, SF), + False = Acc, + + {bin, _, Elements} = Left, + + BinMatch = + {bin, Ann, Elements ++ [{bin_element, Ann, Tail, default, [bitstring]}]}, + NoVarMatch = + {bin, Ann, no_var(Elements) ++ [{bin_element, Ann, Tail, default, [bitstring]}]}, + + Clauses = + [{clause, Ann, + [BinMatch, Acc], [], + [{call, Ann, Fun, [Tail, join_filters(Generated, Filters, True, False)]}]}, + {clause, Generated, + [NoVarMatch, Acc], [], + [{call, Ann, Fun, [Tail, False]}]}, + {clause, Generated, + [{bin, Ann, []}, Acc], [], + [Acc]}, + {clause, Generated, + [Tail, {var, Ann, '_'}], [], + [elixir_erl:remote(Ann, erlang, error, [pair(Ann, badarg, Tail)])]}], + + {call, Ann, + {named_fun, Ann, element(3, Fun), Clauses}, + [Right, Arg]}; + +build_reduce_clause([], Expr, _Arg, _Acc, _S) -> + Expr. + +is_var({var, _, _}) -> true; +is_var(_) -> false. + +pair(Ann, Atom, Arg) -> + {tuple, Ann, [{atom, Ann, Atom}, Arg]}. 
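For illustration only (not part of the patch): when a comprehension has an Enum generator and collects into a list (TInto = {nil, _}), build_reduce/5 above emits abstract code roughly equivalent to the sketch below, shown for a single generator with no filters and a pattern that is not a bare variable (Right, Pattern and Expr are placeholders):

    lists:reverse('Elixir.Enum':reduce(Right, [], fun(Pattern, Acc) -> [Expr | Acc];
                                                     (_, Acc) -> Acc
                                                  end)).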
+ +build_var(Ann, S) -> + {Name, _, ST} = elixir_erl_var:build('_', S), + {{var, Ann, Name}, ST}. + +no_var(Elements) -> + [{bin_element, Ann, no_var_expr(Expr), Size, Types} || + {bin_element, Ann, Expr, Size, Types} <- Elements]. +no_var_expr({var, Ann, _}) -> + {var, Ann, '_'}. + +build_comprehension(Ann, Clauses, Expr, false) -> + {lc, Ann, Expr, comprehension_clause(Clauses)}; +build_comprehension(Ann, Clauses, Expr, Into) -> + {comprehension_kind(Into), Ann, Expr, comprehension_clause(Clauses)}. + +comprehension_clause([{Kind, Meta, Left, Right, Filters} | T]) -> + Ann = ?ann(Meta), + [{comprehension_generator(Kind), Ann, Left, Right}] ++ + comprehension_filter(Ann, Filters) ++ + comprehension_clause(T); +comprehension_clause([]) -> + []. + +comprehension_kind({nil, _}) -> lc; +comprehension_kind({bin, _, []}) -> bc. + +comprehension_generator(enum) -> generate; +comprehension_generator(bin) -> b_generate. + +comprehension_expr({bin, _, []}, {bin, _, _} = Expr) -> + {inline, Expr}; +comprehension_expr({bin, Ann, []}, Expr) -> + BinExpr = {bin, Ann, [{bin_element, Ann, Expr, default, [bitstring]}]}, + {inline, BinExpr}; +comprehension_expr({nil, _}, Expr) -> + {inline, Expr}; +comprehension_expr(false, Expr) -> + {inline, Expr}; +comprehension_expr(_, Expr) -> + {into, Expr}. + +comprehension_filter(Ann, Filters) -> + [join_filter(Ann, Filter, {atom, Ann, true}, {atom, Ann, false}) || + Filter <- lists:reverse(Filters)]. + +join_filters(_Ann, [], True, _False) -> + True; +join_filters(Ann, [H | T], True, False) -> + lists:foldl(fun(Filter, Acc) -> + join_filter(Ann, Filter, Acc, False) + end, join_filter(Ann, H, True, False), T). + +join_filter(Ann, {nil, Filter}, True, False) -> + {'case', Ann, Filter, [ + {clause, Ann, [{atom, Ann, true}], [], [True]}, + {clause, Ann, [{atom, Ann, false}], [], [False]} + ]}; +join_filter(Ann, {Var, Filter}, True, False) -> + Guard = + {op, Ann, 'orelse', + {op, Ann, '==', Var, {atom, Ann, false}}, + {op, Ann, '==', Var, {atom, Ann, nil}}}, + + {'case', Ann, Filter, [ + {clause, Ann, [Var], [[Guard]], [False]}, + {clause, Ann, [{var, Ann, '_'}], [], [True]} + ]}. diff --git a/lib/elixir/src/elixir_erl_pass.erl b/lib/elixir/src/elixir_erl_pass.erl new file mode 100644 index 00000000000..c8dd06aee83 --- /dev/null +++ b/lib/elixir/src/elixir_erl_pass.erl @@ -0,0 +1,509 @@ +%% Translate Elixir quoted expressions to Erlang Abstract Format. +-module(elixir_erl_pass). +-export([translate/2, translate_arg/3, translate_args/2]). +-import(elixir_erl_var, [mergev/2, mergec/2]). +-include("elixir.hrl"). 
+ +%% = + +translate({'=', Meta, [{'_', _, Atom}, Right]}, S) when is_atom(Atom) -> + {TRight, SR} = translate(Right, S), + {{match, ?ann(Meta), {var, ?ann(Meta), '_'}, TRight}, SR}; + +translate({'=', Meta, [Left, Right]}, S) -> + {TRight, SR} = translate(Right, S), + {TLeft, SL} = elixir_erl_clauses:match(fun translate/2, Left, SR), + {{match, ?ann(Meta), TLeft, TRight}, SL}; + +%% Containers + +translate({'{}', Meta, Args}, S) when is_list(Args) -> + {TArgs, SE} = translate_args(Args, S), + {{tuple, ?ann(Meta), TArgs}, SE}; + +translate({'%{}', Meta, Args}, S) when is_list(Args) -> + translate_map(Meta, Args, S); + +translate({'%', Meta, [Left, Right]}, S) -> + translate_struct(Meta, Left, Right, S); + +translate({'<<>>', Meta, Args}, S) when is_list(Args) -> + translate_bitstring(Meta, Args, S); + +%% Blocks + +translate({'__block__', Meta, Args}, S) when is_list(Args) -> + {TArgs, SA} = translate_block(Args, [], S), + {{block, ?ann(Meta), TArgs}, SA}; + +%% Compilation environment macros + +translate({'__CALLER__', Meta, Atom}, S) when is_atom(Atom) -> + {{var, ?ann(Meta), '__CALLER__'}, S#elixir_erl{caller=true}}; + +translate({'super', Meta, Args}, #elixir_erl{def={Kind, Name, _}} = S) -> + %% In the expanded AST, super is used to invoke a function + %% with the same name but possibly different arity. + {TArgs, SA} = translate_args(Args, S), + Ann = ?ann(Meta), + if + Kind == defmacro; Kind == defmacrop -> + MacroName = elixir_utils:macro_name(Name), + {{call, Ann, {atom, Ann, MacroName}, [{var, Ann, '_@CALLER'} | TArgs]}, SA}; + Kind == def; Kind == defp -> + {{call, Ann, {atom, Ann, Name}, TArgs}, SA} + end; + +%% Functions + +translate({'&', Meta, [{'/', _, [{{'.', _, [Remote, Fun]}, _, []}, Arity]}]}, S) + when is_atom(Fun), is_integer(Arity) -> + {TRemote, SR} = translate(Remote, S), + Ann = ?ann(Meta), + TFun = {atom, Ann, Fun}, + TArity = {integer, Ann, Arity}, + {{'fun', Ann, {function, TRemote, TFun, TArity}}, SR}; +translate({'&', Meta, [{'/', _, [{Fun, _, Atom}, Arity]}]}, S) + when is_atom(Fun), is_atom(Atom), is_integer(Arity) -> + {{'fun', ?ann(Meta), {function, Fun, Arity}}, S}; + +translate({fn, Meta, Clauses}, S) -> + Transformer = fun({'->', CMeta, [ArgsWithGuards, Expr]}, Acc) -> + {Args, Guards} = elixir_utils:extract_splat_guards(ArgsWithGuards), + {TClause, TS } = elixir_erl_clauses:clause(CMeta, fun translate_fn_match/2, + Args, Expr, Guards, Acc), + {TClause, elixir_erl_var:mergec(S, TS)} + end, + {TClauses, NS} = lists:mapfoldl(Transformer, S, Clauses), + {{'fun', ?ann(Meta), {clauses, TClauses}}, NS}; + +%% Cond + +translate({'cond', CondMeta, [[{do, Clauses}]]}, S) -> + [{'->', Meta, [[Condition], Body]} = H | T] = lists:reverse(Clauses), + + Case = + case Condition of + X when is_atom(X) and (X /= false) and (X /= nil) -> + build_cond_clauses(T, Body, Meta); + _ -> + Error = {{'.', Meta, [erlang, error]}, [], [cond_clause]}, + build_cond_clauses([H | T], Error, Meta) + end, + translate(replace_case_meta(CondMeta, Case), S); + +%% Case + +translate({'case', Meta, [Expr, Opts]}, S) -> + ShouldExportVars = proplists:get_value(export_vars, Meta, true), + translate_case(ShouldExportVars, Meta, Expr, Opts, S); + +%% Try + +translate({'try', Meta, [Opts]}, S) -> + SN = S#elixir_erl{extra=nil}, + Do = proplists:get_value('do', Opts, nil), + {TDo, SB} = translate(Do, SN), + + Catch = [Tuple || {X, _} = Tuple <- Opts, X == 'rescue' orelse X == 'catch'], + {TCatch, SC} = elixir_erl_try:clauses(Meta, Catch, mergec(SN, SB)), + + {TAfter, SA} = case 
lists:keyfind('after', 1, Opts) of + {'after', After} -> + {TBlock, SAExtracted} = translate(After, mergec(SN, SC)), + {unblock(TBlock), SAExtracted}; + false -> + {[], mergec(SN, SC)} + end, + + Else = elixir_erl_clauses:get_clauses(else, Opts, match), + {TElse, SE} = elixir_erl_clauses:clauses(Meta, Else, mergec(SN, SA)), + {{'try', ?ann(Meta), unblock(TDo), TElse, TCatch, TAfter}, mergec(S, SE)}; + +%% Receive + +translate({'receive', Meta, [Opts]}, S) -> + Do = elixir_erl_clauses:get_clauses(do, Opts, match, true), + + case lists:keyfind('after', 1, Opts) of + false -> + {TClauses, SC} = elixir_erl_clauses:clauses(Meta, Do, S), + {{'receive', ?ann(Meta), TClauses}, SC}; + _ -> + After = elixir_erl_clauses:get_clauses('after', Opts, expr), + {TClauses, SC} = elixir_erl_clauses:clauses(Meta, Do ++ After, S), + {FClauses, TAfter} = elixir_utils:split_last(TClauses), + {_, _, [FExpr], _, FAfter} = TAfter, + {{'receive', ?ann(Meta), FClauses, FExpr, FAfter}, SC} + end; + +%% Comprehensions + +translate({for, Meta, [_ | _] = Args}, S) -> + elixir_erl_for:translate(Meta, Args, true, S); + +%% Variables + +translate({'^', Meta, [{Name, VarMeta, Kind}]}, #elixir_erl{context=match, file=File} = S) when is_atom(Name), is_atom(Kind) -> + Tuple = {Name, var_kind(VarMeta, Kind)}, + {ok, {Value, _Counter, Safe}} = maps:find(Tuple, S#elixir_erl.backup_vars), + elixir_erl_var:warn_underscored_var_access(VarMeta, File, Name), + elixir_erl_var:warn_unsafe_var(VarMeta, File, Name, Safe), + + PAnn = ?ann(Meta), + PVar = {var, PAnn, Value}, + + case S#elixir_erl.extra of + pin_guard -> + {TVar, TS} = elixir_erl_var:translate(VarMeta, Name, var_kind(VarMeta, Kind), S), + Guard = {op, PAnn, '=:=', PVar, TVar}, + {TVar, TS#elixir_erl{extra_guards=[Guard | TS#elixir_erl.extra_guards]}}; + _ -> + {PVar, S} + end; + +translate({'_', Meta, Kind}, #elixir_erl{context=match} = S) when is_atom(Kind) -> + {{var, ?ann(Meta), '_'}, S}; + +translate({Name, Meta, Kind}, S) when is_atom(Name), is_atom(Kind) -> + elixir_erl_var:translate(Meta, Name, var_kind(Meta, Kind), S); + +%% Local calls + +translate({Name, Meta, Args}, S) when is_atom(Name), is_list(Meta), is_list(Args) -> + Ann = ?ann(Meta), + {TArgs, NS} = translate_args(Args, S), + {{call, Ann, {atom, Ann, Name}, TArgs}, NS}; + +%% Remote calls + +translate({{'.', _, [Left, Right]}, Meta, []}, S) + when is_tuple(Left), is_atom(Right), is_list(Meta) -> + {TLeft, SL} = translate(Left, S), + {Var, _, SV} = elixir_erl_var:build('_', SL), + + Ann = ?ann(Meta), + Generated = erl_anno:set_generated(true, Ann), + TRight = {atom, Ann, Right}, + TVar = {var, Ann, Var}, + TError = {tuple, Ann, [{atom, Ann, badkey}, TRight, TVar]}, + + {{'case', Generated, TLeft, [ + {clause, Generated, + [{map, Ann, [{map_field_exact, Ann, TRight, TVar}]}], + [], + [TVar]}, + {clause, Generated, + [TVar], + [[elixir_erl:remote(Generated, erlang, is_map, [TVar])]], + [elixir_erl:remote(Ann, erlang, error, [TError])]}, + {clause, Generated, + [TVar], + [], + [{call, Generated, {remote, Generated, TVar, TRight}, []}]} + ]}, SV}; + +translate({{'.', _, [Left, Right]}, Meta, Args}, S) + when (is_tuple(Left) orelse is_atom(Left)), is_atom(Right), is_list(Meta), is_list(Args) -> + translate_remote(Left, Right, Meta, Args, S); + +%% Anonymous function calls + +translate({{'.', _, [Expr]}, Meta, Args}, S) when is_list(Args) -> + {TExpr, SE} = translate(Expr, S), + {TArgs, SA} = translate_args(Args, mergec(S, SE)), + {{call, ?ann(Meta), TExpr, TArgs}, mergev(SE, SA)}; + +%% Literals + +translate(List, 
S) when is_list(List) -> + Fun = case S#elixir_erl.context of + match -> fun translate/2; + _ -> fun(X, Acc) -> translate_arg(X, Acc, S) end + end, + translate_list(List, Fun, S, []); + +translate({Left, Right}, S) -> + {TArgs, SE} = translate_args([Left, Right], S), + {{tuple, 0, TArgs}, SE}; + +translate(Other, S) -> + {elixir_erl:elixir_to_erl(Other), S}. + +%% Helpers + +translate_case(true, Meta, Expr, Opts, S) -> + Clauses = elixir_erl_clauses:get_clauses(do, Opts, match), + {TExpr, SE} = translate(Expr, S), + {TClauses, SC} = elixir_erl_clauses:clauses(Meta, Clauses, SE#elixir_erl{extra=nil}), + {{'case', ?ann(Meta), TExpr, TClauses}, SC#elixir_erl{extra=SE#elixir_erl.extra}}; +translate_case(false, Meta, Expr, Opts, S) -> + {Case, SC} = translate_case(true, Meta, Expr, Opts, S#elixir_erl{extra=nil}), + {Case, elixir_erl_var:mergec(S, SC)}. + +translate_list([{'|', _, [_, _]=Args}], Fun, Acc, List) -> + {[TLeft, TRight], TAcc} = lists:mapfoldl(Fun, Acc, Args), + {build_list([TLeft | List], TRight), TAcc}; +translate_list([H | T], Fun, Acc, List) -> + {TH, TAcc} = Fun(H, Acc), + translate_list(T, Fun, TAcc, [TH | List]); +translate_list([], _Fun, Acc, List) -> + {build_list(List, {nil, 0}), Acc}. + +build_list([H | T], Acc) -> + build_list(T, {cons, 0, H, Acc}); +build_list([], Acc) -> + Acc. + +var_kind(Meta, Kind) -> + case lists:keyfind(counter, 1, Meta) of + {counter, Counter} -> Counter; + false -> Kind + end. + +%% Pack a list of expressions from a block. +unblock({'block', _, Exprs}) -> Exprs; +unblock(Expr) -> [Expr]. + +translate_fn_match(Arg, S) -> + {TArg, TS} = translate_args(Arg, S#elixir_erl{extra=pin_guard}), + {TArg, TS#elixir_erl{extra=S#elixir_erl.extra}}. + +%% Translate args + +translate_arg(Arg, Acc, S) when is_number(Arg); is_atom(Arg); is_binary(Arg); is_pid(Arg); is_function(Arg) -> + {TArg, _} = translate(Arg, S), + {TArg, Acc}; +translate_arg(Arg, Acc, S) -> + {TArg, TAcc} = translate(Arg, mergec(S, Acc)), + {TArg, mergev(Acc, TAcc)}. + +translate_args(Args, #elixir_erl{context=match} = S) -> + lists:mapfoldl(fun translate/2, S, Args); + +translate_args(Args, S) -> + lists:mapfoldl(fun(X, Acc) -> translate_arg(X, Acc, S) end, S, Args). + +%% Translate blocks + +translate_block([], Acc, S) -> + {lists:reverse(Acc), S}; +translate_block([H], Acc, S) -> + {TH, TS} = translate(H, S), + translate_block([], [TH | Acc], TS); +translate_block([{'__block__', _Meta, Args} | T], Acc, S) when is_list(Args) -> + translate_block(Args ++ T, Acc, S); +translate_block([{for, Meta, [_ | _] = Args} | T], Acc, S) -> + {TH, TS} = elixir_erl_for:translate(Meta, Args, false, S), + translate_block(T, [TH | Acc], TS); +translate_block([{'=', _, [{'_', _, Ctx}, {for, Meta, [_ | _] = Args}]} | T], Acc, S) when is_atom(Ctx) -> + {TH, TS} = elixir_erl_for:translate(Meta, Args, false, S), + translate_block(T, [TH | Acc], TS); +translate_block([H | T], Acc, S) -> + {TH, TS} = translate(H, S), + translate_block(T, [TH | Acc], TS). + +%% Cond + +build_cond_clauses([{'->', NewMeta, [[Condition], Body]} | T], Acc, OldMeta) -> + {NewCondition, Truthy, Other} = build_truthy_clause(NewMeta, Condition, Body), + Falsy = {'->', OldMeta, [[Other], Acc]}, + Case = {'case', NewMeta, [NewCondition, [{do, [Truthy, Falsy]}]]}, + build_cond_clauses(T, Case, NewMeta); +build_cond_clauses([], Acc, _) -> + Acc. + +replace_case_meta(Meta, {'case', _, Args}) -> + {'case', Meta, Args}; +replace_case_meta(_Meta, Other) -> + Other. 
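For illustration only (not part of the patch): build_cond_clauses/3 above lowers cond into nested cases, folding from the last clause backwards. Roughly, and only as a sketch of the resulting shape (the rewrite happens on the quoted AST before translation; C1, C2, B1..B3 and Truthy are placeholders):

    %% cond with clauses C1 -> B1, C2 -> B2, true -> B3 behaves like
    case C1 of
      Truthy1 -> B1;
      _ ->
        case C2 of
          Truthy2 -> B2;
          _ -> B3
        end
    end
    %% where each Truthy pattern is either a literal true (when the condition is known
    %% to return a boolean) or a fresh variable guarded by Var /= nil andalso Var /= false,
    %% as decided by build_truthy_clause/3.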
+ +build_truthy_clause(Meta, Condition, Body) -> + case returns_boolean(Condition, Body) of + {NewCondition, NewBody} -> + {NewCondition, {'->', Meta, [[true], NewBody]}, false}; + false -> + Var = {'cond', [], ?var_context}, + Head = {'when', [], [Var, + {{'.', [], [erlang, 'andalso']}, [], [ + {{'.', [], [erlang, '/=']}, [], [Var, nil]}, + {{'.', [], [erlang, '/=']}, [], [Var, false]} + ]} + ]}, + {Condition, {'->', Meta, [[Head], Body]}, {'_', [], nil}} + end. + +%% In case a variable is defined to match in a condition +%% but a condition returns boolean, we can replace the +%% variable directly by the boolean result. +returns_boolean({'=', _, [{Var, _, Ctx}, Condition]}, {Var, _, Ctx}) when is_atom(Var), is_atom(Ctx) -> + case elixir_utils:returns_boolean(Condition) of + true -> {Condition, true}; + false -> false + end; + +%% For all other cases, we check the condition but +%% return both condition and body untouched. +returns_boolean(Condition, Body) -> + case elixir_utils:returns_boolean(Condition) of + true -> {Condition, Body}; + false -> false + end. + +%% Maps and structs + +translate_map(Meta, [{'|', _Meta, [Update, Assocs]}], S) -> + {TUpdate, US} = translate_arg(Update, S, S), + translate_map(Meta, Assocs, {ok, TUpdate}, US); +translate_map(Meta, Assocs, S) -> + translate_map(Meta, Assocs, none, S). + +translate_struct(Meta, Name, {'%{}', _, [{'|', _, [Update, Assocs]}]}, S) -> + Ann = ?ann(Meta), + Generated = erl_anno:set_generated(true, Ann), + {VarName, _, VS} = elixir_erl_var:build('_', S), + + Var = {var, Ann, VarName}, + Map = {map, Ann, [{map_field_exact, Ann, {atom, Ann, '__struct__'}, {atom, Ann, Name}}]}, + + Match = {match, Ann, Var, Map}, + Error = {tuple, Ann, [{atom, Ann, badstruct}, {atom, Ann, Name}, Var]}, + + {TUpdate, US} = translate_arg(Update, VS, VS), + {TAssocs, TS} = translate_map(Meta, Assocs, {ok, Var}, US), + + {{'case', Generated, TUpdate, [ + {clause, Ann, [Match], [], [TAssocs]}, + {clause, Generated, [Var], [], [elixir_erl:remote(Ann, erlang, error, [Error])]} + ]}, TS}; +translate_struct(Meta, Name, {'%{}', _, Assocs}, S) -> + translate_map(Meta, Assocs ++ [{'__struct__', Name}], none, S). + +translate_map(Meta, Assocs, TUpdate, #elixir_erl{extra=Extra} = S) -> + {Op, KeyFun, ValFun} = translate_key_val_op(TUpdate, S), + Ann = ?ann(Meta), + + {TArgs, SA} = lists:mapfoldl(fun({Key, Value}, Acc) -> + {TKey, Acc1} = KeyFun(Key, Acc), + {TValue, Acc2} = ValFun(Value, Acc1#elixir_erl{extra=Extra}), + {{Op, ?ann(Meta), TKey, TValue}, Acc2} + end, S, Assocs), + + build_map(Ann, TUpdate, TArgs, SA). + +translate_key_val_op(_TUpdate, #elixir_erl{extra=map_key}) -> + {map_field_assoc, + fun(X, Acc) -> translate(X, Acc#elixir_erl{extra=map_key}) end, + fun translate/2}; +translate_key_val_op(_TUpdate, #elixir_erl{context=match}) -> + {map_field_exact, + fun(X, Acc) -> translate(X, Acc#elixir_erl{extra=map_key}) end, + fun translate/2}; +translate_key_val_op(TUpdate, S) -> + KS = S#elixir_erl{extra=map_key}, + Op = if TUpdate == none -> map_field_assoc; true -> map_field_exact end, + {Op, + fun(X, Acc) -> translate_arg(X, Acc, KS) end, + fun(X, Acc) -> translate_arg(X, Acc, S) end}. + +build_map(Ann, {ok, TUpdate}, TArgs, SA) -> {{map, Ann, TUpdate, TArgs}, SA}; +build_map(Ann, none, TArgs, SA) -> {{map, Ann, TArgs}, SA}. 
+ +%% Translate bitstrings + +translate_bitstring(Meta, Args, S) -> + case S#elixir_erl.context of + match -> build_bitstr(fun translate/2, Args, Meta, S, []); + _ -> build_bitstr(fun(X, Acc) -> translate_arg(X, Acc, S) end, Args, Meta, S, []) + end. + +build_bitstr(Fun, [{'::', _, [H, V]} | T], Meta, S, Acc) -> + {Size, Types} = extract_bit_info(V, S#elixir_erl{context=nil}), + build_bitstr(Fun, T, Meta, S, Acc, H, Size, Types); +build_bitstr(_Fun, [], Meta, S, Acc) -> + {{bin, ?ann(Meta), lists:reverse(Acc)}, S}. + +build_bitstr(Fun, T, Meta, S, Acc, H, default, Types) when is_binary(H) -> + Element = + case requires_utf_conversion(Types) of + false -> + %% See explanation in elixir_erl:elixir_to_erl/1 to + %% know why we can simply convert the binary to a list. + {bin_element, ?ann(Meta), {string, 0, binary_to_list(H)}, default, default}; + true -> + %% UTF types require conversion. + {bin_element, ?ann(Meta), {string, 0, elixir_utils:characters_to_list(H)}, default, Types} + end, + build_bitstr(Fun, T, Meta, S, [Element | Acc]); + +build_bitstr(Fun, T, Meta, S, Acc, H, Size, Types) -> + case Fun(H, S) of + {{bin, _, Elements}, NS} when S#elixir_erl.context == match -> + build_bitstr(Fun, T, Meta, NS, lists:reverse(Elements, Acc)); + {Expr, NS} -> + build_bitstr(Fun, T, Meta, NS, [{bin_element, ?ann(Meta), Expr, Size, Types} | Acc]) + end. + +requires_utf_conversion([bitstring | _]) -> false; +requires_utf_conversion([binary | _]) -> false; +requires_utf_conversion(_) -> true. + +extract_bit_info({'-', _, [L, {size, _, [Size]}]}, S) -> + {extract_bit_size(Size, S), extract_bit_type(L, [])}; +extract_bit_info({size, _, [Size]}, S) -> + {extract_bit_size(Size, S), []}; +extract_bit_info(L, _S) -> + {default, extract_bit_type(L, [])}. + +extract_bit_size(Size, S) -> + {TSize, _} = translate(Size, S), + TSize. + +extract_bit_type({'-', _, [L, R]}, Acc) -> + extract_bit_type(L, extract_bit_type(R, Acc)); +extract_bit_type({unit, _, [Arg]}, Acc) -> + [{unit, Arg} | Acc]; +extract_bit_type({Other, _, []}, Acc) -> + [Other | Acc]. + +%% Optimizations that are specific to Erlang and change +%% the format of the AST. + +translate_remote('Elixir.Access' = Mod, get, Meta, [Container, Value], S) -> + Ann = ?ann(Meta), + {TArgs, SA} = translate_args([Container, Value, nil], S), + {elixir_erl:remote(Ann, Mod, get, TArgs), SA}; +translate_remote('Elixir.String.Chars', to_string, Meta, [Arg], S) -> + {TArg, TS} = translate(Arg, S), + {VarName, _, VS} = elixir_erl_var:build(rewrite, TS), + + Generated = erl_anno:set_generated(true, ?ann(Meta)), + Var = {var, Generated, VarName}, + Guard = elixir_erl:remote(Generated, erlang, is_binary, [Var]), + Slow = elixir_erl:remote(Generated, 'Elixir.String.Chars', to_string, [Var]), + Fast = Var, + + {{'case', Generated, TArg, [ + {clause, Generated, [Var], [[Guard]], [Fast]}, + {clause, Generated, [Var], [], [Slow]} + ]}, VS}; +translate_remote(Left, Right, Meta, Args, S) -> + {TLeft, SL} = translate(Left, S), + {TArgs, SA} = translate_args(Args, mergec(S, SL)), + + Ann = ?ann(Meta), + Arity = length(Args), + TRight = {atom, Ann, Right}, + SC = mergev(SL, SA), + + %% Rewrite Erlang function calls as operators so they + %% work on guards, matches and so on. + case (Left == erlang) andalso elixir_utils:guard_op(Right, Arity) of + true -> + case TArgs of + [TOne] -> {{op, Ann, Right, TOne}, SC}; + [TOne, TTwo] -> {{op, Ann, Right, TOne, TTwo}, SC} + end; + false -> + {{call, Ann, {remote, Ann, TLeft, TRight}, TArgs}, SC} + end. 
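For illustration only (not part of the patch): the guard_op rewrite at the end of translate_remote/5 keeps calls usable inside guards and matches by emitting operator nodes for erlang operators, assuming elixir_utils:guard_op/2 classifies them as guard-safe (the comparison operators are). For a remote call to erlang:'=='/2 with translated arguments TA and TB:

    {op, Ann, '==', TA, TB}
    %% rather than a remote call node
    %% {call, Ann, {remote, Ann, ..., ...}, [TA, TB]}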
diff --git a/lib/elixir/src/elixir_try.erl b/lib/elixir/src/elixir_erl_try.erl similarity index 57% rename from lib/elixir/src/elixir_try.erl rename to lib/elixir/src/elixir_erl_try.erl index 73729a70ec9..9f5ea78e33f 100644 --- a/lib/elixir/src/elixir_try.erl +++ b/lib/elixir/src/elixir_erl_try.erl @@ -1,78 +1,71 @@ --module(elixir_try). --export([clauses/4]). +-module(elixir_erl_try). +-export([clauses/3]). -include("elixir.hrl"). -clauses(_Meta, Clauses, Return, S) -> - Catch = elixir_clauses:get_pairs('catch', Clauses, 'catch'), - Rescue = elixir_clauses:get_pairs(rescue, Clauses, rescue), - reduce_clauses(Rescue ++ Catch, [], S, Return, S). +clauses(_Meta, Args, S) -> + Catch = elixir_erl_clauses:get_clauses('catch', Args, 'catch'), + Rescue = elixir_erl_clauses:get_clauses(rescue, Args, rescue), + reduce_clauses(Rescue ++ Catch, [], S, S). -reduce_clauses([H|T], Acc, SAcc, Return, S) -> - {TH, TS} = each_clause(H, Return, SAcc), - reduce_clauses(T, TH ++ Acc, elixir_scope:mergec(S, TS), Return, S); -reduce_clauses([], Acc, SAcc, _Return, _S) -> +reduce_clauses([H | T], Acc, SAcc, S) -> + {TH, TS} = each_clause(H, SAcc), + reduce_clauses(T, TH ++ Acc, elixir_erl_var:mergec(S, TS), S); +reduce_clauses([], Acc, SAcc, _S) -> {lists:reverse(Acc), SAcc}. -each_clause({'catch', Meta, Raw, Expr}, Return, S) -> - {Args, Guards} = elixir_clauses:extract_splat_guards(Raw), +each_clause({'catch', Meta, Raw, Expr}, S) -> + {Args, Guards} = elixir_utils:extract_splat_guards(Raw), Final = case Args of [X] -> [throw, X, {'_', Meta, nil}]; - [X,Y] -> [X, Y, {'_', Meta, nil}] + [X, Y] -> [X, Y, {'_', Meta, nil}] end, Condition = [{'{}', Meta, Final}], - {TC, TS} = elixir_clauses:clause(?line(Meta), fun elixir_translator:translate_args/2, - Condition, Expr, Guards, Return, S), + {TC, TS} = elixir_erl_clauses:clause(Meta, fun elixir_erl_pass:translate_args/2, + Condition, Expr, Guards, S), {[TC], TS}; -each_clause({rescue, Meta, [{in, _, [Left, Right]}], Expr}, Return, S) -> - {VarName, _, CS} = elixir_scope:build_var('_', S), +each_clause({rescue, Meta, [{in, _, [Left, Right]}], Expr}, S) -> + {VarName, _, CS} = elixir_erl_var:build('_', S), Var = {VarName, Meta, nil}, {Parts, Safe, FS} = rescue_guards(Meta, Var, Right, CS), - - Body = - case Left of - {'_', _, Atom} when is_atom(Atom) -> - Expr; - _ -> - Normalized = - case Safe of - true -> Var; - false -> {{'.', Meta, ['Elixir.Exception', normalize]}, Meta, [error, Var]} - end, - prepend_to_block(Meta, {'=', Meta, [Left, Normalized]}, Expr) + Body = rescue_clause_body(Left, Expr, Safe, Var, Meta), + build_rescue(Meta, Parts, Body, FS); + +each_clause({rescue, Meta, [{VarName, _, Atom} = Var], Expr}, S) when is_atom(VarName), is_atom(Atom) -> + Body = rescue_clause_body(Var, Expr, false, Var, Meta), + build_rescue(Meta, _Parts = [{Var, []}], Body, S). + +rescue_clause_body({'_', _, Atom}, Expr, _Safe, _Var, _Meta) when is_atom(Atom) -> + Expr; +rescue_clause_body(Pattern, Expr, Safe, Var, Meta) -> + Normalized = + case Safe of + true -> Var; + false -> {{'.', Meta, ['Elixir.Exception', normalize]}, Meta, [error, Var]} end, - - build_rescue(Meta, Parts, Body, Return, FS); - -each_clause({rescue, Meta, _, _}, _Return, S) -> - elixir_errors:compile_error(Meta, S#elixir_scope.file, "invalid arguments for rescue in try"); - -each_clause({Key, Meta, _, _}, _Return, S) -> - elixir_errors:compile_error(Meta, S#elixir_scope.file, "invalid key ~ts in try", [Key]). + prepend_to_block(Meta, {'=', Meta, [Pattern, Normalized]}, Expr). 
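For illustration only (not part of the patch): a rescue clause is compiled into a catch clause over the {Kind, Payload, Stacktrace} triple with Kind fixed to error, and when the payload may be a raw Erlang error (the non-safe case above) the user's pattern is bound to Exception.normalize(:error, Payload) rather than to the raw payload. The generated clause head produced by build_rescue (in the hunk below) has the shape:

    {clause, Line,
      [{tuple, Line, [{atom, Line, error}, TMatch, {var, Line, '_'}]}],
      TGuards, TBody}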
%% Helpers -build_rescue(Meta, Parts, Body, Return, S) -> +build_rescue(Meta, Parts, Body, S) -> Matches = [Match || {Match, _} <- Parts], {{clause, Line, TMatches, _, TBody}, TS} = - elixir_clauses:clause(?line(Meta), fun elixir_translator:translate_args/2, - Matches, Body, [], Return, S), + elixir_erl_clauses:clause(Meta, fun elixir_erl_pass:translate_args/2, + Matches, Body, [], S), TClauses = [begin TArgs = [{tuple, Line, [{atom, Line, error}, TMatch, {var, Line, '_'}]}], - TGuards = elixir_clauses:guards(Line, Guards, [], TS), + TGuards = elixir_erl_clauses:guards(Guards, [], TS), {clause, Line, TArgs, TGuards, TBody} end || {TMatch, {_, Guards}} <- lists:zip(TMatches, Parts)], {TClauses, TS}. -%% Convert rescue clauses into guards. -rescue_guards(_, Var, {'_', _, _}, S) -> {[{Var, []}], false, S}; - +%% Convert rescue clauses ("var in [alias1, alias2]") into guards. rescue_guards(Meta, Var, Aliases, S) -> {Elixir, Erlang} = rescue_each_ref(Meta, Var, Aliases, [], [], S), @@ -80,7 +73,7 @@ rescue_guards(Meta, Var, Aliases, S) -> case Elixir of [] -> {[], S}; _ -> - {VarName, _, CS} = elixir_scope:build_var('_', S), + {VarName, _, CS} = elixir_erl_var:build('_', S), StructVar = {VarName, Meta, nil}, Map = {'%{}', Meta, [{'__struct__', StructVar}, {'__exception__', true}]}, Match = {'=', Meta, [Map, Var]}, @@ -100,16 +93,16 @@ rescue_guards(Meta, Var, Aliases, S) -> %% Matching of variables is done with Erlang exceptions is done in %% function for optimization. -rescue_each_ref(Meta, Var, [H|T], Elixir, Erlang, S) when is_atom(H) -> +rescue_each_ref(Meta, Var, [H | T], Elixir, Erlang, S) when is_atom(H) -> case erl_rescue_guard_for(Meta, Var, H) of - false -> rescue_each_ref(Meta, Var, T, [H|Elixir], Erlang, S); - Expr -> rescue_each_ref(Meta, Var, T, [H|Elixir], [Expr|Erlang], S) + false -> rescue_each_ref(Meta, Var, T, [H | Elixir], Erlang, S); + Expr -> rescue_each_ref(Meta, Var, T, [H | Elixir], [Expr | Erlang], S) end; rescue_each_ref(_, _, [], Elixir, Erlang, _) -> {Elixir, Erlang}. -%% Handle erlang rescue matches. +%% Handle Erlang rescue matches. erl_rescue_guard_for(Meta, Var, 'Elixir.UndefinedFunctionError') -> {erl(Meta, '=='), Meta, [Var, undef]}; @@ -146,6 +139,11 @@ erl_rescue_guard_for(Meta, Var, 'Elixir.CaseClauseError') -> erl_tuple_size(Meta, Var, 2), erl_record_compare(Meta, Var, case_clause)); +erl_rescue_guard_for(Meta, Var, 'Elixir.WithClauseError') -> + erl_and(Meta, + erl_tuple_size(Meta, Var, 2), + erl_record_compare(Meta, Var, with_clause)); + erl_rescue_guard_for(Meta, Var, 'Elixir.TryClauseError') -> erl_and(Meta, erl_tuple_size(Meta, Var, 2), @@ -156,6 +154,23 @@ erl_rescue_guard_for(Meta, Var, 'Elixir.BadStructError') -> erl_tuple_size(Meta, Var, 3), erl_record_compare(Meta, Var, badstruct)); +erl_rescue_guard_for(Meta, Var, 'Elixir.BadMapError') -> + erl_and(Meta, + erl_tuple_size(Meta, Var, 2), + erl_record_compare(Meta, Var, badmap)); + +erl_rescue_guard_for(Meta, Var, 'Elixir.BadBooleanError') -> + erl_and(Meta, + erl_tuple_size(Meta, Var, 3), + erl_record_compare(Meta, Var, badbool)); + +erl_rescue_guard_for(Meta, Var, 'Elixir.KeyError') -> + erl_and(Meta, + erl_or(Meta, + erl_tuple_size(Meta, Var, 2), + erl_tuple_size(Meta, Var, 3)), + erl_record_compare(Meta, Var, badkey)); + erl_rescue_guard_for(Meta, Var, 'Elixir.ArgumentError') -> erl_or(Meta, {erl(Meta, '=='), Meta, [Var, badarg]}, @@ -185,11 +200,11 @@ erl_record_compare(Meta, Var, Expr) -> ]}. 
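The erl_rescue_guard_for clauses (including the new WithClauseError, BadMapError, BadBooleanError and KeyError ones) are what let `rescue` catch errors raised as plain Erlang terms, such as badarg, as if they were Elixir exception structs. A hedged Elixir sketch of that mapping:

    try do
      # raises the Erlang badarg error, not an Elixir struct
      :erlang.binary_to_existing_atom("surely_not_an_existing_atom", :utf8)
    rescue
      ArgumentError -> :badarg_rescued_as_argument_error
    end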
prepend_to_block(_Meta, Expr, {'__block__', Meta, Args}) -> - {'__block__', Meta, [Expr|Args]}; + {'__block__', Meta, [Expr | Args]}; prepend_to_block(Meta, Expr, Args) -> {'__block__', Meta, [Expr, Args]}. erl(Meta, Op) -> {'.', Meta, [erlang, Op]}. -erl_or(Meta, Left, Right) -> {'__op__', Meta, ['orelse', Left, Right]}. -erl_and(Meta, Left, Right) -> {'__op__', Meta, ['andalso', Left, Right]}. +erl_or(Meta, Left, Right) -> {{'.', Meta, [erlang, 'orelse']}, Meta, [Left, Right]}. +erl_and(Meta, Left, Right) -> {{'.', Meta, [erlang, 'andalso']}, Meta, [Left, Right]}. diff --git a/lib/elixir/src/elixir_erl_var.erl b/lib/elixir/src/elixir_erl_var.erl new file mode 100644 index 00000000000..9723d801ca2 --- /dev/null +++ b/lib/elixir/src/elixir_erl_var.erl @@ -0,0 +1,223 @@ +%% Convenience functions used to manipulate scope and its variables. +-module(elixir_erl_var). +-export([translate/4, build/2, context_info/1, + load_binding/2, dump_binding/2, + mergev/2, mergec/2, merge_vars/2, merge_opt_vars/2, + warn_unsafe_var/4, warn_underscored_var_access/3, format_error/1 +]). +-include("elixir.hrl"). + +%% VAR HANDLING + +translate(Meta, Name, Kind, S) when is_atom(Kind); is_integer(Kind) -> + Ann = ?ann(Meta), + Tuple = {Name, Kind}, + Vars = S#elixir_erl.vars, + + {Current, Exists, Safe} = + case maps:find({Name, Kind}, Vars) of + {ok, {VarC, _, VarS}} -> {VarC, true, VarS}; + error -> {nil, false, true} + end, + + case S#elixir_erl.context of + match -> + MatchVars = S#elixir_erl.match_vars, + + case Exists andalso maps:get(Tuple, MatchVars, false) of + true -> + warn_underscored_var_repeat(Meta, S#elixir_erl.file, Name, Kind), + {{var, Ann, Current}, S}; + false -> + %% We attempt to give vars a nice name because we + %% still use the unused vars warnings from erl_lint. + %% + %% Once we move the warning to Elixir compiler, we + %% can name vars as _@COUNTER. + {NewVar, Counter, NS} = + if + Kind /= nil -> + build('_', S); + true -> + build(Name, S) + end, + + FS = NS#elixir_erl{ + vars=maps:put(Tuple, {NewVar, Counter, true}, Vars), + match_vars=maps:put(Tuple, true, MatchVars), + export_vars=case S#elixir_erl.export_vars of + nil -> nil; + EV -> maps:put(Tuple, {NewVar, Counter, true}, EV) + end + }, + + {{var, Ann, NewVar}, FS} + end; + _ when Exists -> + warn_underscored_var_access(Meta, S#elixir_erl.file, Name), + warn_unsafe_var(Meta, S#elixir_erl.file, Name, Safe), + {{var, Ann, Current}, S} + end. + +build(Key, #elixir_erl{counter=Counter} = S) -> + Cnt = + case maps:find(Key, Counter) of + {ok, Val} -> Val + 1; + error -> 1 + end, + {list_to_atom(atom_to_list(Key) ++ "@" ++ integer_to_list(Cnt)), + Cnt, + S#elixir_erl{counter=maps:put(Key, Cnt, Counter)}}. + +context_info(Kind) when Kind == nil; is_integer(Kind) -> ""; +context_info(Kind) -> io_lib:format(" (context ~ts)", [elixir_aliases:inspect(Kind)]). + +warn_underscored_var_repeat(Meta, File, Name, Kind) -> + Warn = should_warn(Meta), + case atom_to_list(Name) of + "_@" ++ _ -> + ok; %% Automatically generated variables + "_" ++ _ when Warn -> + elixir_errors:form_warn(Meta, File, ?MODULE, {unused_match, Name, Kind}); + _ -> + ok + end. + +warn_unsafe_var(Meta, File, Name, Safe) -> + Warn = should_warn(Meta), + if + (not Safe) and Warn -> + elixir_errors:form_warn(Meta, File, ?MODULE, {unsafe_var, Name}); + true -> + ok + end. 
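warn_underscored_var_repeat/4 above fires when an underscored variable is repeated in a pattern, since all occurrences still have to bind to the same value, which is rarely what the author intended. Illustrative Elixir (the commented line shows the warned form):

    # {_n, _n} = {1, 2}      # warns that "_n" appears more than once in a match
    {n, n} = {:same, :same}  # repeated non-underscored variables must bind to equal values
    n
    #=> :same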
+ +warn_underscored_var_access(Meta, File, Name) -> + Warn = should_warn(Meta), + case atom_to_list(Name) of + "_@" ++ _ -> + ok; %% Automatically generated variables + "_" ++ _ when Warn -> + elixir_errors:form_warn(Meta, File, ?MODULE, {underscore_var_access, Name}); + _ -> + ok + end. + +should_warn(Meta) -> + lists:keyfind(generated, 1, Meta) /= {generated, true}. + +%% SCOPE MERGING + +%% Receives two scopes and return a new scope based on +%% the second with their variables merged. + +mergev(S1, S2) -> + S2#elixir_erl{ + vars=merge_vars(S1#elixir_erl.vars, S2#elixir_erl.vars), + export_vars=merge_opt_vars(S1#elixir_erl.export_vars, S2#elixir_erl.export_vars) + }. + +%% Receives two scopes and return the first scope with +%% counters and flags from the later. + +mergec(S1, S2) -> + S1#elixir_erl{ + counter=S2#elixir_erl.counter, + caller=S2#elixir_erl.caller + }. + +%% Mergers. + +merge_vars(V, V) -> V; +merge_vars(V1, V2) -> + merge_maps(fun var_merger/3, V1, V2). + +merge_opt_vars(nil, _C2) -> nil; +merge_opt_vars(_C1, nil) -> nil; +merge_opt_vars(C, C) -> C; +merge_opt_vars(C1, C2) -> + merge_maps(fun var_merger/3, C1, C2). + +var_merger(_Var, {_, V1, _} = K1, {_, V2, _}) when V1 > V2 -> K1; +var_merger(_Var, _K1, K2) -> K2. + +merge_maps(Fun, Map1, Map2) -> + maps:fold(fun(K, V2, Acc) -> + V = + case maps:find(K, Acc) of + {ok, V1} -> Fun(K, V1, V2); + error -> V2 + end, + maps:put(K, V, Acc) + end, Map1, Map2). + +%% BINDINGS + +load_binding(Binding, Scope) -> + {NewBinding, NewKeys, NewVars, NewCounter} = load_binding(Binding, [], [], #{}, 0), + {NewBinding, NewKeys, Scope#elixir_erl{ + vars=NewVars, + counter=#{'_' => NewCounter} + }}. + +load_binding([{Key, Value} | T], Binding, Keys, Vars, Counter) -> + Actual = case Key of + {_Name, _Kind} -> Key; + Name when is_atom(Name) -> {Name, nil} + end, + InternalName = list_to_atom("_@" ++ integer_to_list(Counter)), + load_binding(T, + orddict:store(InternalName, Value, Binding), + ordsets:add_element(Actual, Keys), + maps:put(Actual, {InternalName, 0, true}, Vars), Counter + 1); +load_binding([], Binding, Keys, Vars, Counter) -> + {Binding, Keys, Vars, Counter}. + +dump_binding(Binding, #elixir_erl{vars=Vars}) -> + maps:fold(fun + ({Var, Kind} = Key, {InternalName, _, _}, Acc) when is_atom(Kind) -> + Actual = case Kind of + nil -> Var; + _ -> Key + end, + + Value = case orddict:find(InternalName, Binding) of + {ok, V} -> V; + error -> nil + end, + + orddict:store(Actual, Value, Acc); + (_, _, Acc) -> + Acc + end, [], Vars). + +%% Errors + +format_error({unused_match, Name, Kind}) -> + io_lib:format("the underscored variable \"~ts\"~ts appears more than once in a " + "match. This means the pattern will only match if all \"~ts\" bind " + "to the same value. If this is the intended behaviour, please " + "remove the leading underscore from the variable name, otherwise " + "give the variables different names", [Name, context_info(Kind), Name]); + +format_error({unsafe_var, Name}) -> + io_lib:format("the variable \"~ts\" is unsafe as it has been set inside " + "one of: case, cond, receive, if, and, or, &&, ||. " + "Please explicitly return the variable value instead. 
For example:\n\n" + " case integer do\n" + " 1 -> atom = :one\n" + " 2 -> atom = :two\n" + " end\n\n" + "should be written as\n\n" + " atom =\n" + " case integer do\n" + " 1 -> :one\n" + " 2 -> :two\n" + " end\n\n" + "Unsafe variable found at:", [Name]); + +format_error({underscore_var_access, Name}) -> + io_lib:format("the underscored variable \"~ts\" is used after being set. " + "A leading underscore indicates that the value of the variable " + "should be ignored. If this is intended please rename the " + "variable to remove the underscore", [Name]). diff --git a/lib/elixir/src/elixir_errors.erl b/lib/elixir/src/elixir_errors.erl index 84bbb6c7c3b..d6628228d4b 100644 --- a/lib/elixir/src/elixir_errors.erl +++ b/lib/elixir/src/elixir_errors.erl @@ -1,217 +1,151 @@ -% A bunch of helpers to help to deal with errors in Elixir source code. -% This is not exposed in the Elixir language. +%% A bunch of helpers to help to deal with errors in Elixir source code. +%% This is not exposed in the Elixir language. +%% +%% Notice this is also called by the Erlang backend, so we also support +%% the line number to be none (as it may happen in some erlang errors). -module(elixir_errors). -export([compile_error/3, compile_error/4, - form_error/4, parse_error/4, warn/2, warn/3, - handle_file_warning/2, handle_file_warning/3, handle_file_error/2]). + form_error/4, form_warn/4, parse_error/4, warn/1, warn/3]). -include("elixir.hrl"). --type line_or_meta() :: integer() | list(). +-spec warn(non_neg_integer() | none, unicode:chardata(), unicode:chardata()) -> ok. +warn(none, File, Warning) -> + warn(0, File, Warning); +warn(Line, File, Warning) when is_integer(Line), is_binary(File) -> + warn([Warning, "\n ", file_format(Line, File), $\n]). -warn(Warning) -> +-spec warn(unicode:chardata()) -> ok. +warn(Message) -> CompilerPid = get(elixir_compiler_pid), if CompilerPid =/= undefined -> elixir_code_server:cast({register_warning, CompilerPid}); - true -> false + true -> ok end, - io:put_chars(standard_error, Warning). + io:put_chars(standard_error, [warning_prefix(), Message, $\n]), + ok. -warn(Caller, Warning) -> - warn([Caller, "warning: ", Warning]). +warning_prefix() -> + case application:get_env(elixir, ansi_enabled) of + {ok, true} -> <<"\e[33mwarning: \e[0m">>; + _ -> <<"warning: ">> + end. -warn(Line, File, Warning) when is_integer(Line) -> - warn(file_format(Line, File, "warning: " ++ Warning)). +%% General forms handling. -%% Raised during expansion/translation/compilation. +-spec form_error(list(), binary(), module(), any()) -> no_return(). +form_error(Meta, File, Module, Desc) -> + compile_error(Meta, File, Module:format_error(Desc)). --spec form_error(line_or_meta(), binary(), module(), any()) -> no_return(). +-spec form_warn(list(), binary(), module(), any()) -> ok. +form_warn(Meta, File, Module, Desc) when is_list(Meta) -> + {MetaFile, MetaLine} = meta_location(Meta, File), + warn(MetaLine, MetaFile, Module:format_error(Desc)). -form_error(Meta, File, Module, Desc) -> - compile_error(Meta, File, format_error(Module, Desc)). +%% Compilation error. --spec compile_error(line_or_meta(), binary(), iolist()) -> no_return(). --spec compile_error(line_or_meta(), binary(), iolist(), list()) -> no_return(). +-spec compile_error(list(), binary(), binary() | unicode:charlist()) -> no_return(). +-spec compile_error(list(), binary(), string(), list()) -> no_return(). 
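For context, load_binding/2 and dump_binding/2 in elixir_erl_var above are the bridge between user-facing keyword bindings and the internal `_@N` variables; both directions can be observed from Elixir through Code.eval_string/2 (illustrative, result shape assumed for this version):

    {result, binding} = Code.eval_string("c = a + b", a: 1, b: 2)
    result   #=> 3
    binding  #=> [a: 1, b: 2, c: 3]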
+compile_error(Meta, File, Message) when is_binary(Message) -> + {MetaFile, MetaLine} = meta_location(Meta, File), + raise(MetaLine, MetaFile, 'Elixir.CompileError', Message); compile_error(Meta, File, Message) when is_list(Message) -> - raise(Meta, File, 'Elixir.CompileError', elixir_utils:characters_to_binary(Message)). + {MetaFile, MetaLine} = meta_location(Meta, File), + raise(MetaLine, MetaFile, 'Elixir.CompileError', + elixir_utils:characters_to_binary(Message)). compile_error(Meta, File, Format, Args) when is_list(Format) -> compile_error(Meta, File, io_lib:format(Format, Args)). -%% Raised on tokenizing/parsing +%% Tokenization parsing/errors. --spec parse_error(line_or_meta(), binary(), binary(), binary()) -> no_return(). - -parse_error(Meta, File, Error, <<>>) -> +-spec parse_error(non_neg_integer(), binary() | {binary(), binary()}, + binary(), binary()) -> no_return(). +parse_error(Line, File, Error, <<>>) -> Message = case Error of <<"syntax error before: ">> -> <<"syntax error: expression is incomplete">>; _ -> Error end, - raise(Meta, File, 'Elixir.TokenMissingError', Message); + raise(Line, File, 'Elixir.TokenMissingError', Message); -%% Show a nicer message for missing end tokens -parse_error(Meta, File, <<"syntax error before: ">>, <<"'end'">>) -> - raise(Meta, File, 'Elixir.SyntaxError', <<"unexpected token: end">>); - -%% Binaries are wrapped in [<<...>>], so we need to unwrap them -parse_error(Meta, File, Error, <<"[", _/binary>> = Full) when is_binary(Error) -> - Rest = - case binary:split(Full, <<"<<">>) of - [Lead, Token] -> - case binary:split(Token, <<">>">>) of - [Part, _] when Lead == <<$[>> -> Part; - _ -> <<$">> - end; - [_] -> - <<$">> - end, - raise(Meta, File, 'Elixir.SyntaxError', <>); +%% Show a nicer message for end of line +parse_error(Line, File, <<"syntax error before: ">>, <<"eol">>) -> + raise(Line, File, 'Elixir.SyntaxError', + <<"unexpectedly reached end of line. The current expression is invalid or incomplete">>); -%% Everything else is fine as is -parse_error(Meta, File, Error, Token) when is_binary(Error), is_binary(Token) -> - Message = <>, - raise(Meta, File, 'Elixir.SyntaxError', Message). 
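The new compile_error/3 clause accepting a binary goes straight to raising CompileError; any compile-time expansion failure surfaces this way from Elixir. A small hedged example (exact message wording may differ across versions):

    # Calling an undefined local at the top level fails during expansion:
    # Code.eval_string("undefined_fun()")
    #=> ** (CompileError) nofile:1: undefined function undefined_fun/0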
- -%% Handle warnings and errors (called during module compilation) - -%% Ignore on bootstrap -handle_file_warning(true, _File, {_Line, sys_core_fold, nomatch_guard}) -> []; -handle_file_warning(true, _File, {_Line, sys_core_fold, {nomatch_shadow, _}}) -> []; - -%% Ignore always -handle_file_warning(_, _File, {_Line, sys_core_fold, useless_building}) -> []; - -%% This is an Erlang bug, it considers {tuple, _}.call to always fail -handle_file_warning(_, _File, {_Line, v3_kernel, bad_call}) -> []; - -%% We handle unused local warnings ourselves -handle_file_warning(_, _File, {_Line, erl_lint, {unused_function, _}}) -> []; - -%% Make no_effect clauses pretty -handle_file_warning(_, File, {Line, sys_core_fold, {no_effect, {erlang, F, A}}}) -> - {Fmt, Args} = case erl_internal:comp_op(F, A) of - true -> {"use of operator ~ts has no effect", [translate_comp_op(F)]}; - false -> - case erl_internal:bif(F, A) of - false -> {"the call to :erlang.~ts/~B has no effect", [F,A]}; - true -> {"the call to ~ts/~B has no effect", [F,A]} - end +%% Show a nicer message for missing end tokens +parse_error(Line, File, <<"syntax error before: ">>, <<"'end'">>) -> + raise(Line, File, 'Elixir.SyntaxError', <<"unexpected token: end">>); + +%% Produce a human-readable message for errors before a sigil +parse_error(Line, File, <<"syntax error before: ">>, <<"{sigil,", _Rest/binary>> = Full) -> + {sigil, _, Sigil, [Content | _], _} = parse_erl_term(Full), + Content2 = case is_binary(Content) of + true -> Content; + false -> <<>> end, - Message = io_lib:format(Fmt, Args), - warn(Line, File, Message); - -%% Rewrite undefined behaviour to check for protocols -handle_file_warning(_, File, {Line,erl_lint,{undefined_behaviour_func,{Fun,Arity},Module}}) -> - {DefKind, Def, DefArity} = - case atom_to_list(Fun) of - "MACRO-" ++ Rest -> {macro, list_to_atom(Rest), Arity - 1}; - _ -> {function, Fun, Arity} - end, - - Kind = protocol_or_behaviour(Module), - Raw = "undefined ~ts ~ts ~ts/~B (for ~ts ~ts)", - Message = io_lib:format(Raw, [Kind, DefKind, Def, DefArity, Kind, elixir_aliases:inspect(Module)]), - warn(Line, File, Message); - -handle_file_warning(_, File, {Line,erl_lint,{undefined_behaviour,Module}}) -> - case elixir_compiler:get_opt(internal) of - true -> []; - false -> - Message = io_lib:format("behaviour ~ts undefined", [elixir_aliases:inspect(Module)]), - warn(Line, File, Message) - end; - -%% Ignore unused vars at "weird" lines (<= 0) -handle_file_warning(_, _File, {Line,erl_lint,{unused_var,_Var}}) when Line =< 0 -> - []; - -%% Ignore shadowed vars as we guarantee no conflicts ourselves -handle_file_warning(_, _File, {_Line,erl_lint,{shadowed_var,_Var,_Where}}) -> - []; - -%% Properly format other unused vars -handle_file_warning(_, File, {Line,erl_lint,{unused_var,Var}}) -> - Message = format_error(erl_lint, {unused_var, format_var(Var)}), - warn(Line, File, Message); - -%% Default behaviour -handle_file_warning(_, File, {Line,Module,Desc}) -> - Message = format_error(Module, Desc), - warn(Line, File, Message). - -handle_file_warning(File, Desc) -> - handle_file_warning(false, File, Desc). - --spec handle_file_error(file:filename_all(), {non_neg_integer(), module(), any()}) -> no_return(). 
- -handle_file_error(File, {Line,erl_lint,{unsafe_var,Var,{In,_Where}}}) -> - Translated = case In of - 'orelse' -> 'or'; - 'andalso' -> 'and'; - _ -> In + Message = <<"syntax error before: sigil \~", Sigil, " starting with content '", Content2/binary, "'">>, + raise(Line, File, 'Elixir.SyntaxError', Message); + +%% Aliases are wrapped in [''] +parse_error(Line, File, Error, <<"['", _/binary>> = Full) when is_binary(Error) -> + [AliasAtom] = parse_erl_term(Full), + Alias = atom_to_binary(AliasAtom, utf8), + raise(Line, File, 'Elixir.SyntaxError', <>); + +%% Binaries (and interpolation) are wrapped in [<<...>>] +parse_error(Line, File, Error, <<"[", _/binary>> = Full) when is_binary(Error) -> + Term = case parse_erl_term(Full) of + [H | _] when is_binary(H) -> <<$", H/binary, $">>; + _ -> <<$">> end, - Message = io_lib:format("cannot define variable ~ts inside ~ts", [format_var(Var), Translated]), - raise(Line, File, 'Elixir.CompileError', iolist_to_binary(Message)); + raise(Line, File, 'Elixir.SyntaxError', <>); + +%% Given a string prefix and suffix to insert the token inside the error message rather than append it +parse_error(Line, File, {ErrorPrefix, ErrorSuffix}, Token) when is_binary(ErrorPrefix), is_binary(ErrorSuffix), is_binary(Token) -> + Message = <>, + raise(Line, File, 'Elixir.SyntaxError', Message); + +%% Misplaced char tokens (e.g., {char, _, 97}) are translated by Erlang into +%% the char literal (i.e., the token in the previous example becomes $a), +%% because {char, _, _} is a valid Erlang token for an Erlang char literal. We +%% want to represent that token as ?a in the error, according to the Elixir +%% syntax. +parse_error(Line, File, <<"syntax error before: ">>, <<$$, Char/binary>>) -> + Message = <<"syntax error before: ?", Char/binary>>, + raise(Line, File, 'Elixir.SyntaxError', Message); -handle_file_error(File, {Line,erl_lint,{spec_fun_undefined,{M,F,A}}}) -> - Message = io_lib:format("spec for undefined function ~ts.~ts/~B", [elixir_aliases:inspect(M), F, A]), - raise(Line, File, 'Elixir.CompileError', iolist_to_binary(Message)); +%% Everything else is fine as is +parse_error(Line, File, Error, Token) when is_binary(Error), is_binary(Token) -> + Message = <>, + raise(Line, File, 'Elixir.SyntaxError', Message). -handle_file_error(File, {Line,Module,Desc}) -> - form_error(Line, File, Module, Desc). +%% Helper to parse terms which have been converted to binaries +parse_erl_term(Term) -> + {ok, Tokens, _} = erl_scan:string(binary_to_list(Term)), + {ok, Parsed} = erl_parse:parse_term(Tokens ++ [{dot, 1}]), + Parsed. %% Helpers -raise(Meta, File, Kind, Message) when is_list(Meta) -> - raise(?line(Meta), File, Kind, Message); +file_format(0, File) -> + io_lib:format("~ts", [elixir_utils:relative_to_cwd(File)]); + +file_format(Line, File) -> + io_lib:format("~ts:~w", [elixir_utils:relative_to_cwd(File), Line]). + +meta_location(Meta, File) -> + case elixir_utils:meta_location(Meta) of + {F, L} -> {F, L}; + nil -> {File, ?line(Meta)} + end. 
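These parse_error clauses only shape the message; from Elixir the same failures surface through Code.string_to_quoted/1 and its bang variant, which raises through parse_error/4. A hedged example:

    # Incomplete input comes back as an error tuple from the parser:
    {:error, {_line, _description, _token}} = Code.string_to_quoted("1 +")

    # The bang variant raises through parse_error/4 above:
    # Code.string_to_quoted!("1 +")
    #=> ** (TokenMissingError) nofile:1: syntax error: expression is incomplete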
raise(none, File, Kind, Message) -> raise(0, File, Kind, Message); - -raise(Line, File, Kind, Message) when is_integer(Line), is_binary(File) -> - %% Populate the stacktrace so we can raise it - try - throw(ok) - catch - ok -> ok - end, - Stacktrace = erlang:get_stacktrace(), +raise({Line, _, _}, File, Kind, Message) when is_integer(Line) -> + raise(Line, File, Kind, Message); +raise(Line, File, Kind, Message) when is_integer(Line), is_binary(File), is_binary(Message) -> + Stacktrace = try throw(ok) catch ok -> erlang:get_stacktrace() end, Exception = Kind:exception([{description, Message}, {file, File}, {line, Line}]), erlang:raise(error, Exception, tl(Stacktrace)). - -file_format(0, File, Message) when is_binary(File) -> - io_lib:format("~ts: ~ts~n", [elixir_utils:relative_to_cwd(File), Message]); - -file_format(Line, File, Message) when is_binary(File) -> - io_lib:format("~ts:~w: ~ts~n", [elixir_utils:relative_to_cwd(File), Line, Message]). - -format_var(Var) -> - list_to_atom(lists:takewhile(fun(X) -> X /= $@ end, atom_to_list(Var))). - -format_error([], Desc) -> - io_lib:format("~p", [Desc]); - -format_error(Module, Desc) -> - Module:format_error(Desc). - -protocol_or_behaviour(Module) -> - case is_protocol(Module) of - true -> protocol; - false -> behaviour - end. - -is_protocol(Module) -> - case code:ensure_loaded(Module) of - {module, _} -> - erlang:function_exported(Module, '__protocol__', 1) andalso - Module:'__protocol__'(name) == Module; - {error, _} -> - false - end. - -translate_comp_op('/=') -> '!='; -translate_comp_op('=<') -> '<='; -translate_comp_op('=:=') -> '==='; -translate_comp_op('=/=') -> '!=='; -translate_comp_op(Other) -> Other. diff --git a/lib/elixir/src/elixir_exp.erl b/lib/elixir/src/elixir_exp.erl deleted file mode 100644 index ba786b4b885..00000000000 --- a/lib/elixir/src/elixir_exp.erl +++ /dev/null @@ -1,571 +0,0 @@ --module(elixir_exp). --export([expand/2, expand_args/2, expand_arg/2]). --import(elixir_errors, [compile_error/3, compile_error/4]). --include("elixir.hrl"). 
- -%% = - -expand({'=', Meta, [Left, Right]}, E) -> - assert_no_guard_scope(Meta, '=', E), - {ERight, ER} = expand(Right, E), - {ELeft, EL} = elixir_exp_clauses:match(fun expand/2, Left, E), - {{'=', Meta, [ELeft, ERight]}, elixir_env:mergev(EL, ER)}; - -%% Literal operators - -expand({'{}', Meta, Args}, E) -> - {EArgs, EA} = expand_args(Args, E), - {{'{}', Meta, EArgs}, EA}; - -expand({'%{}', Meta, Args}, E) -> - elixir_map:expand_map(Meta, Args, E); - -expand({'%', Meta, [Left, Right]}, E) -> - elixir_map:expand_struct(Meta, Left, Right, E); - -expand({'<<>>', Meta, Args}, E) -> - elixir_bitstring:expand(Meta, Args, E); - -%% Other operators - -expand({'__op__', Meta, [_, _] = Args}, E) -> - {EArgs, EA} = expand_args(Args, E), - {{'__op__', Meta, EArgs}, EA}; - -expand({'__op__', Meta, [_, _, _] = Args}, E) -> - {EArgs, EA} = expand_args(Args, E), - {{'__op__', Meta, EArgs}, EA}; - -expand({'->', Meta, _Args}, E) -> - compile_error(Meta, ?m(E, file), "unhandled operator ->"); - -%% __block__ - -expand({'__block__', _Meta, []}, E) -> - {nil, E}; -expand({'__block__', _Meta, [Arg]}, E) -> - expand(Arg, E); -expand({'__block__', Meta, Args}, E) when is_list(Args) -> - {EArgs, EA} = expand_many(Args, E), - {{'__block__', Meta, EArgs}, EA}; - -%% __aliases__ - -expand({'__aliases__', _, _} = Alias, E) -> - case elixir_aliases:expand(Alias, ?m(E, aliases), - ?m(E, macro_aliases), ?m(E, lexical_tracker)) of - Receiver when is_atom(Receiver) -> - elixir_lexical:record_remote(Receiver, ?m(E, lexical_tracker)), - {Receiver, E}; - Aliases -> - {EAliases, EA} = expand_args(Aliases, E), - - case lists:all(fun is_atom/1, EAliases) of - true -> - Receiver = elixir_aliases:concat(EAliases), - elixir_lexical:record_remote(Receiver, ?m(E, lexical_tracker)), - {Receiver, EA}; - false -> - {{{'.', [], [elixir_aliases, concat]}, [], [EAliases]}, EA} - end - end; - -%% alias - -expand({alias, Meta, [Ref]}, E) -> - expand({alias, Meta, [Ref,[]]}, E); -expand({alias, Meta, [Ref, KV]}, E) -> - assert_no_match_or_guard_scope(Meta, alias, E), - {ERef, ER} = expand(Ref, E), - {EKV, ET} = expand_opts(Meta, alias, [as, warn], no_alias_opts(KV), ER), - - if - is_atom(ERef) -> - {{alias, Meta, [ERef, EKV]}, - expand_alias(Meta, true, ERef, EKV, ET)}; - true -> - compile_error(Meta, ?m(E, file), - "invalid argument for alias, expected a compile time atom or alias, got: ~ts", - ['Elixir.Kernel':inspect(ERef)]) - end; - -expand({require, Meta, [Ref]}, E) -> - expand({require, Meta, [Ref, []]}, E); -expand({require, Meta, [Ref, KV]}, E) -> - assert_no_match_or_guard_scope(Meta, require, E), - - {ERef, ER} = expand(Ref, E), - {EKV, ET} = expand_opts(Meta, require, [as, warn], no_alias_opts(KV), ER), - - if - is_atom(ERef) -> - elixir_aliases:ensure_loaded(Meta, ERef, ET), - {{require, Meta, [ERef, EKV]}, - expand_require(Meta, ERef, EKV, ET)}; - true -> - compile_error(Meta, ?m(E, file), - "invalid argument for require, expected a compile time atom or alias, got: ~ts", - ['Elixir.Kernel':inspect(ERef)]) - end; - -expand({import, Meta, [Left]}, E) -> - expand({import, Meta, [Left, []]}, E); - -expand({import, Meta, [Ref, KV]}, E) -> - assert_no_match_or_guard_scope(Meta, import, E), - {ERef, ER} = expand(Ref, E), - {EKV, ET} = expand_opts(Meta, import, [only, except, warn], KV, ER), - - if - is_atom(ERef) -> - elixir_aliases:ensure_loaded(Meta, ERef, ET), - {Functions, Macros} = elixir_import:import(Meta, ERef, EKV, ET), - {{import, Meta, [ERef, EKV]}, - expand_require(Meta, ERef, EKV, ET#{functions := Functions, macros := 
Macros})}; - true -> - compile_error(Meta, ?m(E, file), - "invalid argument for import, expected a compile time atom or alias, got: ~ts", - ['Elixir.Kernel':inspect(ERef)]) - end; - -%% Pseudo vars - -expand({'__MODULE__', _, Atom}, E) when is_atom(Atom) -> - {?m(E, module), E}; -expand({'__DIR__', _, Atom}, E) when is_atom(Atom) -> - {filename:dirname(?m(E, file)), E}; -expand({'__CALLER__', _, Atom} = Caller, E) when is_atom(Atom) -> - {Caller, E}; -expand({'__ENV__', Meta, Atom}, E) when is_atom(Atom) -> - Env = elixir_env:linify({?line(Meta), E}), - {{'%{}', [], maps:to_list(Env)}, E}; -expand({{'.', DotMeta, [{'__ENV__', Meta, Atom}, Field]}, CallMeta, []}, E) when is_atom(Atom), is_atom(Field) -> - Env = elixir_env:linify({?line(Meta), E}), - case maps:is_key(Field, Env) of - true -> {maps:get(Field, Env), E}; - false -> {{{'.', DotMeta, [{'%{}', [], maps:to_list(Env)}, Field]}, CallMeta, []}, E} - end; - -%% Quote - -expand({Unquote, Meta, [_]}, E) when Unquote == unquote; Unquote == unquote_splicing -> - compile_error(Meta, ?m(E, file), "~p called outside quote", [Unquote]); - -expand({quote, Meta, [Opts]}, E) when is_list(Opts) -> - case lists:keyfind(do, 1, Opts) of - {do, Do} -> - expand({quote, Meta, [lists:keydelete(do, 1, Opts), [{do,Do}]]}, E); - false -> - compile_error(Meta, ?m(E, file), "missing do keyword in quote") - end; - -expand({quote, Meta, [_]}, E) -> - compile_error(Meta, ?m(E, file), "invalid arguments for quote"); - -expand({quote, Meta, [KV, Do]}, E) when is_list(Do) -> - Exprs = - case lists:keyfind(do, 1, Do) of - {do, Expr} -> Expr; - false -> compile_error(Meta, E#elixir_scope.file, "missing do keyword in quote") - end, - - ValidOpts = [context, location, line, unquote, bind_quoted], - {EKV, ET} = expand_opts(Meta, quote, ValidOpts, KV, E), - - Context = case lists:keyfind(context, 1, EKV) of - {context, Ctx} when is_atom(Ctx) and (Ctx /= nil) -> - Ctx; - {context, Ctx} -> - compile_error(Meta, ?m(E, file), "invalid :context for quote, " - "expected non nil compile time atom or alias, got: ~ts", ['Elixir.Kernel':inspect(Ctx)]); - false -> - case ?m(E, module) of - nil -> 'Elixir'; - Mod -> Mod - end - end, - - Keep = lists:keyfind(location, 1, EKV) == {location, keep}, - Line = proplists:get_value(line, EKV, false), - - {Binding, DefaultUnquote} = case lists:keyfind(bind_quoted, 1, EKV) of - {bind_quoted, BQ} -> {BQ, false}; - false -> {nil, true} - end, - - Unquote = case lists:keyfind(unquote, 1, EKV) of - {unquote, Bool} when is_boolean(Bool) -> Bool; - false -> DefaultUnquote - end, - - Q = #elixir_quote{line=Line, keep=Keep, unquote=Unquote, context=Context}, - - {Quoted, _Q} = elixir_quote:quote(Exprs, Binding, Q, ET), - expand(Quoted, ET); - -expand({quote, Meta, [_, _]}, E) -> - compile_error(Meta, ?m(E, file), "invalid arguments for quote"); - -%% Functions - -expand({'&', _, [Arg]} = Original, E) when is_integer(Arg) -> - {Original, E}; -expand({'&', Meta, [Arg]}, E) -> - assert_no_match_or_guard_scope(Meta, '&', E), - case elixir_fn:capture(Meta, Arg, E) of - {local, Fun, Arity} -> - {{'&', Meta, [{'/', [], [{Fun, [], nil}, Arity]}]}, E}; - {expanded, Expr, EE} -> - expand(Expr, EE) - end; - -expand({fn, Meta, Pairs}, E) -> - assert_no_match_or_guard_scope(Meta, fn, E), - elixir_fn:expand(Meta, Pairs, E); - -%% Case/Receive/Try - -expand({'cond', Meta, [KV]}, E) -> - assert_no_match_or_guard_scope(Meta, 'cond', E), - {EClauses, EC} = elixir_exp_clauses:'cond'(Meta, KV, E), - {{'cond', Meta, [EClauses]}, EC}; - -expand({'case', Meta, [Expr, KV]}, 
E) -> - assert_no_match_or_guard_scope(Meta, 'case', E), - {EExpr, EE} = expand(Expr, E), - {EClauses, EC} = elixir_exp_clauses:'case'(Meta, KV, EE), - FClauses = - case (lists:keyfind(optimize_boolean, 1, Meta) == {optimize_boolean, true}) and - elixir_utils:returns_boolean(EExpr) of - true -> rewrite_case_clauses(EClauses); - false -> EClauses - end, - {{'case', Meta, [EExpr, FClauses]}, EC}; - -expand({'receive', Meta, [KV]}, E) -> - assert_no_match_or_guard_scope(Meta, 'receive', E), - {EClauses, EC} = elixir_exp_clauses:'receive'(Meta, KV, E), - {{'receive', Meta, [EClauses]}, EC}; - -expand({'try', Meta, [KV]}, E) -> - assert_no_match_or_guard_scope(Meta, 'try', E), - {EClauses, EC} = elixir_exp_clauses:'try'(Meta, KV, E), - {{'try', Meta, [EClauses]}, EC}; - -%% Comprehensions - -expand({for, Meta, [_|_] = Args}, E) -> - elixir_for:expand(Meta, Args, E); - -%% Super - -expand({super, Meta, Args}, E) when is_list(Args) -> - assert_no_match_or_guard_scope(Meta, super, E), - {EArgs, EA} = expand_args(Args, E), - {{super, Meta, EArgs}, EA}; - -%% Vars - -expand({'^', Meta, [Arg]}, #{context := match} = E) -> - case expand(Arg, E) of - {{Name, _, Kind} = EArg, EA} when is_atom(Name), is_atom(Kind) -> - {{'^', Meta, [EArg]}, EA}; - _ -> - Msg = "invalid argument for unary operator ^, expected an existing variable, got: ^~ts", - compile_error(Meta, ?m(E, file), Msg, ['Elixir.Macro':to_string(Arg)]) - end; -expand({'^', Meta, [Arg]}, E) -> - compile_error(Meta, ?m(E, file), - "cannot use ^~ts outside of match clauses", ['Elixir.Macro':to_string(Arg)]); - -expand({'_', _, Kind} = Var, E) when is_atom(Kind) -> - {Var, E}; -expand({Name, Meta, Kind} = Var, #{context := match, export_vars := Export} = E) when is_atom(Name), is_atom(Kind) -> - Pair = {Name, var_kind(Meta, Kind)}, - NewVars = ordsets:add_element(Pair, ?m(E, vars)), - NewExport = case (Export /= nil) of - true -> ordsets:add_element(Pair, Export); - false -> Export - end, - {Var, E#{vars := NewVars, export_vars := NewExport}}; -expand({Name, Meta, Kind} = Var, #{vars := Vars} = E) when is_atom(Name), is_atom(Kind) -> - case lists:member({Name, var_kind(Meta, Kind)}, Vars) of - true -> - {Var, E}; - false -> - VarMeta = lists:keyfind(var, 1, Meta), - if - VarMeta == {var, true} -> - Extra = case Kind of - nil -> ""; - _ -> io_lib:format(" (context ~ts)", [elixir_aliases:inspect(Kind)]) - end, - - compile_error(Meta, ?m(E, file), "expected var ~ts~ts to expand to an existing " - "variable or be a part of a match", [Name, Extra]); - true -> - expand({Name, Meta, []}, E) - end - end; - -%% Local calls - -expand({Atom, Meta, Args}, E) when is_atom(Atom), is_list(Meta), is_list(Args) -> - assert_no_ambiguous_op(Atom, Meta, Args, E), - - elixir_dispatch:dispatch_import(Meta, Atom, Args, E, fun() -> - expand_local(Meta, Atom, Args, E) - end); - -%% Remote calls - -expand({{'.', DotMeta, [Left, Right]}, Meta, Args}, E) - when (is_tuple(Left) orelse is_atom(Left)), is_atom(Right), is_list(Meta), is_list(Args) -> - {ELeft, EL} = expand(Left, E), - - elixir_dispatch:dispatch_require(Meta, ELeft, Right, Args, EL, fun(AR, AF, AA) -> - expand_remote(AR, DotMeta, AF, Meta, AA, E, EL) - end); - -%% Anonymous calls - -expand({{'.', DotMeta, [Expr]}, Meta, Args}, E) when is_list(Args) -> - {EExpr, EE} = expand(Expr, E), - if - is_atom(EExpr) -> - compile_error(Meta, ?m(E, file), "invalid function call :~ts.()", [EExpr]); - true -> - {EArgs, EA} = expand_args(Args, elixir_env:mergea(E, EE)), - {{{'.', DotMeta, [EExpr]}, Meta, EArgs}, 
elixir_env:mergev(EE, EA)} - end; - -%% Invalid calls - -expand({_, Meta, Args} = Invalid, E) when is_list(Meta) and is_list(Args) -> - compile_error(Meta, ?m(E, file), "invalid call ~ts", - ['Elixir.Macro':to_string(Invalid)]); - -expand({_, _, _} = Tuple, E) -> - compile_error([{line,0}], ?m(E, file), "invalid quoted expression: ~ts", - ['Elixir.Kernel':inspect(Tuple, [{records,false}])]); - -%% Literals - -expand({Left, Right}, E) -> - {[ELeft, ERight], EE} = expand_args([Left, Right], E), - {{ELeft, ERight}, EE}; - -expand(List, #{context := match} = E) when is_list(List) -> - expand_list(List, fun expand/2, E, []); - -expand(List, E) when is_list(List) -> - {EArgs, {EC, EV}} = expand_list(List, fun expand_arg/2, {E, E}, []), - {EArgs, elixir_env:mergea(EV, EC)}; - -expand(Function, E) when is_function(Function) -> - case (erlang:fun_info(Function, type) == {type, external}) andalso - (erlang:fun_info(Function, env) == {env, []}) of - true -> - {Function, E}; - false -> - compile_error([{line,0}], ?m(E, file), - "invalid quoted expression: ~ts", ['Elixir.Kernel':inspect(Function)]) - end; - -expand(Other, E) when is_number(Other); is_atom(Other); is_binary(Other); is_pid(Other) -> - {Other, E}; - -expand(Other, E) -> - compile_error([{line,0}], ?m(E, file), - "invalid quoted expression: ~ts", ['Elixir.Kernel':inspect(Other)]). - -%% Helpers - -expand_list([{'|', Meta, [_, _] = Args}], Fun, Acc, List) -> - {EArgs, EAcc} = lists:mapfoldl(Fun, Acc, Args), - expand_list([], Fun, EAcc, [{'|', Meta, EArgs}|List]); -expand_list([H|T], Fun, Acc, List) -> - {EArg, EAcc} = Fun(H, Acc), - expand_list(T, Fun, EAcc, [EArg|List]); -expand_list([], _Fun, Acc, List) -> - {lists:reverse(List), Acc}. - -expand_many(Args, E) -> - lists:mapfoldl(fun expand/2, E, Args). - -%% Variables in arguments are not propagated from one -%% argument to the other. For instance: -%% -%% x = 1 -%% foo(x = x + 2, x) -%% x -%% -%% Should be the same as: -%% -%% foo(3, 1) -%% 3 -%% -%% However, lexical information is. -expand_arg(Arg, Acc) when is_number(Arg); is_atom(Arg); is_binary(Arg); is_pid(Arg) -> - {Arg, Acc}; -expand_arg(Arg, {Acc1, Acc2}) -> - {EArg, EAcc} = expand(Arg, Acc1), - {EArg, {elixir_env:mergea(Acc1, EAcc), elixir_env:mergev(Acc2, EAcc)}}. - -expand_args([Arg], E) -> - {EArg, EE} = expand(Arg, E), - {[EArg], EE}; -expand_args(Args, #{context := match} = E) -> - expand_many(Args, E); -expand_args(Args, E) -> - {EArgs, {EC, EV}} = lists:mapfoldl(fun expand_arg/2, {E, E}, Args), - {EArgs, elixir_env:mergea(EV, EC)}. - -%% Match/var helpers - -var_kind(Meta, Kind) -> - case lists:keyfind(counter, 1, Meta) of - {counter, Counter} -> Counter; - false -> Kind - end. - -%% Locals - -assert_no_ambiguous_op(Name, Meta, [Arg], E) -> - case lists:keyfind(ambiguous_op, 1, Meta) of - {ambiguous_op, Kind} -> - case lists:member({Name, Kind}, ?m(E, vars)) of - true -> - compile_error(Meta, ?m(E, file), "\"~ts ~ts\" looks like a function call but " - "there is a variable named \"~ts\", please use explicit parenthesis or even spaces", - [Name, 'Elixir.Macro':to_string(Arg), Name]); - false -> - ok - end; - _ -> - ok - end; -assert_no_ambiguous_op(_Atom, _Meta, _Args, _E) -> - ok. 
- -expand_local(Meta, Name, Args, #{local := nil, function := nil} = E) -> - {EArgs, EA} = expand_args(Args, E), - {{Name, Meta, EArgs}, EA}; -expand_local(Meta, Name, Args, #{local := nil, module := Module, function := Function} = E) -> - elixir_locals:record_local({Name, length(Args)}, Module, Function), - {EArgs, EA} = expand_args(Args, E), - {{Name, Meta, EArgs}, EA}; -expand_local(Meta, Name, Args, E) -> - expand({{'.', Meta, [?m(E, local), Name]}, Meta, Args}, E). - -%% Remote - -expand_remote(Receiver, DotMeta, Right, Meta, Args, E, EL) -> - if - is_atom(Receiver) -> elixir_lexical:record_remote(Receiver, ?m(E, lexical_tracker)); - true -> ok - end, - {EArgs, EA} = expand_args(Args, E), - {{{'.', DotMeta, [Receiver, Right]}, Meta, EArgs}, elixir_env:mergev(EL, EA)}. - -%% Lexical helpers - -expand_opts(Meta, Kind, Allowed, Opts, E) -> - {EOpts, EE} = expand(Opts, E), - validate_opts(Meta, Kind, Allowed, EOpts, EE), - {EOpts, EE}. - -validate_opts(Meta, Kind, Allowed, Opts, E) when is_list(Opts) -> - [begin - compile_error(Meta, ?m(E, file), - "unsupported option ~ts given to ~s", ['Elixir.Kernel':inspect(Key), Kind]) - end || {Key, _} <- Opts, not lists:member(Key, Allowed)]; - -validate_opts(Meta, Kind, _Allowed, _Opts, S) -> - compile_error(Meta, S#elixir_scope.file, "invalid options for ~s, expected a keyword list", [Kind]). - -no_alias_opts(KV) when is_list(KV) -> - case lists:keyfind(as, 1, KV) of - {as, As} -> lists:keystore(as, 1, KV, {as, no_alias_expansion(As)}); - false -> KV - end; -no_alias_opts(KV) -> KV. - -no_alias_expansion({'__aliases__', Meta, [H|T]}) when (H /= 'Elixir') and is_atom(H) -> - {'__aliases__', Meta, ['Elixir',H|T]}; -no_alias_expansion(Other) -> - Other. - -expand_require(Meta, Ref, KV, E) -> - RE = E#{requires := ordsets:add_element(Ref, ?m(E, requires))}, - expand_alias(Meta, false, Ref, KV, RE). - -expand_alias(Meta, IncludeByDefault, Ref, KV, #{context_modules := Context} = E) -> - New = expand_as(lists:keyfind(as, 1, KV), Meta, IncludeByDefault, Ref, E), - - %% Add the alias to context_modules if defined is true. - %% This is used by defmodule in order to store the defined - %% module in context modules. - NewContext = - case lists:keyfind(defined, 1, Meta) of - {defined, Mod} when is_atom(Mod) -> [Mod|Context]; - false -> Context - end, - - {Aliases, MacroAliases} = elixir_aliases:store(Meta, New, Ref, KV, ?m(E, aliases), - ?m(E, macro_aliases), ?m(E, lexical_tracker)), - - E#{aliases := Aliases, macro_aliases := MacroAliases, context_modules := NewContext}. - -expand_as({as, true}, _Meta, _IncludeByDefault, Ref, _E) -> - elixir_aliases:last(Ref); -expand_as({as, false}, _Meta, _IncludeByDefault, Ref, _E) -> - Ref; -expand_as({as, Atom}, Meta, _IncludeByDefault, _Ref, E) when is_atom(Atom) -> - case length(string:tokens(atom_to_list(Atom), ".")) of - 1 -> compile_error(Meta, ?m(E, file), - "invalid value for keyword :as, expected an alias, got atom: ~ts", [elixir_aliases:inspect(Atom)]); - 2 -> Atom; - _ -> compile_error(Meta, ?m(E, file), - "invalid value for keyword :as, expected an alias, got nested alias: ~ts", [elixir_aliases:inspect(Atom)]) - end; -expand_as(false, _Meta, IncludeByDefault, Ref, _E) -> - if IncludeByDefault -> elixir_aliases:last(Ref); - true -> Ref - end; -expand_as({as, Other}, Meta, _IncludeByDefault, _Ref, E) -> - compile_error(Meta, ?m(E, file), - "invalid value for keyword :as, expected an alias, got: ~ts", ['Elixir.Macro':to_string(Other)]). 
- -%% Assertions - -rewrite_case_clauses([{do,[ - {'->', FalseMeta, [ - [{'when', _, [Var, {'__op__', _,[ - 'orelse', - {{'.', _, [erlang, '=:=']}, _, [Var, nil]}, - {{'.', _, [erlang, '=:=']}, _, [Var, false]} - ]}]}], - FalseExpr - ]}, - {'->', TrueMeta, [ - [{'_', _, _}], - TrueExpr - ]} -]}]) -> - [{do, [ - {'->', FalseMeta, [[false], FalseExpr]}, - {'->', TrueMeta, [[true], TrueExpr]} - ]}]; -rewrite_case_clauses(Clauses) -> - Clauses. - -assert_no_match_or_guard_scope(Meta, Kind, E) -> - assert_no_match_scope(Meta, Kind, E), - assert_no_guard_scope(Meta, Kind, E). -assert_no_match_scope(Meta, _Kind, #{context := match, file := File}) -> - compile_error(Meta, File, "invalid expression in match"); -assert_no_match_scope(_Meta, _Kind, _E) -> []. -assert_no_guard_scope(Meta, _Kind, #{context := guard, file := File}) -> - compile_error(Meta, File, "invalid expression in guard"); -assert_no_guard_scope(_Meta, _Kind, _E) -> []. diff --git a/lib/elixir/src/elixir_exp_clauses.erl b/lib/elixir/src/elixir_exp_clauses.erl deleted file mode 100644 index 577e7d6dc8a..00000000000 --- a/lib/elixir/src/elixir_exp_clauses.erl +++ /dev/null @@ -1,214 +0,0 @@ -%% Handle code related to args, guard and -> matching for case, -%% fn, receive and friends. try is handled in elixir_try. --module(elixir_exp_clauses). --export([match/3, clause/5, def/5, head/2, - 'case'/3, 'receive'/3, 'try'/3, 'cond'/3]). --import(elixir_errors, [compile_error/3, compile_error/4]). --include("elixir.hrl"). - -match(Fun, Expr, #{context := Context} = E) -> - {EExpr, EE} = Fun(Expr, E#{context := match}), - {EExpr, EE#{context := Context}}. - -def(Fun, Args, Guards, Body, E) -> - {EArgs, EA} = match(Fun, Args, E), - {EGuards, EG} = guard(Guards, EA#{context := guard}), - {EBody, EB} = elixir_exp:expand(Body, EG#{context := ?m(E, context)}), - {EArgs, EGuards, EBody, EB}. - -clause(Meta, Kind, Fun, {'->', ClauseMeta, [_, _]} = Clause, E) when is_function(Fun, 3) -> - clause(Meta, Kind, fun(X, Acc) -> Fun(ClauseMeta, X, Acc) end, Clause, E); -clause(_Meta, _Kind, Fun, {'->', Meta, [Left, Right]}, E) -> - {ELeft, EL} = Fun(Left, E), - {ERight, ER} = elixir_exp:expand(Right, EL), - {{'->', Meta, [ELeft, ERight]}, ER}; -clause(Meta, Kind, _Fun, _, E) -> - compile_error(Meta, ?m(E, file), "expected -> clauses in ~ts", [Kind]). - -head([{'when', Meta, [_,_|_] = All}], E) -> - {Args, Guard} = elixir_utils:split_last(All), - {EArgs, EA} = match(fun elixir_exp:expand_args/2, Args, E), - {EGuard, EG} = guard(Guard, EA#{context := guard}), - {[{'when', Meta, EArgs ++ [EGuard]}], EG#{context := ?m(E, context)}}; -head(Args, E) -> - match(fun elixir_exp:expand_args/2, Args, E). - -guard({'when', Meta, [Left, Right]}, E) -> - {ELeft, EL} = guard(Left, E), - {ERight, ER} = guard(Right, EL), - {{'when', Meta, [ELeft, ERight]}, ER}; -guard(Other, E) -> - elixir_exp:expand(Other, E). - -%% Case - -'case'(Meta, [], E) -> - compile_error(Meta, ?m(E, file), "missing do keyword in case"); -'case'(Meta, KV, E) when not is_list(KV) -> - compile_error(Meta, ?m(E, file), "invalid arguments for case"); -'case'(Meta, KV, E) -> - EE = E#{export_vars := []}, - {EClauses, EVars} = lists:mapfoldl(fun(X, Acc) -> do_case(Meta, X, Acc, EE) end, [], KV), - {EClauses, elixir_env:mergev(EVars, E)}. - -do_case(Meta, {'do', _} = Do, Acc, E) -> - Fun = expand_one(Meta, 'case', 'do', fun head/2), - expand_with_export(Meta, 'case', Fun, Do, Acc, E); -do_case(Meta, {Key, _}, _Acc, E) -> - compile_error(Meta, ?m(E, file), "unexpected keyword ~ts in case", [Key]). 
- -%% Cond - -'cond'(Meta, [], E) -> - compile_error(Meta, ?m(E, file), "missing do keyword in cond"); -'cond'(Meta, KV, E) when not is_list(KV) -> - compile_error(Meta, ?m(E, file), "invalid arguments for cond"); -'cond'(Meta, KV, E) -> - EE = E#{export_vars := []}, - {EClauses, EVars} = lists:mapfoldl(fun(X, Acc) -> do_cond(Meta, X, Acc, EE) end, [], KV), - {EClauses, elixir_env:mergev(EVars, E)}. - -do_cond(Meta, {'do', _} = Do, Acc, E) -> - Fun = expand_one(Meta, 'cond', 'do', fun elixir_exp:expand_args/2), - expand_with_export(Meta, 'cond', Fun, Do, Acc, E); -do_cond(Meta, {Key, _}, _Acc, E) -> - compile_error(Meta, ?m(E, file), "unexpected keyword ~ts in cond", [Key]). - -%% Receive - -'receive'(Meta, [], E) -> - compile_error(Meta, ?m(E, file), "missing do or after keyword in receive"); -'receive'(Meta, KV, E) when not is_list(KV) -> - compile_error(Meta, ?m(E, file), "invalid arguments for receive"); -'receive'(Meta, KV, E) -> - EE = E#{export_vars := []}, - {EClauses, EVars} = lists:mapfoldl(fun(X, Acc) -> do_receive(Meta, X, Acc, EE) end, [], KV), - {EClauses, elixir_env:mergev(EVars, E)}. - -do_receive(_Meta, {'do', nil} = Do, Acc, _E) -> - {Do, Acc}; -do_receive(Meta, {'do', _} = Do, Acc, E) -> - Fun = expand_one(Meta, 'receive', 'do', fun head/2), - expand_with_export(Meta, 'receive', Fun, Do, Acc, E); -do_receive(Meta, {'after', [_]} = After, Acc, E) -> - Fun = expand_one(Meta, 'receive', 'after', fun elixir_exp:expand_args/2), - expand_with_export(Meta, 'receive', Fun, After, Acc, E); -do_receive(Meta, {'after', _}, _Acc, E) -> - compile_error(Meta, ?m(E, file), "expected a single -> clause for after in receive"); -do_receive(Meta, {Key, _}, _Acc, E) -> - compile_error(Meta, ?m(E, file), "unexpected keyword ~ts in receive", [Key]). - -%% Try - -'try'(Meta, [], E) -> - compile_error(Meta, ?m(E, file), "missing do keywords in try"); -'try'(Meta, KV, E) when not is_list(KV) -> - elixir_errors:compile_error(Meta, ?m(E, file), "invalid arguments for try"); -'try'(Meta, KV, E) -> - {lists:map(fun(X) -> do_try(Meta, X, E) end, KV), E}. - -do_try(_Meta, {'do', Expr}, E) -> - {EExpr, _} = elixir_exp:expand(Expr, E), - {'do', EExpr}; -do_try(_Meta, {'after', Expr}, E) -> - {EExpr, _} = elixir_exp:expand(Expr, E), - {'after', EExpr}; -do_try(Meta, {'else', _} = Else, E) -> - Fun = expand_one(Meta, 'try', 'else', fun head/2), - expand_without_export(Meta, 'try', Fun, Else, E); -do_try(Meta, {'catch', _} = Catch, E) -> - expand_without_export(Meta, 'try', fun expand_catch/3, Catch, E); -do_try(Meta, {'rescue', _} = Rescue, E) -> - expand_without_export(Meta, 'try', fun expand_rescue/3, Rescue, E); -do_try(Meta, {Key, _}, E) -> - compile_error(Meta, ?m(E, file), "unexpected keyword ~ts in try", [Key]). - -expand_catch(_Meta, [_] = Args, E) -> - head(Args, E); -expand_catch(_Meta, [_, _] = Args, E) -> - head(Args, E); -expand_catch(Meta, _, E) -> - compile_error(Meta, ?m(E, file), "expected one or two args for catch clauses (->) in try"). - -expand_rescue(Meta, [Arg], E) -> - case expand_rescue(Arg, E) of - {EArg, EA} -> - {[EArg], EA}; - false -> - compile_error(Meta, ?m(E, file), "invalid rescue clause. The clause should " - "match on an alias, a variable or be in the `var in [alias]` format") - end; -expand_rescue(Meta, _, E) -> - compile_error(Meta, ?m(E, file), "expected one arg for rescue clauses (->) in try"). 
- -%% rescue var => var in _ -expand_rescue({Name, _, Atom} = Var, E) when is_atom(Name), is_atom(Atom) -> - expand_rescue({in, [], [Var, {'_', [], ?m(E, module)}]}, E); - -%% rescue var in [Exprs] -expand_rescue({in, Meta, [Left, Right]}, E) -> - {ELeft, EL} = match(fun elixir_exp:expand/2, Left, E), - {ERight, ER} = elixir_exp:expand(Right, EL), - - case ELeft of - {Name, _, Atom} when is_atom(Name), is_atom(Atom) -> - case normalize_rescue(ERight) of - false -> false; - Other -> {{in, Meta, [ELeft, Other]}, ER} - end; - _ -> - false - end; - -%% rescue Error => _ in [Error] -expand_rescue(Arg, E) -> - expand_rescue({in, [], [{'_', [], ?m(E, module)}, Arg]}, E). - -normalize_rescue({'_', _, Atom} = N) when is_atom(Atom) -> N; -normalize_rescue(Atom) when is_atom(Atom) -> [Atom]; -normalize_rescue(Other) -> - is_list(Other) andalso lists:all(fun is_atom/1, Other) andalso Other. - -%% Expansion helpers - -%% Returns a function that expands arguments -%% considering we have at maximum one entry. -expand_one(Meta, Kind, Key, Fun) -> - fun - ([_] = Args, E) -> - Fun(Args, E); - (_, E) -> - compile_error(Meta, ?m(E, file), - "expected one arg for ~ts clauses (->) in ~ts", [Key, Kind]) - end. - -%% Expands all -> pairs in a given key keeping the overall vars. -expand_with_export(Meta, Kind, Fun, {Key, Clauses}, Acc, E) when is_list(Clauses) -> - EFun = - case lists:keyfind(export_head, 1, Meta) of - {export_head, true} -> - Fun; - _ -> - fun(Args, #{export_vars := ExportVars} = EE) -> - {FArgs, FE} = Fun(Args, EE), - {FArgs, FE#{export_vars := ExportVars}} - end - end, - Transformer = fun(Clause, Vars) -> - {EClause, EC} = clause(Meta, Kind, EFun, Clause, E), - {EClause, elixir_env:merge_vars(Vars, ?m(EC, export_vars))} - end, - {EClauses, EVars} = lists:mapfoldl(Transformer, Acc, Clauses), - {{Key, EClauses}, EVars}; -expand_with_export(Meta, Kind, _Fun, {Key, _}, _Acc, E) -> - compile_error(Meta, ?m(E, file), "expected -> clauses for ~ts in ~ts", [Key, Kind]). - -%% Expands all -> pairs in a given key but do not keep the overall vars. -expand_without_export(Meta, Kind, Fun, {Key, Clauses}, E) when is_list(Clauses) -> - Transformer = fun(Clause) -> - {EClause, _} = clause(Meta, Kind, Fun, Clause, E), - EClause - end, - {Key, lists:map(Transformer, Clauses)}; -expand_without_export(Meta, Kind, _Fun, {Key, _}, E) -> - compile_error(Meta, ?m(E, file), "expected -> clauses for ~ts in ~ts", [Key, Kind]). diff --git a/lib/elixir/src/elixir_expand.erl b/lib/elixir/src/elixir_expand.erl new file mode 100644 index 00000000000..8373124cd64 --- /dev/null +++ b/lib/elixir/src/elixir_expand.erl @@ -0,0 +1,921 @@ +-module(elixir_expand). +-export([expand/2, expand_args/2, expand_arg/2, format_error/1]). +-import(elixir_errors, [form_error/4]). +-include("elixir.hrl"). 
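The elixir_expand clauses that follow cover, among other constructs, the new multi-alias call, quote's bind_quoted option, and the pin operator. A few illustrative Elixir forms they expand (hypothetical module names, shown for reference rather than as part of the patch):

    # Multi-alias call, handled by expand_multi_alias_call below:
    defmodule Deep.One, do: def(id, do: :one)
    defmodule Deep.Two, do: def(id, do: :two)

    defmodule MultiAliasExample do
      alias Deep.{One, Two}
      def ids, do: [One.id(), Two.id()]
    end

    MultiAliasExample.ids()
    #=> [:one, :two]

    # bind_quoted injects the value once and disables default unquoting:
    defmodule QuoteExample do
      defmacro squared(expr) do
        quote bind_quoted: [x: expr] do
          x * x
        end
      end
    end

    # The pin operator only matches against an existing variable:
    x = 1
    ^x = 1      # matches
    # ^x = 2    # raises MatchError; pinning an unbound variable is a compile error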
+ +%% = + +expand({'=', Meta, [Left, Right]}, E) -> + assert_no_guard_scope(Meta, '=', E), + {ERight, ER} = expand(Right, E), + {ELeft, EL} = elixir_clauses:match(fun expand/2, Left, E), + {{'=', Meta, [ELeft, ERight]}, elixir_env:mergev(EL, ER)}; + +%% Literal operators + +expand({'{}', Meta, Args}, E) -> + {EArgs, EA} = expand_args(Args, E), + {{'{}', Meta, EArgs}, EA}; + +expand({'%{}', Meta, Args}, E) -> + elixir_map:expand_map(Meta, Args, E); + +expand({'%', Meta, [Left, Right]}, E) -> + elixir_map:expand_struct(Meta, Left, Right, E); + +expand({'<<>>', Meta, Args}, E) -> + elixir_bitstring:expand(Meta, Args, E, false); + +expand({'->', Meta, _Args}, E) -> + form_error(Meta, ?key(E, file), ?MODULE, unhandled_arrow_op); + +%% __block__ + +expand({'__block__', _Meta, []}, E) -> + {nil, E}; +expand({'__block__', _Meta, [Arg]}, E) -> + expand(Arg, E); +expand({'__block__', Meta, Args}, E) when is_list(Args) -> + {EArgs, EA} = expand_block(Args, [], Meta, E), + {{'__block__', Meta, EArgs}, EA}; + +%% __aliases__ + +expand({'__aliases__', _, _} = Alias, E) -> + expand_aliases(Alias, E, true); + +%% alias + +expand({Kind, Meta, [{{'.', _, [Base, '{}']}, _, Refs} | Rest]}, E) + when Kind == alias; Kind == require; Kind == import -> + case Rest of + [] -> + expand_multi_alias_call(Kind, Meta, Base, Refs, [], E); + [Opts] -> + case lists:keymember(as, 1, Opts) of + true -> + form_error(Meta, ?key(E, file), ?MODULE, as_in_multi_alias_call); + false -> + expand_multi_alias_call(Kind, Meta, Base, Refs, Opts, E) + end + end; +expand({alias, Meta, [Ref]}, E) -> + expand({alias, Meta, [Ref, []]}, E); +expand({alias, Meta, [Ref, Opts]}, E) -> + assert_no_match_or_guard_scope(Meta, alias, E), + {ERef, ER} = expand_without_aliases_report(Ref, E), + {EOpts, ET} = expand_opts(Meta, alias, [as, warn], no_alias_opts(Opts), ER), + + if + is_atom(ERef) -> + {ERef, expand_alias(Meta, true, ERef, EOpts, ET)}; + true -> + form_error(Meta, ?key(E, file), ?MODULE, {expected_compile_time_module, alias, Ref}) + end; + +expand({require, Meta, [Ref]}, E) -> + expand({require, Meta, [Ref, []]}, E); +expand({require, Meta, [Ref, Opts]}, E) -> + assert_no_match_or_guard_scope(Meta, require, E), + + {ERef, ER} = expand_without_aliases_report(Ref, E), + {EOpts, ET} = expand_opts(Meta, require, [as, warn], no_alias_opts(Opts), ER), + + if + is_atom(ERef) -> + elixir_aliases:ensure_loaded(Meta, ERef, ET), + {ERef, expand_require(Meta, ERef, EOpts, ET)}; + true -> + form_error(Meta, ?key(E, file), ?MODULE, {expected_compile_time_module, require, Ref}) + end; + +expand({import, Meta, [Left]}, E) -> + expand({import, Meta, [Left, []]}, E); + +expand({import, Meta, [Ref, Opts]}, E) -> + assert_no_match_or_guard_scope(Meta, import, E), + {ERef, ER} = expand_without_aliases_report(Ref, E), + {EOpts, ET} = expand_opts(Meta, import, [only, except, warn], Opts, ER), + + if + is_atom(ERef) -> + elixir_aliases:ensure_loaded(Meta, ERef, ET), + {Functions, Macros} = elixir_import:import(Meta, ERef, EOpts, ET), + {ERef, expand_require(Meta, ERef, EOpts, ET#{functions := Functions, macros := Macros})}; + true -> + form_error(Meta, ?key(E, file), ?MODULE, {expected_compile_time_module, import, Ref}) + end; + +%% Compilation environment macros + +expand({'__MODULE__', _, Atom}, E) when is_atom(Atom) -> + {?key(E, module), E}; +expand({'__DIR__', _, Atom}, E) when is_atom(Atom) -> + {filename:dirname(?key(E, file)), E}; +expand({'__CALLER__', _, Atom} = Caller, E) when is_atom(Atom) -> + {Caller, E}; +expand({'__ENV__', Meta, Atom}, E) when 
is_atom(Atom) -> + Env = + case E of + #{function := nil} -> E; + _ -> maps:put(lexical_tracker, nil, E) + end, + LinifiedEnv = elixir_env:linify({?line(Meta), Env}), + {{'%{}', [], maps:to_list(LinifiedEnv)}, E}; +expand({{'.', DotMeta, [{'__ENV__', Meta, Atom}, Field]}, CallMeta, []}, E) when is_atom(Atom), is_atom(Field) -> + Env = elixir_env:linify({?line(Meta), E}), + case maps:is_key(Field, Env) of + true -> {maps:get(Field, Env), E}; + false -> {{{'.', DotMeta, [{'%{}', [], maps:to_list(Env)}, Field]}, CallMeta, []}, E} + end; + +%% Quote + +expand({Unquote, Meta, [_]}, E) when Unquote == unquote; Unquote == unquote_splicing -> + form_error(Meta, ?key(E, file), ?MODULE, {unquote_outside_quote, Unquote}); + +expand({quote, Meta, [Opts]}, E) when is_list(Opts) -> + case lists:keyfind(do, 1, Opts) of + {do, Do} -> + expand({quote, Meta, [lists:keydelete(do, 1, Opts), [{do, Do}]]}, E); + false -> + form_error(Meta, ?key(E, file), ?MODULE, {missing_option, 'quote', [do]}) + end; + +expand({quote, Meta, [_]}, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_args, 'quote'}); + +expand({quote, Meta, [Opts, Do]}, E) when is_list(Do) -> + Exprs = + case lists:keyfind(do, 1, Do) of + {do, Expr} -> Expr; + false -> form_error(Meta, ?key(E, file), ?MODULE, {missing_option, 'quote', [do]}) + end, + + ValidOpts = [context, location, line, file, unquote, bind_quoted, generated], + {EOpts, ET} = expand_opts(Meta, quote, ValidOpts, Opts, E), + + Context = case lists:keyfind(context, 1, EOpts) of + {context, Ctx} when is_atom(Ctx) and (Ctx /= nil) -> + Ctx; + {context, Ctx} -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_context_opt_for_quote, Ctx}); + false -> + case ?key(E, module) of + nil -> 'Elixir'; + Mod -> Mod + end + end, + + {File, Line} = case lists:keyfind(location, 1, EOpts) of + {location, keep} -> + {elixir_utils:relative_to_cwd(?key(E, file)), false}; + false -> + { case lists:keyfind(file, 1, EOpts) of + {file, F} -> F; + false -> nil + end, + + case lists:keyfind(line, 1, EOpts) of + {line, L} -> L; + false -> false + end } + end, + + {Binding, DefaultUnquote} = case lists:keyfind(bind_quoted, 1, EOpts) of + {bind_quoted, BQ} -> {BQ, false}; + false -> {nil, true} + end, + + Unquote = case lists:keyfind(unquote, 1, EOpts) of + {unquote, U} when is_boolean(U) -> U; + false -> DefaultUnquote + end, + + Generated = lists:keyfind(generated, 1, EOpts) == {generated, true}, + + %% TODO: Do not allow negative line numbers once Erlang 18 + %% support is dropped as it only allows negative line + %% annotations alongside the generated check. 
+ Q = #elixir_quote{line=Line, file=File, unquote=Unquote, + context=Context, generated=Generated}, + + {Quoted, _Q} = elixir_quote:quote(Exprs, Binding, Q, ET), + expand(Quoted, ET); + +expand({quote, Meta, [_, _]}, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_args, 'quote'}); + +%% Functions + +expand({'&', Meta, [Arg]}, E) -> + assert_no_match_or_guard_scope(Meta, '&', E), + case elixir_fn:capture(Meta, Arg, E) of + {remote, Remote, Fun, Arity} -> + is_atom(Remote) andalso + elixir_lexical:record_remote(Remote, Fun, Arity, ?key(E, function), ?line(Meta), ?key(E, lexical_tracker)), + {{'&', Meta, [{'/', [], [{{'.', [], [Remote, Fun]}, [], []}, Arity]}]}, E}; + {local, Fun, Arity} -> + {{'&', Meta, [{'/', [], [{Fun, [], nil}, Arity]}]}, E}; + {expand, Expr, EE} -> + expand(Expr, EE) + end; + +expand({fn, Meta, Pairs}, E) -> + assert_no_match_or_guard_scope(Meta, fn, E), + elixir_fn:expand(Meta, Pairs, E); + +%% Case/Receive/Try + +expand({'cond', Meta, [Opts]}, E) -> + assert_no_match_or_guard_scope(Meta, 'cond', E), + assert_no_underscore_clause_in_cond(Opts, E), + {EClauses, EC} = elixir_clauses:'cond'(Meta, Opts, E), + {{'cond', Meta, [EClauses]}, EC}; + +expand({'case', Meta, [Expr, Options]}, Env) -> + ShouldExportVars = proplists:get_value(export_vars, Meta, true), + expand_case(ShouldExportVars, Meta, Expr, Options, Env); + +expand({'receive', Meta, [Opts]}, E) -> + assert_no_match_or_guard_scope(Meta, 'receive', E), + {EClauses, EC} = elixir_clauses:'receive'(Meta, Opts, E), + {{'receive', Meta, [EClauses]}, EC}; + +expand({'try', Meta, [Opts]}, E) -> + assert_no_match_or_guard_scope(Meta, 'try', E), + {EClauses, EC} = elixir_clauses:'try'(Meta, Opts, E), + {{'try', Meta, [EClauses]}, EC}; + +%% Comprehensions + +expand({for, Meta, [_ | _] = Args}, E) -> + {Cases, Block} = + case elixir_utils:split_last(Args) of + {OuterCases, OuterOpts} when is_list(OuterOpts) -> + case elixir_utils:split_last(OuterCases) of + {InnerCases, InnerOpts} when is_list(InnerOpts) -> + {InnerCases, InnerOpts ++ OuterOpts}; + _ -> + {OuterCases, OuterOpts} + end; + _ -> + {Args, []} + end, + + validate_opts(Meta, for, [do, into], Block, E), + {Expr, Opts} = + case lists:keytake(do, 1, Block) of + {value, {do, Do}, DoOpts} -> + {Do, DoOpts}; + false -> + form_error(Meta, ?key(E, file), ?MODULE, {missing_option, for, [do]}) + end, + + {EOpts, EO} = expand(Opts, E), + {ECases, EC} = lists:mapfoldl(fun expand_for/2, EO, Cases), + {EExpr, _} = expand(Expr, EC), + assert_generator_start(Meta, ECases, E), + {{for, Meta, ECases ++ [[{do, EExpr} | EOpts]]}, E}; + +%% With + +expand({with, Meta, [_ | _] = Args}, E) -> + elixir_with:expand(Meta, Args, E); + +%% Super + +expand({super, Meta, Args}, #{file := File} = E) when is_list(Args) -> + Module = assert_module_scope(Meta, super, E), + Function = assert_function_scope(Meta, super, E), + {_, Arity} = Function, + + case length(Args) of + Arity -> + {OName, OArity} = elixir_overridable:super(Meta, File, Module, Function), + {EArgs, EA} = expand_args(Args, E), + OArgs = + if + OArity > Arity -> [{'__CALLER__', [], nil} | EArgs]; + true -> EArgs + end, + {{OName, Meta, OArgs}, EA}; + _ -> + form_error(Meta, File, ?MODULE, wrong_number_of_args_for_super) + end; + +%% Vars + +expand({'^', Meta, [Arg]}, #{context := match, prematch_vars := nil} = E) -> + form_error(Meta, ?key(E, file), ?MODULE, {pin_inside_definition, Arg}); + +expand({'^', Meta, [Arg]}, #{context := match, prematch_vars := PrematchVars} = E) -> + case expand(Arg, E) of + {{VarName, 
VarMeta, Kind} = Var, EA} when is_atom(VarName), is_atom(Kind) -> + %% If the variable was defined, then we return the expanded ^, otherwise + %% we raise. We cannot use the expanded env because it would contain the + %% variable. + case lists:member({VarName, var_kind(VarMeta, Kind)}, PrematchVars) of + true -> + {{'^', Meta, [Var]}, EA}; + false -> + form_error(Meta, ?key(EA, file), ?MODULE, {unbound_variable_pin, VarName}) + end; + _ -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_arg_for_pin, Arg}) + end; +expand({'^', Meta, [Arg]}, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {pin_outside_of_match, Arg}); + +expand({'_', _Meta, Kind} = Var, #{context := match} = E) when is_atom(Kind) -> + {Var, E}; +expand({'_', Meta, Kind}, E) when is_atom(Kind) -> + form_error(Meta, ?key(E, file), ?MODULE, unbound_underscore); + +expand({Name, Meta, Kind} = Var, #{context := match, export_vars := Export} = E) when is_atom(Name), is_atom(Kind) -> + Pair = {Name, var_kind(Meta, Kind)}, + NewVars = ordsets:add_element(Pair, ?key(E, vars)), + NewExport = case (Export /= nil) of + true -> ordsets:add_element(Pair, Export); + false -> Export + end, + {Var, E#{vars := NewVars, export_vars := NewExport}}; +expand({Name, Meta, Kind} = Var, #{vars := Vars} = E) when is_atom(Name), is_atom(Kind) -> + case lists:member({Name, var_kind(Meta, Kind)}, Vars) of + true -> + {Var, E}; + false -> + case lists:keyfind(var, 1, Meta) of + {var, true} -> + form_error(Meta, ?key(E, file), ?MODULE, {undefined_var, Name, Kind}); + _ -> + Message = + io_lib:format("variable \"~ts\" does not exist and is being expanded to \"~ts()\"," + " please use parentheses to remove the ambiguity or change the variable name", [Name, Name]), + elixir_errors:warn(?line(Meta), ?key(E, file), Message), + expand({Name, Meta, []}, E) + end + end; + +%% Local calls + +expand({Atom, Meta, Args}, E) when is_atom(Atom), is_list(Meta), is_list(Args) -> + assert_no_ambiguous_op(Atom, Meta, Args, E), + + elixir_dispatch:dispatch_import(Meta, Atom, Args, E, fun() -> + expand_local(Meta, Atom, Args, E) + end); + +%% Remote calls + +expand({{'.', Meta, [erlang, 'orelse']}, _, [Left, Right]}, #{context := nil} = Env) -> + Generated = ?generated(Meta), + TrueClause = {'->', Generated, [[true], true]}, + FalseClause = {'->', Generated, [[false], Right]}, + expand_boolean_check('or', Left, TrueClause, FalseClause, Meta, Env); + +expand({{'.', Meta, [erlang, 'andalso']}, _, [Left, Right]}, #{context := nil} = Env) -> + Generated = ?generated(Meta), + TrueClause = {'->', Generated, [[true], Right]}, + FalseClause = {'->', Generated, [[false], false]}, + expand_boolean_check('and', Left, TrueClause, FalseClause, Meta, Env); + +expand({{'.', DotMeta, [Left, Right]}, Meta, Args}, E) + when (is_tuple(Left) orelse is_atom(Left)), is_atom(Right), is_list(Meta), is_list(Args) -> + {ELeft, EL} = expand(Left, E), + + elixir_dispatch:dispatch_require(Meta, ELeft, Right, Args, EL, fun(AR, AF, AA) -> + expand_remote(AR, DotMeta, AF, Meta, AA, E, EL) + end); + +%% Anonymous calls + +expand({{'.', DotMeta, [Expr]}, Meta, Args}, E) when is_list(Args) -> + {EExpr, EE} = expand(Expr, E), + if + is_atom(EExpr) -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_function_call, EExpr}); + true -> + {EArgs, EA} = expand_args(Args, elixir_env:mergea(E, EE)), + {{{'.', DotMeta, [EExpr]}, Meta, EArgs}, elixir_env:mergev(EE, EA)} + end; + +%% Invalid calls + +expand({_, Meta, Args} = Invalid, E) when is_list(Meta) and is_list(Args) -> + form_error(Meta, ?key(E, file), 
?MODULE, {invalid_call, Invalid}); + +expand({_, _, _} = Tuple, E) -> + form_error([{line, 0}], ?key(E, file), ?MODULE, {invalid_quoted_expr, Tuple}); + +%% Literals + +expand({Left, Right}, E) -> + {[ELeft, ERight], EE} = expand_args([Left, Right], E), + {{ELeft, ERight}, EE}; + +expand(List, #{context := match} = E) when is_list(List) -> + expand_list(List, fun expand/2, E, []); + +expand(List, E) when is_list(List) -> + {EArgs, {EC, EV}} = expand_list(List, fun expand_arg/2, {E, E}, []), + {EArgs, elixir_env:mergea(EV, EC)}; + +expand(Function, E) when is_function(Function) -> + case (erlang:fun_info(Function, type) == {type, external}) andalso + (erlang:fun_info(Function, env) == {env, []}) of + true -> + {Function, E}; + false -> + form_error([{line, 0}], ?key(E, file), ?MODULE, {invalid_quoted_expr, Function}) + end; + + +expand(PidOrRef, E) when is_pid(PidOrRef); is_reference(PidOrRef) -> + case ?key(E, function) of + nil -> + PidOrRef; + Function -> + %% TODO: Make me an error on 2.0 + elixir_errors:form_warn([], ?key(E, file), ?MODULE, + {invalid_pid_or_ref_in_function, PidOrRef, Function}) + end; + +expand(Other, E) when is_number(Other); is_atom(Other); is_binary(Other) -> + {Other, E}; + +expand(Other, E) -> + form_error([{line, 0}], ?key(E, file), ?MODULE, {invalid_quoted_expr, Other}). + +%% Helpers + +expand_boolean_check(Op, Expr, TrueClause, FalseClause, Meta, Env) -> + {EExpr, EnvExpr} = expand(Expr, Env), + Clauses = + case elixir_utils:returns_boolean(EExpr) of + true -> + [TrueClause, FalseClause]; + false -> + Other = {other, Meta, ?var_context}, + OtherExpr = {{'.', Meta, [erlang, error]}, Meta, [{'{}', [], [badbool, Op, Other]}]}, + [TrueClause, FalseClause, {'->', ?generated(Meta), [[Other], OtherExpr]}] + end, + {EClauses, EnvCase} = elixir_clauses:'case'(Meta, [{do, Clauses}], EnvExpr), + {{'case', Meta, [EExpr, EClauses]}, EnvCase}. + +expand_multi_alias_call(Kind, Meta, Base, Refs, Opts, E) -> + {BaseRef, EB} = expand_without_aliases_report(Base, E), + Fun = fun + ({'__aliases__', _, Ref}, ER) -> + expand({Kind, Meta, [elixir_aliases:concat([BaseRef | Ref]), Opts]}, ER); + (Ref, ER) when is_atom(Ref) -> + expand({Kind, Meta, [elixir_aliases:concat([BaseRef, Ref]), Opts]}, ER); + (Other, _ER) -> + form_error(Meta, ?key(E, file), ?MODULE, {expected_compile_time_module, Kind, Other}) + end, + lists:mapfoldl(Fun, EB, Refs). + +expand_list([{'|', Meta, [_, _] = Args}], Fun, Acc, List) -> + {EArgs, EAcc} = lists:mapfoldl(Fun, Acc, Args), + expand_list([], Fun, EAcc, [{'|', Meta, EArgs} | List]); +expand_list([H | T], Fun, Acc, List) -> + {EArg, EAcc} = Fun(H, Acc), + expand_list(T, Fun, EAcc, [EArg | List]); +expand_list([], _Fun, Acc, List) -> + {lists:reverse(List), Acc}. + +expand_block([], Acc, _Meta, E) -> + {lists:reverse(Acc), E}; +expand_block([H], Acc, Meta, E) -> + {EH, EE} = expand(H, E), + expand_block([], [EH | Acc], Meta, EE); +expand_block([H | T], Acc, Meta, E) -> + {EH, EE} = expand(H, E), + + %% Notice checks rely on the code BEFORE expansion + %% instead of relying on Erlang checks. + %% + %% That's because expansion may generate useless + %% terms on their own (think compile time removed + %% logger calls) and we don't want to catch those. + %% + %% Or, similarly, the work is all in the expansion + %% (for example, to register something) and it is + %% simply returning something as replacement. 
+ case is_useless_building(H, EH, Meta) of + {UselessMeta, UselessTerm} -> + elixir_errors:form_warn(UselessMeta, ?key(E, file), ?MODULE, UselessTerm); + false -> + ok + end, + + expand_block(T, [EH | Acc], Meta, EE). + +%% Notice we don't handle atoms on purpose. They are common +%% when unquoting AST and it is unlikely that we would catch +%% bugs as we don't do binary operations on them like in +%% strings or numbers. +is_useless_building(H, _, Meta) when is_binary(H); is_number(H) -> + {Meta, {useless_literal, H}}; +is_useless_building({'@', Meta, [{Var, _, Ctx}]}, _, _) when is_atom(Ctx); Ctx == [] -> + {Meta, {useless_attr, Var}}; +is_useless_building({Var, Meta, Ctx}, {Var, _, Ctx}, _) when is_atom(Ctx) -> + {Meta, {useless_var, Var}}; +is_useless_building(_, _, _) -> + false. + +%% Variables in arguments are not propagated from one +%% argument to the other. For instance: +%% +%% x = 1 +%% foo(x = x + 2, x) +%% x +%% +%% Should be the same as: +%% +%% foo(3, 1) +%% 3 +%% +%% However, lexical information is. +expand_arg(Arg, Acc) when is_number(Arg); is_atom(Arg); is_binary(Arg); is_pid(Arg) -> + {Arg, Acc}; +expand_arg(Arg, {Acc1, Acc2}) -> + {EArg, EAcc} = expand(Arg, Acc1), + {EArg, {elixir_env:mergea(Acc1, EAcc), elixir_env:mergev(Acc2, EAcc)}}. + +expand_args([Arg], E) -> + {EArg, EE} = expand(Arg, E), + {[EArg], EE}; +expand_args(Args, #{context := match} = E) -> + lists:mapfoldl(fun expand/2, E, Args); +expand_args(Args, E) -> + {EArgs, {EC, EV}} = lists:mapfoldl(fun expand_arg/2, {E, E}, Args), + {EArgs, elixir_env:mergea(EV, EC)}. + +%% Match/var helpers + +var_kind(Meta, Kind) -> + case lists:keyfind(counter, 1, Meta) of + {counter, Counter} -> Counter; + false -> Kind + end. + +%% Case + +expand_case(true, Meta, Expr, Opts, E) -> + assert_no_match_or_guard_scope(Meta, 'case', E), + {EExpr, EE} = expand(Expr, E), + {EOpts, EO} = elixir_clauses:'case'(Meta, Opts, EE), + ROpts = + case proplists:get_value(optimize_boolean, Meta, false) andalso + elixir_utils:returns_boolean(EExpr) of + true -> rewrite_case_clauses(EOpts); + false -> EOpts + end, + {{'case', Meta, [EExpr, ROpts]}, EO}; +expand_case(false, Meta, Expr, Opts, E) -> + {Case, _} = expand_case(true, Meta, Expr, Opts, E), + {Case, E}. + +rewrite_case_clauses([{do, [ + {'->', FalseMeta, [ + [{'when', _, [Var, {{'.', _, [erlang, 'or']}, _, [ + {{'.', _, [erlang, '=:=']}, _, [Var, nil]}, + {{'.', _, [erlang, '=:=']}, _, [Var, false]} + ]}]}], + FalseExpr + ]}, + {'->', TrueMeta, [ + [{'_', _, _}], + TrueExpr + ]} +]}]) -> + [{do, [ + {'->', FalseMeta, [[false], FalseExpr]}, + {'->', TrueMeta, [[true], TrueExpr]} + ]}]; +rewrite_case_clauses(Other) -> + Other. + +%% Locals + +assert_no_ambiguous_op(Name, Meta, [Arg], E) -> + case lists:keyfind(ambiguous_op, 1, Meta) of + {ambiguous_op, Kind} -> + case lists:member({Name, Kind}, ?key(E, vars)) of + true -> + form_error(Meta, ?key(E, file), ?MODULE, {op_ambiguity, Name, Arg}); + false -> + ok + end; + _ -> + ok + end; +assert_no_ambiguous_op(_Atom, _Meta, _Args, _E) -> + ok. 
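As a quick illustration of the behaviour described in the comment above expand_arg (variables bound in one call argument are not visible to the other arguments, but are exported after the call), here is a small Elixir sketch; `Demo.pair/2` is a hypothetical helper introduced only for this example:

```elixir
defmodule Demo do
  # Hypothetical two-argument function, used only to observe argument expansion.
  def pair(a, b), do: {a, b}
end

x = 1
Demo.pair(x = x + 2, x)  # => {3, 1} — the second argument still sees the old x
x                        # => 3 — the rebinding is exported after the call
```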
+ +expand_local(Meta, Name, Args, #{function := nil} = E) -> + form_error(Meta, ?key(E, file), ?MODULE, {undefined_function, Name, Args}); +expand_local(Meta, Name, Args, #{context := Context} = E) when Context == match; Context == guard -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_local_invocation, Context, {Name, Meta, Args}}); +expand_local(Meta, Name, Args, #{module := Module, function := Function} = E) -> + elixir_locals:record_local({Name, length(Args)}, Module, Function), + {EArgs, EA} = expand_args(Args, E), + {{Name, Meta, EArgs}, EA}. + +%% Remote + +expand_remote(Receiver, DotMeta, Right, Meta, Args, #{context := Context} = E, EL) -> + Arity = length(Args), + is_atom(Receiver) andalso + elixir_lexical:record_remote(Receiver, Right, Arity, + ?key(E, function), ?line(Meta), ?key(E, lexical_tracker)), + {EArgs, EA} = expand_args(Args, E), + Rewritten = elixir_rewrite:rewrite(Receiver, DotMeta, Right, Meta, EArgs), + case allowed_in_context(Rewritten, Arity, Context) of + true -> + {Rewritten, elixir_env:mergev(EL, EA)}; + false -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_remote_invocation, Context, Receiver, Right, Arity}) + end. + +allowed_in_context({{'.', _, [erlang, Right]}, _, _}, Arity, match) -> + elixir_utils:match_op(Right, Arity); +allowed_in_context(_, _Arity, match) -> + false; +allowed_in_context({{'.', _, [erlang, Right]}, _, _}, Arity, guard) -> + erl_internal:guard_bif(Right, Arity) orelse elixir_utils:guard_op(Right, Arity); +allowed_in_context(_, _Arity, guard) -> + false; +allowed_in_context(_, _, _) -> + true. + +%% Lexical helpers + +expand_opts(Meta, Kind, Allowed, Opts, E) -> + {EOpts, EE} = expand(Opts, E), + validate_opts(Meta, Kind, Allowed, EOpts, EE), + {EOpts, EE}. + +validate_opts(Meta, Kind, Allowed, Opts, E) when is_list(Opts) -> + [begin + form_error(Meta, ?key(E, file), ?MODULE, {unsupported_option, Kind, Key}) + end || {Key, _} <- Opts, not lists:member(Key, Allowed)]; + +validate_opts(Meta, Kind, _Allowed, Opts, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {options_are_not_keyword, Kind, Opts}). + +no_alias_opts(Opts) when is_list(Opts) -> + case lists:keyfind(as, 1, Opts) of + {as, As} -> lists:keystore(as, 1, Opts, {as, no_alias_expansion(As)}); + false -> Opts + end; +no_alias_opts(Opts) -> Opts. + +no_alias_expansion({'__aliases__', _, [H | T]}) when is_atom(H) -> + elixir_aliases:concat([H | T]); +no_alias_expansion(Other) -> + Other. + +expand_require(Meta, Ref, Opts, E) -> + %% We always record requires when they are defined + %% as they expect the reference at compile time. + elixir_lexical:record_remote(Ref, nil, ?key(E, lexical_tracker)), + RE = E#{requires := ordsets:add_element(Ref, ?key(E, requires))}, + expand_alias(Meta, false, Ref, Opts, RE). + +expand_alias(Meta, IncludeByDefault, Ref, Opts, #{context_modules := Context} = E) -> + New = expand_as(lists:keyfind(as, 1, Opts), Meta, IncludeByDefault, Ref, E), + + %% Add the alias to context_modules if defined is set. + %% This is used by defmodule in order to store the defined + %% module in context modules. + NewContext = + case lists:keyfind(defined, 1, Meta) of + {defined, Mod} when is_atom(Mod) -> [Mod | Context]; + false -> Context + end, + + {Aliases, MacroAliases} = elixir_aliases:store(Meta, New, Ref, Opts, ?key(E, aliases), + ?key(E, macro_aliases), ?key(E, lexical_tracker)), + + E#{aliases := Aliases, macro_aliases := MacroAliases, context_modules := NewContext}. 
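allowed_in_context/3 above is what rejects arbitrary remote calls inside matches and guards; only a small set of Erlang operators and guard BIFs pass. A minimal sketch from the Elixir side (the rejected line is shown commented out):

```elixir
defmodule Guards do
  # Allowed: is_integer/1, and/2 and > are guard-safe.
  def positive_int?(x) when is_integer(x) and x > 0, do: true
  def positive_int?(_), do: false

  # Rejected at compile time with "cannot invoke remote function ... inside guard":
  # def long?(s) when String.length(s) > 3, do: true
end
```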
+ +expand_as({as, nil}, _Meta, _IncludeByDefault, Ref, _E) -> + Ref; +expand_as({as, Atom}, Meta, _IncludeByDefault, _Ref, E) when is_atom(Atom), not is_boolean(Atom) -> + case atom_to_list(Atom) of + "Elixir." ++ Rest -> + case string:tokens(Rest, ".") of + [Rest] -> + Atom; + _ -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_alias_for_as, nested_alias, Atom}) + end; + _ -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_alias_for_as, not_alias, Atom}) + end; +expand_as(false, _Meta, IncludeByDefault, Ref, _E) -> + if IncludeByDefault -> elixir_aliases:last(Ref); + true -> Ref + end; +expand_as({as, Other}, Meta, _IncludeByDefault, _Ref, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_alias_for_as, not_alias, Other}). + +%% Aliases + +expand_without_aliases_report({'__aliases__', _, _} = Alias, E) -> + expand_aliases(Alias, E, false); +expand_without_aliases_report(Other, E) -> + expand(Other, E). + +expand_aliases({'__aliases__', Meta, _} = Alias, E, Report) -> + case elixir_aliases:expand(Alias, ?key(E, aliases), ?key(E, macro_aliases), ?key(E, lexical_tracker)) of + Receiver when is_atom(Receiver) -> + Report andalso + elixir_lexical:record_remote(Receiver, ?key(E, function), ?key(E, lexical_tracker)), + {Receiver, E}; + Aliases -> + {EAliases, EA} = expand_args(Aliases, E), + + case lists:all(fun is_atom/1, EAliases) of + true -> + Receiver = elixir_aliases:concat(EAliases), + Report andalso + elixir_lexical:record_remote(Receiver, ?key(E, function), ?key(E, lexical_tracker)), + {Receiver, EA}; + false -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_alias, Alias}) + end + end. + +%% Comprehensions + +expand_for({'<-', Meta, [Left, Right]}, E) -> + {ERight, ER} = expand(Right, E), + {[ELeft], EL} = elixir_clauses:head([Left], E), + {{'<-', Meta, [ELeft, ERight]}, elixir_env:mergev(EL, ER)}; +expand_for({'<<>>', Meta, Args} = X, E) when is_list(Args) -> + case elixir_utils:split_last(Args) of + {LeftStart, {'<-', OpMeta, [LeftEnd, Right]}} -> + {ERight, ER} = expand(Right, E), + {ELeft, EL} = elixir_clauses:match(fun(BArg, BE) -> + elixir_bitstring:expand(Meta, BArg, BE, true) + end, LeftStart ++ [LeftEnd], E), + {{'<<>>', [], [{'<-', OpMeta, [ELeft, ERight]}]}, elixir_env:mergev(EL, ER)}; + _ -> + expand(X, E) + end; +expand_for(X, E) -> + expand(X, E). + +assert_generator_start(_, [{'<-', _, [_, _]} | _], _) -> + ok; +assert_generator_start(_, [{'<<>>', _, [{'<-', _, [_, _]}]} | _], _) -> + ok; +assert_generator_start(Meta, _, E) -> + elixir_errors:form_error(Meta, ?key(E, file), ?MODULE, for_generator_start). + +%% Assertions + +assert_module_scope(Meta, Kind, #{module := nil, file := File}) -> + form_error(Meta, File, ?MODULE, {invalid_expr_in_scope, "module", Kind}); +assert_module_scope(_Meta, _Kind, #{module:=Module}) -> Module. + +assert_function_scope(Meta, Kind, #{function := nil, file := File}) -> + form_error(Meta, File, ?MODULE, {invalid_expr_in_scope, "function", Kind}); +assert_function_scope(_Meta, _Kind, #{function := Function}) -> Function. + +assert_no_match_or_guard_scope(Meta, Kind, E) -> + assert_no_match_scope(Meta, Kind, E), + assert_no_guard_scope(Meta, Kind, E). +assert_no_match_scope(Meta, _Kind, #{context := match, file := File}) -> + form_error(Meta, File, ?MODULE, invalid_pattern_in_match); +assert_no_match_scope(_Meta, _Kind, _E) -> []. +assert_no_guard_scope(Meta, _Kind, #{context := guard, file := File}) -> + form_error(Meta, File, ?MODULE, invalid_expr_in_guard); +assert_no_guard_scope(_Meta, _Kind, _E) -> []. 
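expand_for/2 and assert_generator_start/3 above accept both plain and bitstring generators and insist that a comprehension opens with one. For reference, a sketch of the accepted and rejected shapes:

```elixir
for x <- [1, 2, 3], rem(x, 2) == 1, do: x * x
# => [1, 9]

for <<byte <- "abc">>, into: "", do: <<byte + 1>>
# => "bcd"

# A comprehension that does not open with a generator, e.g.
# `for is_integer(x), x <- [1, 2], do: x`, is rejected with
# "for comprehensions must start with a generator".
```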
+ +%% Here we look into the Clauses "optimistically", that is, we don't check for +%% multiple "do"s and similar stuff. After all, the error we're gonna give here +%% is just a friendlier version of the "undefined variable _" error that we +%% would raise if we found a "_ -> ..." clause in a "cond". For this reason, if +%% Clauses has a bad shape, we just do nothing and let future functions catch +%% this. +assert_no_underscore_clause_in_cond([{do, Clauses}], E) when is_list(Clauses) -> + case lists:last(Clauses) of + {'->', Meta, [[{'_', _, Atom}], _]} when is_atom(Atom) -> + form_error(Meta, ?key(E, file), ?MODULE, underscore_in_cond); + _Other -> + ok + end; +assert_no_underscore_clause_in_cond(_Other, _E) -> + ok. + +%% Warnings + +format_error({useless_literal, Term}) -> + io_lib:format("code block contains unused literal ~ts " + "(remove the literal or assign it to _ to avoid warnings)", + ['Elixir.Macro':to_string(Term)]); +format_error({useless_var, Var}) -> + io_lib:format("variable ~ts in code block has no effect as it is never returned " + "(remove the variable or assign it to _ to avoid warnings)", + [Var]); +format_error({useless_attr, Attr}) -> + io_lib:format("module attribute @~ts in code block has no effect as it is never returned " + "(remove the attribute or assign it to _ to avoid warnings)", + [Attr]); + +%% Errors + +format_error({missing_option, Construct, Opts}) when is_list(Opts) -> + StringOpts = lists:map(fun(Opt) -> [$: | atom_to_list(Opt)] end, Opts), + io_lib:format("missing ~ts option in \"~ts\"", [string:join(StringOpts, "/"), Construct]); +format_error({invalid_args, Construct}) -> + io_lib:format("invalid arguments for \"~ts\"", [Construct]); +format_error(for_generator_start) -> + "for comprehensions must start with a generator"; +format_error(unhandled_arrow_op) -> + "unhandled operator ->"; +format_error(as_in_multi_alias_call) -> + ":as option is not supported by multi-alias call"; +format_error({expected_compile_time_module, Kind, GivenTerm}) -> + io_lib:format("invalid argument for ~ts, expected a compile time atom or alias, got: ~ts", + [Kind, 'Elixir.Macro':to_string(GivenTerm)]); +format_error({unquote_outside_quote, Unquote}) -> + %% Unquote can be "unquote" or "unquote_splicing". 
+ io_lib:format("~p called outside quote", [Unquote]); +format_error({invalid_context_opt_for_quote, Context}) -> + io_lib:format("invalid :context for quote, expected non-nil compile time atom or alias, got: ~ts", + ['Elixir.Macro':to_string(Context)]); +format_error(wrong_number_of_args_for_super) -> + "super must be called with the same number of arguments as the current definition"; +format_error({unbound_variable_pin, VarName}) -> + io_lib:format("unbound variable ^~ts", [VarName]); +format_error({invalid_arg_for_pin, Arg}) -> + io_lib:format("invalid argument for unary operator ^, expected an existing variable, got: ^~ts", + ['Elixir.Macro':to_string(Arg)]); +format_error({pin_outside_of_match, Arg}) -> + io_lib:format("cannot use ^~ts outside of match clauses", ['Elixir.Macro':to_string(Arg)]); +format_error({pin_inside_definition, Arg}) -> + io_lib:format("cannot use ^~ts on function/macro definition as there are no previous variables", ['Elixir.Macro':to_string(Arg)]); +format_error(unbound_underscore) -> + "unbound variable _"; +format_error({undefined_var, Name, Kind}) -> + Message = + "expected variable \"~ts\"~ts to expand to an existing variable " + "or be part of a match", + io_lib:format(Message, [Name, elixir_erl_var:context_info(Kind)]); +format_error(underscore_in_cond) -> + "unbound variable _ inside \"cond\". If you want the last clause to always match, " + "you probably meant to use: true ->"; +format_error(invalid_expr_in_guard) -> + "invalid expression in guard"; +format_error(invalid_pattern_in_match) -> + "invalid pattern in match"; +format_error({invalid_expr_in_scope, Scope, Kind}) -> + io_lib:format("cannot invoke ~ts outside ~ts", [Kind, Scope]); +format_error({invalid_alias, Expr}) -> + Message = + "invalid alias: \"~ts\". If you wanted to define an alias, an alias must expand " + "to an atom at compile time but it did not, you may use Module.concat/2 to build " + "it at runtime. 
If instead you wanted to invoke a function or access a field, " + "wrap the function or field name in double quotes", + io_lib:format(Message, ['Elixir.Macro':to_string(Expr)]); +format_error({op_ambiguity, Name, Arg}) -> + Message = + "\"~ts ~ts\" looks like a function call but there is a variable named \"~ts\", " + "please use explicit parentheses or even spaces", + io_lib:format(Message, [Name, 'Elixir.Macro':to_string(Arg), Name]); +format_error({invalid_alias_for_as, Reason, Value}) -> + ExpectedGot = + case Reason of + not_alias -> "expected an alias, got"; + nested_alias -> "expected a simple alias, got nested alias" + end, + io_lib:format("invalid value for option :as, ~ts: ~ts", + [ExpectedGot, 'Elixir.Macro':to_string(Value)]); +format_error({invalid_function_call, Expr}) -> + io_lib:format("invalid function call :~ts.()", [Expr]); +format_error({invalid_call, Call}) -> + io_lib:format("invalid call ~ts", ['Elixir.Macro':to_string(Call)]); +format_error({invalid_quoted_expr, Expr}) -> + io_lib:format("invalid quoted expression: ~ts", ['Elixir.Kernel':inspect(Expr, [])]); +format_error({invalid_local_invocation, Context, {Name, _, Args} = Call}) -> + io_lib:format("cannot invoke local ~ts/~B inside ~ts, called as: ~ts", + [Name, length(Args), Context, 'Elixir.Macro':to_string(Call)]); +format_error({invalid_remote_invocation, Context, Receiver, Right, Arity}) -> + io_lib:format("cannot invoke remote function ~ts.~ts/~B inside ~ts", + ['Elixir.Macro':to_string(Receiver), Right, Arity, Context]); +format_error({invalid_pid_or_ref_in_function, PidOrRef, {Name, Arity}}) -> + io_lib:format("cannot compile PID/Reference ~ts inside quoted expression for function ~ts/~B", + ['Elixir.Kernel':inspect(PidOrRef, []), Name, Arity]); +format_error({unsupported_option, Kind, Key}) -> + io_lib:format("unsupported option ~ts given to ~s", + ['Elixir.Macro':to_string(Key), Kind]); +format_error({options_are_not_keyword, Kind, Opts}) -> + io_lib:format("invalid options for ~s, expected a keyword list, got: ~ts", + [Kind, 'Elixir.Macro':to_string(Opts)]); +format_error({undefined_function, Name, Args}) -> + io_lib:format("undefined function ~ts/~B", [Name, length(Args)]). diff --git a/lib/elixir/src/elixir_fn.erl b/lib/elixir/src/elixir_fn.erl index d51d5702818..b404cb22f0e 100644 --- a/lib/elixir/src/elixir_fn.erl +++ b/lib/elixir/src/elixir_fn.erl @@ -1,47 +1,37 @@ -module(elixir_fn). --export([translate/3, capture/3, expand/3]). --import(elixir_errors, [compile_error/3, compile_error/4]). +-export([capture/3, expand/3, format_error/1]). +-import(elixir_errors, [form_error/4]). -include("elixir.hrl"). 
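The pin-related errors formatted above correspond to the following situations; a sketch, with the rejected lines commented out:

```elixir
x = 1
^x = 1               # ok: matches against the current value of x
{:ok, ^x} = {:ok, 1} # ok inside any match

# ^y = 1             # unbound variable ^y
# if ^x, do: :ok     # cannot use ^x outside of match clauses
# def f(^x), do: x   # no previous variables exist in a definition head
```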
-translate(Meta, Clauses, S) -> - Transformer = fun({'->', CMeta, [ArgsWithGuards, Expr]}, Acc) -> - {Args, Guards} = elixir_clauses:extract_splat_guards(ArgsWithGuards), - {TClause, TS } = elixir_clauses:clause(?line(CMeta), fun translate_fn_match/2, - Args, Expr, Guards, true, Acc), - {TClause, elixir_scope:mergef(S, TS)} +%% Anonymous functions + +expand(Meta, Clauses, E) when is_list(Clauses) -> + Transformer = fun(Clause) -> + {EClause, _} = elixir_clauses:clause(Meta, fn, fun elixir_clauses:head/2, Clause, E), + EClause end, - {TClauses, NS} = lists:mapfoldl(Transformer, S, Clauses), - Arities = [length(Args) || {clause, _Line, Args, _Guards, _Exprs} <- TClauses], + EClauses = lists:map(Transformer, Clauses), + EArities = [fn_arity(Args) || {'->', _, [Args, _]} <- EClauses], - case lists:usort(Arities) of + case lists:usort(EArities) of [_] -> - {{'fun', ?line(Meta), {clauses, TClauses}}, NS}; + {{fn, Meta, EClauses}, E}; _ -> - compile_error(Meta, S#elixir_scope.file, - "cannot mix clauses with different arities in function definition") + form_error(Meta, ?key(E, file), ?MODULE, clauses_with_different_arities) end. -translate_fn_match(Arg, S) -> - {TArg, TS} = elixir_translator:translate_args(Arg, S#elixir_scope{backup_vars=orddict:new()}), - {TArg, TS#elixir_scope{backup_vars=S#elixir_scope.backup_vars}}. - -%% Expansion - -expand(Meta, Clauses, E) when is_list(Clauses) -> - Transformer = fun(Clause) -> - {EClause, _} = elixir_exp_clauses:clause(Meta, fn, fun elixir_exp_clauses:head/2, Clause, E), - EClause - end, - {{fn, Meta, lists:map(Transformer, Clauses)}, E}. +fn_arity([{'when', _, Args}]) -> length(Args) - 1; +fn_arity(Args) -> length(Args). %% Capture -capture(Meta, {'/', _, [{{'.', _, [_, F]} = Dot, RequireMeta , []}, A]}, E) when is_atom(F), is_integer(A) -> - Args = [{'&', [], [X]} || X <- lists:seq(1, A)], +capture(Meta, {'/', _, [{{'.', _, [_, F]} = Dot, RequireMeta, []}, A]}, E) when is_atom(F), is_integer(A) -> + Args = args_from_arity(Meta, A, E), capture_require(Meta, {Dot, RequireMeta, Args}, E, true); capture(Meta, {'/', _, [{F, _, C}, A]}, E) when is_atom(F), is_integer(A), is_atom(C) -> + Args = args_from_arity(Meta, A, E), ImportMeta = case lists:keyfind(import_fa, 1, Meta) of {import_fa, {Receiver, Context}} -> @@ -51,21 +41,19 @@ capture(Meta, {'/', _, [{F, _, C}, A]}, E) when is_atom(F), is_integer(A), is_at ); false -> Meta end, - Args = [{'&', [], [X]} || X <- lists:seq(1, A)], capture_import(Meta, {F, ImportMeta, Args}, E, true); capture(Meta, {{'.', _, [_, Fun]}, _, Args} = Expr, E) when is_atom(Fun), is_list(Args) -> capture_require(Meta, Expr, E, is_sequential_and_not_empty(Args)); capture(Meta, {{'.', _, [_]}, _, Args} = Expr, E) when is_list(Args) -> - do_capture(Meta, Expr, E, false); + capture_expr(Meta, Expr, E, false); capture(Meta, {'__block__', _, [Expr]}, E) -> capture(Meta, Expr, E); capture(Meta, {'__block__', _, _} = Expr, E) -> - Message = "invalid args for &, block expressions are not allowed, got: ~ts", - compile_error(Meta, ?m(E, file), Message, ['Elixir.Macro':to_string(Expr)]); + form_error(Meta, ?key(E, file), ?MODULE, {block_expr_in_capture, Expr}); capture(Meta, {Atom, _, Args} = Expr, E) when is_atom(Atom), is_list(Args) -> capture_import(Meta, Expr, E, is_sequential_and_not_empty(Args)); @@ -74,7 +62,10 @@ capture(Meta, {Left, Right}, E) -> capture(Meta, {'{}', Meta, [Left, Right]}, E); capture(Meta, List, E) when is_list(List) -> - do_capture(Meta, List, E, is_sequential_and_not_empty(List)); + capture_expr(Meta, List, E, 
is_sequential_and_not_empty(List)); + +capture(Meta, Integer, E) when is_integer(Integer) -> + form_error(Meta, ?key(E, file), ?MODULE, {capture_arg_outside_of_capture, Integer}); capture(Meta, Arg, E) -> invalid_capture(Meta, Arg, E). @@ -84,81 +75,101 @@ capture_import(Meta, {Atom, ImportMeta, Args} = Expr, E, Sequential) -> elixir_dispatch:import_function(ImportMeta, Atom, length(Args), E), handle_capture(Res, Meta, Expr, E, Sequential). -capture_require(Meta, {{'.', _, [Left, Right]}, RequireMeta, Args} = Expr, E, Sequential) -> - {Mod, EE} = elixir_exp:expand(Left, E), - Res = Sequential andalso case Mod of - {Name, _, Context} when is_atom(Name), is_atom(Context) -> - {remote, Mod, Right, length(Args)}; - _ when is_atom(Mod) -> - elixir_dispatch:require_function(RequireMeta, Mod, Right, length(Args), EE); - _ -> - false - end, - handle_capture(Res, Meta, Expr, EE, Sequential). +capture_require(Meta, {{'.', DotMeta, [Left, Right]}, RequireMeta, Args}, E, Sequential) -> + case escape(Left, E, []) of + {EscLeft, []} -> + {ELeft, EE} = elixir_expand:expand(EscLeft, E), + Res = Sequential andalso case ELeft of + {Name, _, Context} when is_atom(Name), is_atom(Context) -> + {remote, ELeft, Right, length(Args)}; + _ when is_atom(ELeft) -> + elixir_dispatch:require_function(RequireMeta, ELeft, Right, length(Args), EE); + _ -> + false + end, + handle_capture(Res, Meta, {{'.', DotMeta, [ELeft, Right]}, RequireMeta, Args}, + EE, Sequential); + {EscLeft, Escaped} -> + capture_expr(Meta, {{'.', DotMeta, [EscLeft, Right]}, RequireMeta, Args}, + E, Escaped, Sequential) + end. -handle_capture({local, Fun, Arity}, _Meta, _Expr, _E, _Sequential) -> - {local, Fun, Arity}; -handle_capture({remote, Receiver, Fun, Arity}, Meta, _Expr, E, _Sequential) -> - Tree = {{'.', [], [erlang, make_fun]}, Meta, [Receiver, Fun, Arity]}, - {expanded, Tree, E}; handle_capture(false, Meta, Expr, E, Sequential) -> - do_capture(Meta, Expr, E, Sequential). - -do_capture(Meta, Expr, E, Sequential) -> - case do_escape(Expr, elixir_counter:next(), E, []) of + capture_expr(Meta, Expr, E, Sequential); +handle_capture(LocalOrRemote, _Meta, _Expr, _E, _Sequential) -> + LocalOrRemote. + +capture_expr(Meta, Expr, E, Sequential) -> + capture_expr(Meta, Expr, E, [], Sequential). +capture_expr(Meta, Expr, E, Escaped, Sequential) -> + case escape(Expr, E, Escaped) of {_, []} when not Sequential -> invalid_capture(Meta, Expr, E); {EExpr, EDict} -> EVars = validate(Meta, EDict, 1, E), Fn = {fn, Meta, [{'->', Meta, [EVars, EExpr]}]}, - {expanded, Fn, E} + {expand, Fn, E} end. invalid_capture(Meta, Arg, E) -> - Message = "invalid args for &, expected an expression in the format of &Mod.fun/arity, " - "&local/arity or a capture containing at least one argument as &1, got: ~ts", - compile_error(Meta, ?m(E, file), Message, ['Elixir.Macro':to_string(Arg)]). - -validate(Meta, [{Pos, Var}|T], Pos, E) -> - [Var|validate(Meta, T, Pos + 1, E)]; - -validate(Meta, [{Pos, _}|_], Expected, E) -> - compile_error(Meta, ?m(E, file), "capture &~B cannot be defined without &~B", [Pos, Expected]); + form_error(Meta, ?key(E, file), ?MODULE, {invalid_args_for_capture, Arg}). +validate(Meta, [{Pos, Var} | T], Pos, E) -> + [Var | validate(Meta, T, Pos + 1, E)]; +validate(Meta, [{Pos, _} | _], Expected, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {capture_arg_without_predecessor, Pos, Expected}); validate(_Meta, [], _Pos, _E) -> []. 
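capture/3 above distinguishes remote, local and expression captures; from the caller's side the supported shapes look roughly like this:

```elixir
up   = &String.upcase/1   # remote capture: Module.function/arity
pair = &{&1, &2}          # expression capture, rewritten into an anonymous function
up.("abc")                # => "ABC"
pair.(1, 2)               # => {1, 2}

# Inside a module, &local_fun/2 captures a local definition in the same way.
```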
-do_escape({'&', _, [Pos]}, Counter, _E, Dict) when is_integer(Pos), Pos > 0 -> - Var = {list_to_atom([$x, $@+Pos]), [{counter, Counter}], elixir_fn}, +escape({'&', _, [Pos]}, _E, Dict) when is_integer(Pos), Pos > 0 -> + Var = {list_to_atom([$x | integer_to_list(Pos)]), [], ?var_context}, {Var, orddict:store(Pos, Var, Dict)}; - -do_escape({'&', Meta, [Pos]}, _Counter, E, _Dict) when is_integer(Pos) -> - compile_error(Meta, ?m(E, file), "capture &~B is not allowed", [Pos]); - -do_escape({'&', Meta, _} = Arg, _Counter, E, _Dict) -> - Message = "nested captures via & are not allowed: ~ts", - compile_error(Meta, ?m(E, file), Message, ['Elixir.Macro':to_string(Arg)]); - -do_escape({Left, Meta, Right}, Counter, E, Dict0) -> - {TLeft, Dict1} = do_escape(Left, Counter, E, Dict0), - {TRight, Dict2} = do_escape(Right, Counter, E, Dict1), +escape({'&', Meta, [Pos]}, E, _Dict) when is_integer(Pos) -> + form_error(Meta, ?key(E, file), ?MODULE, {unallowed_capture_arg, Pos}); +escape({'&', Meta, _} = Arg, E, _Dict) -> + form_error(Meta, ?key(E, file), ?MODULE, {nested_capture, Arg}); +escape({Left, Meta, Right}, E, Dict0) -> + {TLeft, Dict1} = escape(Left, E, Dict0), + {TRight, Dict2} = escape(Right, E, Dict1), {{TLeft, Meta, TRight}, Dict2}; - -do_escape({Left, Right}, Counter, E, Dict0) -> - {TLeft, Dict1} = do_escape(Left, Counter, E, Dict0), - {TRight, Dict2} = do_escape(Right, Counter, E, Dict1), +escape({Left, Right}, E, Dict0) -> + {TLeft, Dict1} = escape(Left, E, Dict0), + {TRight, Dict2} = escape(Right, E, Dict1), {{TLeft, TRight}, Dict2}; - -do_escape(List, Counter, E, Dict) when is_list(List) -> - lists:mapfoldl(fun(X, Acc) -> do_escape(X, Counter, E, Acc) end, Dict, List); - -do_escape(Other, _Counter, _E, Dict) -> +escape(List, E, Dict) when is_list(List) -> + lists:mapfoldl(fun(X, Acc) -> escape(X, E, Acc) end, Dict, List); +escape(Other, _E, Dict) -> {Other, Dict}. +args_from_arity(_Meta, A, _E) when is_integer(A), A >= 0, A =< 255 -> + [{'&', [], [X]} || X <- lists:seq(1, A)]; +args_from_arity(Meta, A, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_arity_for_capture, A}). + is_sequential_and_not_empty([]) -> false; is_sequential_and_not_empty(List) -> is_sequential(List, 1). -is_sequential([{'&', _, [Int]}|T], Int) -> - is_sequential(T, Int + 1); +is_sequential([{'&', _, [Int]} | T], Int) -> is_sequential(T, Int + 1); is_sequential([], _Int) -> true; is_sequential(_, _Int) -> false. 
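escape/3 and args_from_arity/3 above enforce the rules on &N placeholders and on capture arities; roughly:

```elixir
first = &elem(&1, 0)      # fine: placeholders start at &1 and are consecutive

# &elem(&2, 0)            # capture &2 cannot be defined without &1
# &(&0 + 1)               # capture &0 is not allowed
# &String.upcase/1000     # invalid arity for &, expected a number between 0 and 255
```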
+ +format_error(clauses_with_different_arities) -> + "cannot mix clauses with different arities in anonymous functions"; +format_error({block_expr_in_capture, Expr}) -> + io_lib:format("invalid args for &, block expressions are not allowed, got: ~ts", + ['Elixir.Macro':to_string(Expr)]); +format_error({nested_capture, Arg}) -> + io_lib:format("nested captures via & are not allowed: ~ts", ['Elixir.Macro':to_string(Arg)]); +format_error({invalid_arity_for_capture, Arity}) -> + io_lib:format("invalid arity for &, expected a number between 0 and 255, got: ~b", [Arity]); +format_error({capture_arg_outside_of_capture, Integer}) -> + io_lib:format("unhandled &~B outside of a capture", [Integer]); +format_error({capture_arg_without_predecessor, Pos, Expected}) -> + io_lib:format("capture &~B cannot be defined without &~B", [Pos, Expected]); +format_error({unallowed_capture_arg, Integer}) -> + io_lib:format("capture &~B is not allowed", [Integer]); +format_error({invalid_args_for_capture, Arg}) -> + Message = + "invalid args for &, expected an expression in the format of &Mod.fun/arity, " + "&local/arity or a capture containing at least one argument as &1, got: ~ts", + io_lib:format(Message, ['Elixir.Macro':to_string(Arg)]). diff --git a/lib/elixir/src/elixir_for.erl b/lib/elixir/src/elixir_for.erl deleted file mode 100644 index d017f6db4d0..00000000000 --- a/lib/elixir/src/elixir_for.erl +++ /dev/null @@ -1,338 +0,0 @@ --module(elixir_for). --export([expand/3, translate/3]). --include("elixir.hrl"). - -%% Expansion - -expand(Meta, Args, E) -> - {Cases, Block} = - case elixir_utils:split_last(Args) of - {OuterCases, OuterOpts} when is_list(OuterOpts) -> - case elixir_utils:split_last(OuterCases) of - {InnerCases, InnerOpts} when is_list(InnerOpts) -> - {InnerCases, InnerOpts ++ OuterOpts}; - _ -> - {OuterCases, OuterOpts} - end; - _ -> - {Args, []} - end, - - {Expr, Opts} = - case lists:keyfind(do, 1, Block) of - {do, Do} -> {Do, lists:keydelete(do, 1, Block)}; - _ -> elixir_errors:compile_error(Meta, ?m(E, file), - "missing do keyword in for comprehension") - end, - - {EOpts, EO} = elixir_exp:expand(Opts, E), - {ECases, EC} = lists:mapfoldl(fun expand/2, EO, Cases), - {EExpr, _} = elixir_exp:expand(Expr, EC), - {{for, Meta, ECases ++ [[{do,EExpr}|EOpts]]}, E}. - -expand({'<-', Meta, [Left, Right]}, E) -> - {ERight, ER} = elixir_exp:expand(Right, E), - {ELeft, EL} = elixir_exp_clauses:match(fun elixir_exp:expand/2, Left, E), - {{'<-', Meta, [ELeft, ERight]}, elixir_env:mergev(EL, ER)}; -expand({'<<>>', Meta, Args} = X, E) when is_list(Args) -> - case elixir_utils:split_last(Args) of - {LeftStart, {'<-', OpMeta, [LeftEnd, Right]}} -> - {ERight, ER} = elixir_exp:expand(Right, E), - Left = {'<<>>', Meta, LeftStart ++ [LeftEnd]}, - {ELeft, EL} = elixir_exp_clauses:match(fun elixir_exp:expand/2, Left, E), - {{'<<>>', [], [ {'<-', OpMeta, [ELeft, ERight]}]}, elixir_env:mergev(EL, ER)}; - _ -> - elixir_exp:expand(X, E) - end; -expand(X, E) -> - elixir_exp:expand(X, E). 
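The clauses_with_different_arities error above is raised when an anonymous function mixes arities; every clause must take the same number of arguments. A sketch:

```elixir
add = fn
  0, y -> y
  x, y -> x + y
end

# fn
#   x -> x
#   x, y -> x + y
# end
# => cannot mix clauses with different arities in anonymous functions
```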
- -%% Translation - -translate(Meta, Args, #elixir_scope{return=Return} = RS) -> - S = RS#elixir_scope{return=true}, - {AccName, _, SA} = elixir_scope:build_var('_', S), - {VarName, _, SV} = elixir_scope:build_var('_', SA), - - Line = ?line(Meta), - Acc = {var, Line, AccName}, - Var = {var, Line, VarName}, - - {Cases, [{do,Expr}|Opts]} = elixir_utils:split_last(Args), - - {TInto, SI} = - case lists:keyfind(into, 1, Opts) of - {into, Into} -> elixir_translator:translate(Into, SV); - false when Return -> {{nil, Line}, SV}; - false -> {false, SV} - end, - - {TCases, SC} = translate_gen(Meta, Cases, [], SI), - {TExpr, SE} = elixir_translator:translate_block(Expr, Return, SC), - SF = elixir_scope:mergec(SI, SE), - - case comprehension_expr(TInto, TExpr) of - {inline, TIntoExpr} -> - {build_inline(Line, TCases, TIntoExpr, TInto, Var, Acc, SE), SF}; - {into, TIntoExpr} -> - build_into(Line, TCases, TIntoExpr, TInto, Var, Acc, SF) - end. - -translate_gen(ForMeta, [{'<-', Meta, [Left, Right]}|T], Acc, S) -> - {TLeft, TRight, TFilters, TT, TS} = translate_gen(Meta, Left, Right, T, S), - TAcc = [{enum, Meta, TLeft, TRight, TFilters}|Acc], - translate_gen(ForMeta, TT, TAcc, TS); -translate_gen(ForMeta, [{'<<>>', _, [ {'<-', Meta, [Left, Right]} ]}|T], Acc, S) -> - {TLeft, TRight, TFilters, TT, TS} = translate_gen(Meta, Left, Right, T, S), - TAcc = [{bin, Meta, TLeft, TRight, TFilters}|Acc], - case elixir_bitstring:has_size(TLeft) of - true -> translate_gen(ForMeta, TT, TAcc, TS); - false -> - elixir_errors:compile_error(Meta, S#elixir_scope.file, - "bitstring fields without size are not allowed in bitstring generators") - end; -translate_gen(_ForMeta, [], Acc, S) -> - {lists:reverse(Acc), S}; -translate_gen(ForMeta, _, _, S) -> - elixir_errors:compile_error(ForMeta, S#elixir_scope.file, - "for comprehensions must start with a generator"). - -translate_gen(_Meta, Left, Right, T, S) -> - {TRight, SR} = elixir_translator:translate(Right, S), - {TLeft, SL} = elixir_clauses:match(fun elixir_translator:translate/2, Left, SR), - {TT, {TFilters, TS}} = translate_filters(T, SL), - {TLeft, TRight, TFilters, TT, TS}. - -translate_filters(T, S) -> - {Filters, Rest} = collect_filters(T, []), - {Rest, lists:mapfoldr(fun translate_filter/2, S, Filters)}. - -translate_filter(Filter, S) -> - {TFilter, TS} = elixir_translator:translate(Filter, S), - case elixir_utils:returns_boolean(Filter) of - true -> - {{nil, TFilter}, TS}; - false -> - {Name, _, VS} = elixir_scope:build_var('_', TS), - {{{var, 0, Name}, TFilter}, VS} - end. - -collect_filters([{'<-', _, [_, _]}|_] = T, Acc) -> - {Acc, T}; -collect_filters([{'<<>>', _, [{'<-', _, [_, _]}]}|_] = T, Acc) -> - {Acc, T}; -collect_filters([H|T], Acc) -> - collect_filters(T, [H|Acc]); -collect_filters([], Acc) -> - {Acc, []}. - -%% If all we have is one enum generator, we check if it is a list -%% for optimization otherwise fallback to the reduce generator. 
-build_inline(Line, [{enum, Meta, Left, Right, Filters}] = Orig, Expr, Into, Var, Acc, S) -> - case Right of - {cons, _, _, _} -> - build_comprehension(Line, Orig, Expr, Into); - {Other, _, _} when Other == tuple; Other == map -> - build_reduce(Orig, Expr, Into, Acc, S); - _ -> - Clauses = [{enum, Meta, Left, Var, Filters}], - - {'case', -1, Right, [ - {clause, -1, - [Var], - [[elixir_utils:erl_call(Line, erlang, is_list, [Var])]], - [build_comprehension(Line, Clauses, Expr, Into)]}, - {clause, -1, - [Var], - [], - [build_reduce(Clauses, Expr, Into, Acc, S)]} - ]} - end; - -build_inline(Line, Clauses, Expr, Into, _Var, Acc, S) -> - case lists:all(fun(Clause) -> element(1, Clause) == bin end, Clauses) of - true -> build_comprehension(Line, Clauses, Expr, Into); - false -> build_reduce(Clauses, Expr, Into, Acc, S) - end. - -build_into(Line, Clauses, Expr, Into, Fun, Acc, S) -> - {Kind, SK} = build_var(Line, S), - {Reason, SR} = build_var(Line, SK), - {Stack, ST} = build_var(Line, SR), - {Done, SD} = build_var(Line, ST), - - IntoExpr = {call, Line, Fun, [Acc, pair(Line, cont, Expr)]}, - MatchExpr = {match, Line, - {tuple, Line, [Acc, Fun]}, - elixir_utils:erl_call(Line, 'Elixir.Collectable', into, [Into]) - }, - - TryExpr = - {'try', Line, - [build_reduce_clause(Clauses, IntoExpr, Acc, Acc, SD)], - [{clause, Line, - [Done], - [], - [{call, Line, Fun, [Done, {atom, Line, done}]}]}], - [{clause, Line, - [{tuple, Line, [Kind, Reason, {var, Line, '_'}]}], - [], - [{match, Line, Stack, elixir_utils:erl_call(Line, erlang, get_stacktrace, [])}, - {call, Line, Fun, [Acc, {atom, Line, halt}]}, - elixir_utils:erl_call(Line, erlang, raise, [Kind, Reason, Stack])]}], - []}, - - {{block, Line, [MatchExpr, TryExpr]}, SD}. - -%% Helpers - -build_reduce(Clauses, Expr, false, Acc, S) -> - build_reduce_clause(Clauses, Expr, {nil, 0}, Acc, S); -build_reduce(Clauses, Expr, {nil, Line} = Into, Acc, S) -> - ListExpr = {cons, Line, Expr, Acc}, - elixir_utils:erl_call(Line, lists, reverse, - [build_reduce_clause(Clauses, ListExpr, Into, Acc, S)]); -build_reduce(Clauses, Expr, {bin, _, _} = Into, Acc, S) -> - {bin, Line, Elements} = Expr, - BinExpr = {bin, Line, [{bin_element, Line, Acc, default, [bitstring]}|Elements]}, - build_reduce_clause(Clauses, BinExpr, Into, Acc, S). 
- -build_reduce_clause([{enum, Meta, Left, Right, Filters}|T], Expr, Arg, Acc, S) -> - Line = ?line(Meta), - Inner = build_reduce_clause(T, Expr, Acc, Acc, S), - - True = pair(Line, cont, Inner), - False = pair(Line, cont, Acc), - - Clauses0 = - case is_var(Left) of - true -> []; - false -> - [{clause, -1, - [{var, Line, '_'}, Acc], [], - [False]}] - end, - - Clauses1 = - [{clause, Line, - [Left, Acc], [], - [join_filters(Line, Filters, True, False)]}|Clauses0], - - Args = [Right, pair(Line, cont, Arg), {'fun', Line, {clauses, Clauses1}}], - Tuple = elixir_utils:erl_call(Line, 'Elixir.Enumerable', reduce, Args), - - %% Use -1 because in case of no returns we don't care about the result - elixir_utils:erl_call(-1, erlang, element, [{integer, Line, 2}, Tuple]); - -build_reduce_clause([{bin, Meta, Left, Right, Filters}|T], Expr, Arg, Acc, S) -> - Line = ?line(Meta), - {Tail, ST} = build_var(Line, S), - {Fun, SF} = build_var(Line, ST), - - True = build_reduce_clause(T, Expr, Acc, Acc, SF), - False = Acc, - - {bin, _, Elements} = Left, - - BinMatch = - {bin, Line, Elements ++ [{bin_element, Line, Tail, default, [bitstring]}]}, - NoVarMatch = - {bin, Line, no_var(Elements) ++ [{bin_element, Line, Tail, default, [bitstring]}]}, - - Clauses = - [{clause, Line, - [BinMatch, Acc], [], - [{call, Line, Fun, [Tail, join_filters(Line, Filters, True, False)]}]}, - {clause, -1, - [NoVarMatch, Acc], [], - [{call, Line, Fun, [Tail, False]}]}, - {clause, -1, - [{bin, Line, []}, Acc], [], - [Acc]}, - {clause, -1, - [Tail, {var, Line, '_'}], [], - [elixir_utils:erl_call(Line, erlang, error, [pair(Line, badarg, Tail)])]}], - - {call, Line, - {named_fun, Line, element(3, Fun), Clauses}, - [Right, Arg]}; - -build_reduce_clause([], Expr, _Arg, _Acc, _S) -> - Expr. - -is_var({var, _, _}) -> true; -is_var(_) -> false. - -pair(Line, Atom, Arg) -> - {tuple, Line, [{atom, Line, Atom}, Arg]}. - -build_var(Line, S) -> - {Name, _, ST} = elixir_scope:build_var('_', S), - {{var, Line, Name}, ST}. - -no_var(Elements) -> - [{bin_element, Line, no_var_expr(Expr), Size, Types} || - {bin_element, Line, Expr, Size, Types} <- Elements]. -no_var_expr({var, Line, _}) -> - {var, Line, '_'}. - -build_comprehension(Line, Clauses, Expr, false) -> - {block, Line, [ - build_comprehension(Line, Clauses, Expr, {nil, Line}), - {nil, Line} - ]}; -build_comprehension(Line, Clauses, Expr, Into) -> - {comprehension_kind(Into), Line, Expr, comprehension_clause(Clauses)}. - -comprehension_clause([{Kind, Meta, Left, Right, Filters}|T]) -> - Line = ?line(Meta), - [{comprehension_generator(Kind), Line, Left, Right}] ++ - comprehension_filter(Line, Filters) ++ - comprehension_clause(T); -comprehension_clause([]) -> - []. - -comprehension_kind({nil, _}) -> lc; -comprehension_kind({bin, _, []}) -> bc. - -comprehension_generator(enum) -> generate; -comprehension_generator(bin) -> b_generate. - -comprehension_expr({bin, _, []}, {bin, _, _} = Expr) -> - {inline, Expr}; -comprehension_expr({bin, Line, []}, Expr) -> - BinExpr = {bin, Line, [{bin_element, Line, Expr, default, [bitstring]}]}, - {inline, BinExpr}; -comprehension_expr({nil, _}, Expr) -> - {inline, Expr}; -comprehension_expr(false, Expr) -> - {inline, Expr}; -comprehension_expr(_, Expr) -> - {into, Expr}. - -comprehension_filter(Line, Filters) -> - [join_filter(Line, Filter, {atom, Line, true}, {atom, Line, false}) || - Filter <- lists:reverse(Filters)]. 
- -join_filters(_Line, [], True, _False) -> - True; -join_filters(Line, [H|T], True, False) -> - lists:foldl(fun(Filter, Acc) -> - join_filter(Line, Filter, Acc, False) - end, join_filter(Line, H, True, False), T). - -join_filter(Line, {nil, Filter}, True, False) -> - {'case', Line, Filter, [ - {clause, Line, [{atom, Line, true}], [], [True]}, - {clause, Line, [{atom, Line, false}], [], [False]} - ]}; -join_filter(Line, {Var, Filter}, True, False) -> - Guard = - {op, Line, 'orelse', - {op, Line, '==', Var, {atom, Line, false}}, - {op, Line, '==', Var, {atom, Line, nil}}}, - - {'case', Line, Filter, [ - {clause, Line, [Var], [[Guard]], [False]}, - {clause, Line, [{var, Line, '_'}], [], [True]} - ]}. diff --git a/lib/elixir/src/elixir_import.erl b/lib/elixir/src/elixir_import.erl index ece17e12828..39081ba1715 100644 --- a/lib/elixir/src/elixir_import.erl +++ b/lib/elixir/src/elixir_import.erl @@ -1,72 +1,96 @@ %% Module responsible for handling imports and conflicts -%% in between local functions and imports. +%% between local functions and imports. %% For imports dispatch, please check elixir_dispatch. -module(elixir_import). -export([import/4, special_form/2, format_error/1]). -include("elixir.hrl"). -%% IMPORT - import(Meta, Ref, Opts, E) -> - Res = + {Functions, Macros, Added} = case keyfind(only, Opts) of {only, functions} -> - {import_functions(Meta, Ref, Opts, E), - ?m(E, macros)}; + {Added1, Funs} = import_functions(Meta, Ref, Opts, E), + {Funs, keydelete(Ref, ?key(E, macros)), Added1}; {only, macros} -> - {?m(E, functions), - import_macros(true, Meta, Ref, Opts, E)}; + {Added2, Macs} = import_macros(true, Meta, Ref, Opts, E), + {keydelete(Ref, ?key(E, functions)), Macs, Added2}; {only, List} when is_list(List) -> - {import_functions(Meta, Ref, Opts, E), - import_macros(false, Meta, Ref, Opts, E)}; + {Added1, Funs} = import_functions(Meta, Ref, Opts, E), + {Added2, Macs} = import_macros(false, Meta, Ref, Opts, E), + {Funs, Macs, Added1 or Added2}; false -> - {import_functions(Meta, Ref, Opts, E), - import_macros(false, Meta, Ref, Opts, E)} + {Added1, Funs} = import_functions(Meta, Ref, Opts, E), + {Added2, Macs} = import_macros(false, Meta, Ref, Opts, E), + {Funs, Macs, Added1 or Added2} end, - record_warn(Meta, Ref, Opts, E), - Res. + record_warn(Meta, Ref, Opts, Added, E), + {Functions, Macros}. import_functions(Meta, Ref, Opts, E) -> - calculate(Meta, Ref, Opts, ?m(E, functions), E, fun() -> get_functions(Ref) end). + calculate(Meta, Ref, Opts, ?key(E, functions), ?key(E, file), fun() -> + get_functions(Ref) + end). import_macros(Force, Meta, Ref, Opts, E) -> - calculate(Meta, Ref, Opts, ?m(E, macros), E, fun() -> - case Force of - true -> get_macros(Meta, Ref, E); - false -> get_optional_macros(Ref) + calculate(Meta, Ref, Opts, ?key(E, macros), ?key(E, file), fun() -> + case fetch_macros(Ref) of + {ok, Macros} -> + Macros; + error when Force -> + elixir_errors:form_error(Meta, ?key(E, file), ?MODULE, {no_macros, Ref}); + error -> + [] end end). -record_warn(Meta, Ref, Opts, E) -> +record_warn(Meta, Ref, Opts, Added, E) -> Warn = case keyfind(warn, Opts) of {warn, false} -> false; {warn, true} -> true; false -> not lists:keymember(context, 1, Meta) end, - elixir_lexical:record_import(Ref, ?line(Meta), Warn, ?m(E, lexical_tracker)). + + Only = + case keyfind(only, Opts) of + {only, List} when is_list(List) -> List; + _ -> [] + end, + + elixir_lexical:record_import(Ref, Only, ?line(Meta), Added and Warn, ?key(E, lexical_tracker)). 
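import/4 above splits the work between function and macro imports and records the :only list with the lexical tracker. Typical call sites, as a sketch:

```elixir
import Integer, only: [is_odd: 1, is_even: 1]  # import just these two macros
import List, only: :functions                  # all functions, no macros
import Bitwise, warn: false                    # silence the unused-import warning
```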
%% Calculates the imports based on only and except -calculate(Meta, Key, Opts, Old, E, Existing) -> +calculate(Meta, Key, Opts, Old, File, Existing) -> New = case keyfind(only, Opts) of {only, Only} when is_list(Only) -> + ok = ensure_keyword_list(Meta, File, Only, only), + case keyfind(except, Opts) of + false -> + ok; + _ -> + elixir_errors:form_error(Meta, File, ?MODULE, only_and_except_given) + end, case Only -- get_exports(Key) of - [{Name,Arity}|_] -> - Tuple = {invalid_import, {Key, Name, Arity}}, - elixir_errors:form_error(Meta, ?m(E, file), ?MODULE, Tuple); + [{Name, Arity} | _] -> + elixir_errors:form_error(Meta, File, ?MODULE, {invalid_import, {Key, Name, Arity}}); _ -> intersection(Only, Existing()) end; _ -> case keyfind(except, Opts) of - false -> remove_underscored(Existing()); - {except, []} -> remove_underscored(Existing()); + false -> + remove_underscored(Existing()); {except, Except} when is_list(Except) -> + ok = ensure_keyword_list(Meta, File, Except, except), + %% We are not checking existence of exports listed in :except option + %% on purpose: to support backwards compatible code. + %% For example, "import String, except: [trim: 1]" + %% should work across all Elixir versions. case keyfind(Key, Old) of false -> remove_underscored(Existing()) -- Except; - {Key,OldImports} -> OldImports -- Except + {Key, OldImports} -> OldImports -- Except end end end, @@ -76,20 +100,17 @@ calculate(Meta, Key, Opts, Old, E, Existing) -> Final = remove_internals(Set), case Final of - [] -> keydelete(Key, Old); + [] -> + {false, keydelete(Key, Old)}; _ -> - ensure_no_special_form_conflict(Meta, ?m(E, file), Key, Final), - [{Key, Final}|keydelete(Key, Old)] + ensure_no_special_form_conflict(Meta, File, Key, Final), + {true, [{Key, Final} | keydelete(Key, Old)]} end. %% Retrieve functions and macros from modules get_exports(Module) -> - try - Module:'__info__'(functions) ++ Module:'__info__'(macros) - catch - error:undef -> Module:module_info(exports) - end. + get_functions(Module) ++ get_macros(Module). get_functions(Module) -> try @@ -98,46 +119,56 @@ get_functions(Module) -> error:undef -> Module:module_info(exports) end. -get_macros(Meta, Module, E) -> - try - Module:'__info__'(macros) - catch - error:undef -> - Tuple = {no_macros, Module}, - elixir_errors:form_error(Meta, ?m(E, file), ?MODULE, Tuple) +get_macros(Module) -> + case fetch_macros(Module) of + {ok, Macros} -> + Macros; + error -> + [] end. -get_optional_macros(Module) -> - case code:ensure_loaded(Module) of - {module, Module} -> - try - Module:'__info__'(macros) - catch - error:undef -> [] - end; - {error, _} -> [] +fetch_macros(Module) -> + try + {ok, Module:'__info__'(macros)} + catch + error:undef -> error end. %% VALIDATION HELPERS -ensure_no_special_form_conflict(Meta, File, Key, [{Name,Arity}|T]) -> +ensure_no_special_form_conflict(Meta, File, Key, [{Name, Arity} | T]) -> case special_form(Name, Arity) of true -> - Tuple = {special_form_conflict, {Key, Name, Arity}}, - elixir_errors:form_error(Meta, File, ?MODULE, Tuple); + elixir_errors:form_error(Meta, File, ?MODULE, {special_form_conflict, {Key, Name, Arity}}); false -> ensure_no_special_form_conflict(Meta, File, Key, T) end; ensure_no_special_form_conflict(_Meta, _File, _Key, []) -> ok. 
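calculate/6 above rejects combining an :only list with :except, while :except deliberately tolerates names that no longer exist (see the backwards-compatibility comment). For example:

```elixir
import String, except: [trim: 1]   # accepted even on versions without trim/1

# import String, only: [downcase: 1], except: [upcase: 1]
# => :only and :except can only be given together to import
#    when :only is either :functions or :macros
```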
+ensure_keyword_list(_Meta, _File, [], _Kind) -> ok; + +ensure_keyword_list(Meta, File, [{Key, Value} | Rest], Kind) when is_atom(Key), is_integer(Value) -> + ensure_keyword_list(Meta, File, Rest, Kind); + +ensure_keyword_list(Meta, File, _Other, Kind) -> + elixir_errors:form_error(Meta, File, ?MODULE, {invalid_option, Kind}). + %% ERROR HANDLING -format_error({invalid_import,{Receiver, Name, Arity}}) -> - io_lib:format("cannot import ~ts.~ts/~B because it doesn't exist", +format_error(only_and_except_given) -> + ":only and :except can only be given together to import " + "when :only is either :functions or :macros"; + +format_error({invalid_import, {Receiver, Name, Arity}}) -> + io_lib:format("cannot import ~ts.~ts/~B because it is undefined or private", [elixir_aliases:inspect(Receiver), Name, Arity]); -format_error({special_form_conflict,{Receiver, Name, Arity}}) -> +format_error({invalid_option, Option}) -> + Message = "invalid :~s option for import, expected a keyword list with integer values", + io_lib:format(Message, [Option]); + +format_error({special_form_conflict, {Receiver, Name, Arity}}) -> io_lib:format("cannot import ~ts.~ts/~B because it conflicts with Elixir special forms", [elixir_aliases:inspect(Receiver), Name, Arity]); @@ -152,9 +183,9 @@ keyfind(Key, List) -> keydelete(Key, List) -> lists:keydelete(Key, 1, List). -intersection([H|T], All) -> +intersection([H | T], All) -> case lists:member(H, All) of - true -> [H|intersection(T, All)]; + true -> [H | intersection(T, All)]; false -> intersection(T, All) end; @@ -180,8 +211,7 @@ special_form('&', 1) -> true; special_form('^', 1) -> true; special_form('=', 2) -> true; special_form('%', 2) -> true; -special_form('__op__', 2) -> true; -special_form('__op__', 3) -> true; +special_form('::', 2) -> true; special_form('__block__', _) -> true; special_form('->', _) -> true; special_form('<<>>', _) -> true; @@ -205,6 +235,7 @@ special_form('unquote_splicing', 1) -> true; special_form('fn', _) -> true; special_form('super', _) -> true; special_form('for', _) -> true; +special_form('with', _) -> true; special_form('cond', 1) -> true; special_form('case', 2) -> true; special_form('try', 2) -> true; diff --git a/lib/elixir/src/elixir_interpolation.erl b/lib/elixir/src/elixir_interpolation.erl index 1221811b7a0..a4e7555df15 100644 --- a/lib/elixir/src/elixir_interpolation.erl +++ b/lib/elixir/src/elixir_interpolation.erl @@ -1,61 +1,66 @@ % Handle string and string-like interpolations. -module(elixir_interpolation). --export([extract/5, unescape_chars/1, unescape_chars/2, +-export([extract/6, unescape_chars/1, unescape_chars/2, unescape_tokens/1, unescape_tokens/2, unescape_map/1]). -include("elixir.hrl"). +-define(is_hex(S), ((S >= $0 andalso S =< $9) orelse + (S >= $A andalso S =< $F) orelse + (S >= $a andalso S =< $f))). %% Extract string interpolations -extract(Line, Raw, Interpol, String, Last) -> +extract(Line, Column, Raw, Interpol, String, Last) -> %% Ignore whatever is in the scope and enable terminator checking. Scope = Raw#elixir_tokenizer{terminators=[], check_terminators=true}, - extract(Line, Scope, Interpol, String, [], [], Last). + extract(Line, Column, Scope, Interpol, String, [], [], Last). 
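extract/6 above is the scanner that splits a string into literal chunks and tokenized #{...} segments while tracking line and column for error reporting. From the language side that is what powers:

```elixir
name = "world"
"hello #{name}!"        # => "hello world!"
~s(1 + 1 = #{1 + 1})    # => "1 + 1 = 2"

# Leaving a #{ unclosed is reported as a missing interpolation terminator.
```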
%% Terminators -extract(Line, _Scope, _Interpol, [], Buffer, Output, []) -> - finish_extraction(Line, Buffer, Output, []); +extract(Line, Column, _Scope, _Interpol, [], Buffer, Output, []) -> + finish_extraction(Line, Column, Buffer, Output, []); -extract(Line, _Scope, _Interpol, [], _Buffer, _Output, Last) -> +extract(Line, _Column, _Scope, _Interpol, [], _Buffer, _Output, Last) -> {error, {string, Line, io_lib:format("missing terminator: ~ts", [[Last]]), []}}; -extract(Line, _Scope, _Interpol, [Last|Remaining], Buffer, Output, Last) -> - finish_extraction(Line, Buffer, Output, Remaining); +extract(Line, Column, _Scope, _Interpol, [Last | Remaining], Buffer, Output, Last) -> + finish_extraction(Line, Column + 1, Buffer, Output, Remaining); %% Going through the string -extract(Line, Scope, Interpol, [$\\, $\n|Rest], Buffer, Output, Last) -> - extract(Line+1, Scope, Interpol, Rest, Buffer, Output, Last); +extract(Line, _Column, Scope, true, [$\\, $\n | Rest], Buffer, Output, Last) -> + extract(Line+1, 1, Scope, true, Rest, Buffer, Output, Last); -extract(Line, Scope, Interpol, [$\\, $\r, $\n|Rest], Buffer, Output, Last) -> - extract(Line+1, Scope, Interpol, Rest, Buffer, Output, Last); +extract(Line, _Column, Scope, true, [$\\, $\r, $\n | Rest], Buffer, Output, Last) -> + extract(Line+1, 1, Scope, true, Rest, Buffer, Output, Last); -extract(Line, Scope, Interpol, [$\n|Rest], Buffer, Output, Last) -> - extract(Line+1, Scope, Interpol, Rest, [$\n|Buffer], Output, Last); +extract(Line, _Column, Scope, Interpol, [$\n | Rest], Buffer, Output, Last) -> + extract(Line+1, 1, Scope, Interpol, Rest, [$\n | Buffer], Output, Last); -extract(Line, Scope, Interpol, [$\\, $#, ${|Rest], Buffer, Output, Last) -> - extract(Line, Scope, Interpol, Rest, [${,$#|Buffer], Output, Last); +extract(Line, Column, Scope, Interpol, [$\\, Last | Rest], Buffer, Output, Last) -> + extract(Line, Column+2, Scope, Interpol, Rest, [Last | Buffer], Output, Last); -extract(Line, Scope, Interpol, [$\\,Char|Rest], Buffer, Output, Last) -> - extract(Line, Scope, Interpol, Rest, [Char,$\\|Buffer], Output, Last); +extract(Line, Column, Scope, true, [$\\, $#, ${ | Rest], Buffer, Output, Last) -> + extract(Line, Column+1, Scope, true, Rest, [${, $# | Buffer], Output, Last); -extract(Line, Scope, true, [$#, ${|Rest], Buffer, Output, Last) -> +extract(Line, Column, Scope, true, [$#, ${ | Rest], Buffer, Output, Last) -> Output1 = build_string(Line, Buffer, Output), - - case elixir_tokenizer:tokenize(Rest, Line, Scope) of - {error, {EndLine, _, "}"}, [$}|NewRest], Tokens} -> - Output2 = build_interpol(Line, Tokens, Output1), - extract(EndLine, Scope, true, NewRest, [], Output2, Last); + case elixir_tokenizer:tokenize(Rest, Line, Column + 2, Scope) of + {error, {{EndLine, _, EndColumn}, _, "}"}, [$} | NewRest], Tokens} -> + Output2 = build_interpol(Line, Column, EndColumn, Tokens, Output1), + extract(EndLine, EndColumn, Scope, true, NewRest, [], Output2, Last); {error, Reason, _, _} -> {error, Reason}; - {ok, _EndLine, _} -> + {ok, _EndLine, _EndColumn, _} -> {error, {string, Line, "missing interpolation terminator:}", []}} end; +extract(Line, Column, Scope, Interpol, [$\\, Char | Rest], Buffer, Output, Last) -> + extract(Line, Column+2, Scope, Interpol, Rest, [Char, $\\ | Buffer], Output, Last); + %% Catch all clause -extract(Line, Scope, Interpol, [Char|Rest], Buffer, Output, Last) -> - extract(Line, Scope, Interpol, Rest, [Char|Buffer], Output, Last). 
+extract(Line, Column, Scope, Interpol, [Char | Rest], Buffer, Output, Last) ->
+  extract(Line, Column + 1, Scope, Interpol, Rest, [Char | Buffer], Output, Last).

 %% Unescape a series of tokens as returned by extract.

@@ -74,66 +79,109 @@ unescape_chars(String) -> unescape_chars(String, fun unescape_map/1).

 unescape_chars(String, Map) ->
-  Octals = Map($0) /= false,
-  Hex = Map($x) /= false,
-  unescape_chars(String, Map, Octals, Hex, <<>>).
+  unescape_chars(String, Map, <<>>).
+
+unescape_chars(<<$\\, $x, Rest/binary>>, Map, Acc) ->
+  case Map($x) of
+    true -> unescape_hex(Rest, Map, Acc);
+    false -> unescape_chars(Rest, Map, <<Acc/binary, $\\, $x>>)
+  end;
+
+unescape_chars(<<$\\, $u, Rest/binary>>, Map, Acc) ->
+  case Map($u) of
+    true -> unescape_unicode(Rest, Map, Acc);
+    false -> unescape_chars(Rest, Map, <<Acc/binary, $\\, $u>>)
+  end;

-unescape_chars(<<$\\,A,B,C,Rest/binary>>, Map, true, Hex, Acc) when ?is_octal(A), A =< $3, ?is_octal(B), ?is_octal(C) ->
-  append_escaped(Rest, Map, [A,B,C], true, Hex, Acc, 8);
+unescape_chars(<<$\\, Escaped, Rest/binary>>, Map, Acc) ->
+  case Map(Escaped) of
+    false -> unescape_chars(Rest, Map, <<Acc/binary, $\\, Escaped>>);
+    Other -> unescape_chars(Rest, Map, <<Acc/binary, Other>>)
+  end;
+
+unescape_chars(<<Char, Rest/binary>>, Map, Acc) ->
+  unescape_chars(Rest, Map, <<Acc/binary, Char>>);

-unescape_chars(<<$\\,A,B,Rest/binary>>, Map, true, Hex, Acc) when ?is_octal(A), ?is_octal(B) ->
-  append_escaped(Rest, Map, [A,B], true, Hex, Acc, 8);
+unescape_chars(<<>>, _Map, Acc) -> Acc.

-unescape_chars(<<$\\,A,Rest/binary>>, Map, true, Hex, Acc) when ?is_octal(A) ->
-  append_escaped(Rest, Map, [A], true, Hex, Acc, 8);
+% Unescape Helpers

-unescape_chars(<<$\\,P,A,B,Rest/binary>>, Map, Octal, true, Acc) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B) ->
-  append_escaped(Rest, Map, [A,B], Octal, true, Acc, 16);
+unescape_hex(<<A, B, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B) ->
+  Bytes = list_to_integer([A, B], 16),
+  unescape_chars(Rest, Map, <<Acc/binary, Bytes>>);

-unescape_chars(<<$\\,P,A,Rest/binary>>, Map, Octal, true, Acc) when (P == $x orelse P == $X), ?is_hex(A) ->
-  append_escaped(Rest, Map, [A], Octal, true, Acc, 16);
+%% TODO: Remove deprecated sequences on v2.0

-unescape_chars(<<$\\,P,${,A,$},Rest/binary>>, Map, Octal, true, Acc) when (P == $x orelse P == $X), ?is_hex(A) ->
-  append_escaped(Rest, Map, [A], Octal, true, Acc, 16);
+unescape_hex(<<A, Rest/binary>>, Map, Acc) when ?is_hex(A) ->
+  io:format(standard_error, "warning: \\xH inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []),
+  append_codepoint(Rest, Map, [A], Acc, 16);

-unescape_chars(<<$\\,P,${,A,B,$},Rest/binary>>, Map, Octal, true, Acc) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B) ->
-  append_escaped(Rest, Map, [A,B], Octal, true, Acc, 16);
+unescape_hex(<<${, A, $}, Rest/binary>>, Map, Acc) when ?is_hex(A) ->
+  io:format(standard_error, "warning: \\x{H*} inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []),
+  append_codepoint(Rest, Map, [A], Acc, 16);

-unescape_chars(<<$\\,P,${,A,B,C,$},Rest/binary>>, Map, Octal, true, Acc) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C) ->
-  append_escaped(Rest, Map, [A,B,C], Octal, true, Acc, 16);
+unescape_hex(<<${, A, B, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B) ->
+  io:format(standard_error, "warning: \\x{H*} inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []),
+  append_codepoint(Rest, Map, [A, B], Acc, 16);

-unescape_chars(<<$\\,P,${,A,B,C,D,$},Rest/binary>>, Map, Octal, true, Acc) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) ->
-  append_escaped(Rest, Map, [A,B,C,D], Octal, true, Acc, 16);
+unescape_hex(<<${, A, B, C, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C) ->
+  io:format(standard_error, "warning: \\x{H*} inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []),
+  append_codepoint(Rest, Map, [A, B, C], Acc, 16);

-unescape_chars(<<$\\,P,${,A,B,C,D,E,$},Rest/binary>>, Map, Octal, true, Acc) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E) ->
-  append_escaped(Rest, Map, [A,B,C,D,E], Octal, true, Acc, 16);
+unescape_hex(<<${, A, B, C, D, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) ->
+  io:format(standard_error, "warning: \\x{H*} inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []),
+  append_codepoint(Rest, Map, [A, B, C, D], Acc, 16);

-unescape_chars(<<$\\,Escaped,Rest/binary>>, Map, Octals, Hex, Acc) ->
-  case Map(Escaped) of
-    false -> unescape_chars(Rest, Map, Octals, Hex, <<Acc/binary,$\\,Escaped>>);
-    Other -> unescape_chars(Rest, Map, Octals, Hex, <<Acc/binary,Other>>)
-  end;
+unescape_hex(<<${, A, B, C, D, E, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E) ->
+  io:format(standard_error, "warning: \\x{H*} inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []),
+  append_codepoint(Rest, Map, [A, B, C, D, E], Acc, 16);

-unescape_chars(<<Char,Rest/binary>>, Map, Octals, Hex, Acc) ->
-  unescape_chars(Rest, Map, Octals, Hex, <<Acc/binary,Char>>);
+unescape_hex(<<${, A, B, C, D, E, F, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E), ?is_hex(F) ->
+  io:format(standard_error, "warning: \\x{H*} inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []),
+  append_codepoint(Rest, Map, [A, B, C, D, E, F], Acc, 16);

+unescape_hex(<<_/binary>>, _Map, _Acc) ->
+  Msg = <<"missing hex sequence after \\x, expected \\xHH">>,
+  error('Elixir.ArgumentError':exception([{message, Msg}])).

-unescape_chars(<<>>, _Map, _Octals, _Hex, Acc) -> Acc.
+%% Finish deprecated sequences

-append_escaped(Rest, Map, List, Octal, Hex, Acc, Base) ->
+unescape_unicode(<<A, B, C, D, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) ->
+  append_codepoint(Rest, Map, [A, B, C, D], Acc, 16);
+
+unescape_unicode(<<${, A, $}, Rest/binary>>, Map, Acc) when ?is_hex(A) ->
+  append_codepoint(Rest, Map, [A], Acc, 16);
+
+unescape_unicode(<<${, A, B, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B) ->
+  append_codepoint(Rest, Map, [A, B], Acc, 16);
+
+unescape_unicode(<<${, A, B, C, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C) ->
+  append_codepoint(Rest, Map, [A, B, C], Acc, 16);
+
+unescape_unicode(<<${, A, B, C, D, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) ->
+  append_codepoint(Rest, Map, [A, B, C, D], Acc, 16);
+
+unescape_unicode(<<${, A, B, C, D, E, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E) ->
+  append_codepoint(Rest, Map, [A, B, C, D, E], Acc, 16);
+
+unescape_unicode(<<${, A, B, C, D, E, F, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E), ?is_hex(F) ->
+  append_codepoint(Rest, Map, [A, B, C, D, E, F], Acc, 16);
+
+unescape_unicode(<<_/binary>>, _Map, _Acc) ->
+  Msg = <<"invalid Unicode sequence after \\u, expected \\uHHHH or \\u{H*}">>,
+  error('Elixir.ArgumentError':exception([{message, Msg}])).
+
+append_codepoint(Rest, Map, List, Acc, Base) ->
   Codepoint = list_to_integer(List, Base),
   try <<Codepoint/utf8>> of
-    Binary -> unescape_chars(Rest, Map, Octal, Hex, Binary)
+    Binary -> unescape_chars(Rest, Map, Binary)
   catch
     error:badarg ->
-      Msg = <<"invalid or reserved unicode codepoint ", (integer_to_binary(Codepoint))/binary>>,
-      error('Elixir.ArgumentError':exception([{message,Msg}]))
+      Msg = <<"invalid or reserved Unicode codepoint ", (integer_to_binary(Codepoint))/binary>>,
+      error('Elixir.ArgumentError':exception([{message, Msg}]))
   end.

-% Unescape Helpers
-
+unescape_map($0) -> 0;
 unescape_map($a) -> 7;
 unescape_map($b) -> $\b;
 unescape_map($d) -> $\d;
@@ -144,20 +192,23 @@ unescape_map($r) -> $\r;
 unescape_map($s) -> $\s;
 unescape_map($t) -> $\t;
 unescape_map($v) -> $\v;
+unescape_map($x) -> true;
+unescape_map($u) -> true;
 unescape_map(E) -> E.

 % Extract Helpers

-finish_extraction(Line, Buffer, Output, Remaining) ->
-  case build_string(Line, Buffer, Output) of
-    [] -> Final = [<<>>];
-    Final -> []
+finish_extraction(Line, Column, Buffer, Output, Remaining) ->
+  Final = case build_string(Line, Buffer, Output) of
+    [] -> [<<>>];
+    F -> F
   end,
-  {Line, lists:reverse(Final), Remaining}.
+
+  {Line, Column, lists:reverse(Final), Remaining}.

 build_string(_Line, [], Output) -> Output;
 build_string(_Line, Buffer, Output) ->
-  [elixir_utils:characters_to_binary(lists:reverse(Buffer))|Output].
+  [elixir_utils:characters_to_binary(lists:reverse(Buffer)) | Output].

-build_interpol(Line, Buffer, Output) ->
-  [{Line, lists:reverse(Buffer)}|Output].
+build_interpol(Line, Column, EndColumn, Buffer, Output) ->
+  [{{Line, Column, EndColumn}, lists:reverse(Buffer)} | Output].
diff --git a/lib/elixir/src/elixir_lexical.erl b/lib/elixir/src/elixir_lexical.erl
index 7f299442deb..703501235de 100644
--- a/lib/elixir/src/elixir_lexical.erl
+++ b/lib/elixir/src/elixir_lexical.erl
@@ -1,79 +1,85 @@
 %% Module responsible for tracking lexical information.
 -module(elixir_lexical).
--export([run/2, +-export([run/3, dest/1, record_alias/4, record_alias/2, - record_import/4, record_import/2, - record_remote/2, format_error/1 + record_import/6, record_import/5, + record_remote/3, record_remote/6, + format_error/1 ]). -include("elixir.hrl"). -define(tracker, 'Elixir.Kernel.LexicalTracker'). -run(File, Callback) -> - case code:is_loaded(?tracker) of - {file, _} -> - Pid = ?tracker:start_link(), - try - Callback(Pid) +run(File, Dest, Callback) -> + case elixir_compiler:get_opt(internal) of + false -> + {ok, Pid} = ?tracker:start_link(Dest), + try Callback(Pid) of + Res -> + warn_unused_aliases(File, Pid), + warn_unused_imports(File, Pid), + Res after - warn_unused_aliases(File, Pid), - warn_unused_imports(File, Pid), - unlink(Pid), ?tracker:stop(Pid) + unlink(Pid), + ?tracker:stop(Pid) end; - false -> + true -> Callback(nil) end. +dest(nil) -> nil; +dest(Pid) -> ?tracker:dest(Pid). + %% RECORD record_alias(Module, Line, Warn, Ref) -> - if_tracker(Ref, fun(Pid) -> - ?tracker:add_alias(Pid, Module, Line, Warn), - true - end). + if_tracker(Ref, fun(Pid) -> ?tracker:add_alias(Pid, Module, Line, Warn), ok end). -record_import(Module, Line, Warn, Ref) -> - if_tracker(Ref, fun(Pid) -> - ?tracker:add_import(Pid, Module, Line, Warn), - true - end). +record_import(Module, FAs, Line, Warn, Ref) -> + if_tracker(Ref, fun(Pid) -> ?tracker:add_import(Pid, Module, FAs, Line, Warn), ok end). record_alias(Module, Ref) -> - if_tracker(Ref, fun(Pid) -> - ?tracker:alias_dispatch(Pid, Module), - true - end). - -record_import(Module, Ref) -> - if_tracker(Ref, fun(Pid) -> - ?tracker:import_dispatch(Pid, Module), - true - end). - -record_remote(Module, Ref) -> - if_tracker(Ref, fun(Pid) -> - ?tracker:remote_dispatch(Pid, Module), - true - end). + if_tracker(Ref, fun(Pid) -> ?tracker:alias_dispatch(Pid, Module), ok end). + +record_import(Module, Function, Arity, EnvFunction, Line, Ref) -> + if_tracker(Ref, fun(Pid) -> ?tracker:import_dispatch(Pid, Module, {Function, Arity}, Line, mode(EnvFunction)), ok end). + +record_remote(Module, EnvFunction, Ref) -> + if_tracker(Ref, fun(Pid) -> ?tracker:remote_reference(Pid, Module, mode(EnvFunction)), ok end). + +record_remote(Module, Function, Arity, EnvFunction, Line, Ref) -> + if_tracker(Ref, fun(Pid) -> ?tracker:remote_dispatch(Pid, Module, {Function, Arity}, Line, mode(EnvFunction)), ok end). %% HELPERS -if_tracker(nil, _Callback) -> false; +mode(nil) -> compile; +mode({_, _}) -> runtime. + +if_tracker(nil, _Callback) -> ok; if_tracker(Pid, Callback) when is_pid(Pid) -> Callback(Pid). %% ERROR HANDLING warn_unused_imports(File, Pid) -> - [ begin - elixir_errors:handle_file_warning(File, {L, ?MODULE, {unused_import, M}}) - end || {M, L} <- ?tracker:collect_unused_imports(Pid)]. + {ModuleImports, MFAImports} = + lists:partition(fun({M, _}) -> is_atom(M) end, ?tracker:collect_unused_imports(Pid)), + Modules = [M || {M, _L} <- ModuleImports], + MFAImportsFiltered = [T || {{M, _, _}, _} = T <- MFAImports, not lists:member(M, Modules)], + + [begin + elixir_errors:form_warn([{line, L}], File, ?MODULE, {unused_import, M}) + end || {M, L} <- ModuleImports ++ MFAImportsFiltered], + ok. warn_unused_aliases(File, Pid) -> - [ begin - elixir_errors:handle_file_warning(File, {L, ?MODULE, {unused_alias, M}}) - end || {M, L} <- ?tracker:collect_unused_aliases(Pid)]. + [begin + elixir_errors:form_warn([{line, L}], File, ?MODULE, {unused_alias, M}) + end || {M, L} <- ?tracker:collect_unused_aliases(Pid)], + ok. 
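warn_unused_imports/2 above now receives two shapes of entries from the lexical tracker: whole-module imports as {Module, Line} and function-level imports as {{Module, Function, Arity}, Line}. Function-level entries are dropped whenever their module is already reported, so a module is warned about at most once. A small sketch of that filtering with made-up data (not taken from this patch):

    %% Illustrative sketch only - same partition/filter as warn_unused_imports/2.
    -module(unused_imports_sketch).
    -export([entries_to_warn/1]).

    entries_to_warn(Collected) ->
      {ModuleImports, MFAImports} =
        lists:partition(fun({M, _}) -> is_atom(M) end, Collected),
      Modules = [M || {M, _Line} <- ModuleImports],
      %% Keep only function-level entries whose module is not already warned about.
      Filtered = [T || {{M, _, _}, _} = T <- MFAImports, not lists:member(M, Modules)],
      ModuleImports ++ Filtered.

    %% entries_to_warn([{'Elixir.String', 1},
    %%                  {{'Elixir.String', upcase, 1}, 2},
    %%                  {{'Elixir.List', first, 1}, 3}])
    %% => [{'Elixir.String', 1}, {{'Elixir.List', first, 1}, 3}]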
format_error({unused_alias, Module}) -> io_lib:format("unused alias ~ts", [elixir_aliases:inspect(Module)]); +format_error({unused_import, {Module, Function, Arity}}) -> + io_lib:format("unused import ~ts.~ts/~w", [elixir_aliases:inspect(Module), Function, Arity]); format_error({unused_import, Module}) -> io_lib:format("unused import ~ts", [elixir_aliases:inspect(Module)]). diff --git a/lib/elixir/src/elixir_locals.erl b/lib/elixir/src/elixir_locals.erl index 92991d772a0..fa128107a94 100644 --- a/lib/elixir/src/elixir_locals.erl +++ b/lib/elixir/src/elixir_locals.erl @@ -4,178 +4,104 @@ setup/1, cleanup/1, cache_env/1, get_cached_env/1, record_local/2, record_local/3, record_import/4, record_definition/3, record_defaults/4, - ensure_no_function_conflict/4, warn_unused_local/3, format_error/1 + ensure_no_import_conflict/3, warn_unused_local/3, format_error/1 ]). --export([macro_for/3, local_for/3, local_for/4]). -include("elixir.hrl"). --define(attr, '__locals_tracker'). +-define(attr, {elixir, locals_tracker}). -define(tracker, 'Elixir.Module.LocalsTracker'). -macro_for(Module, Name, Arity) -> - Tuple = {Name, Arity}, - try elixir_def:lookup_definition(Module, Tuple) of - {{Tuple, Kind, Line, _, _, _, _}, [_|_] = Clauses} - when Kind == defmacro; Kind == defmacrop -> - fun() -> get_function(Line, Module, Clauses) end; - _ -> - false - catch - error:badarg -> false - end. - -local_for(Module, Name, Arity) -> - local_for(Module, Name, Arity, nil). -local_for(Module, Name, Arity, Given) -> - Tuple = {Name, Arity}, - case elixir_def:lookup_definition(Module, Tuple) of - {{Tuple, Kind, Line, _, _, _, _}, [_|_] = Clauses} - when Given == nil; Kind == Given -> - get_function(Line, Module, Clauses); - _ -> - [_|T] = erlang:get_stacktrace(), - erlang:raise(error, undef, [{Module,Name,Arity,[]}|T]) - end. - -get_function(Line, Module, Clauses) -> - RewrittenClauses = [rewrite_clause(Clause, Module) || Clause <- Clauses], - Fun = {'fun', Line, {clauses, RewrittenClauses}}, - {value, Result, _Binding} = erl_eval:exprs([Fun], []), - Result. - -rewrite_clause({call, Line, {atom, Line, RawName}, Args}, Module) -> - Remote = {remote, Line, - {atom, Line, ?MODULE}, - {atom, Line, local_for} - }, - - %% If we have a macro, its arity in the table is - %% actually one less than in the function call - {Name, Arity} = case atom_to_list(RawName) of - "MACRO-" ++ Rest -> {list_to_atom(Rest), length(Args) - 1}; - _ -> {RawName, length(Args)} - end, - - FunCall = {call, Line, Remote, [ - {atom, Line, Module}, {atom, Line, Name}, {integer, Line, Arity} - ]}, - {call, Line, FunCall, Args}; - -rewrite_clause(Tuple, Module) when is_tuple(Tuple) -> - list_to_tuple(rewrite_clause(tuple_to_list(Tuple), Module)); - -rewrite_clause(List, Module) when is_list(List) -> - [rewrite_clause(Item, Module) || Item <- List]; - -rewrite_clause(Else, _) -> Else. - -%% TRACKING - setup(Module) -> - case code:is_loaded(?tracker) of - {file, _} -> ets:insert(Module, {?attr, ?tracker:start_link()}); - false -> ok + case elixir_compiler:get_opt(internal) of + false -> + {ok, Pid} = ?tracker:start_link(), + ets:insert(elixir_module:data_table(Module), {?attr, Pid}), + ok; + true -> + ok end. cleanup(Module) -> - if_tracker(Module, fun(Pid) -> unlink(Pid), ?tracker:stop(Pid) end). + if_tracker(Module, fun(Pid) -> unlink(Pid), ?tracker:stop(Pid), ok end). record_local(Tuple, Module) when is_atom(Module) -> - if_tracker(Module, fun(Pid) -> - ?tracker:add_local(Pid, Tuple), - true - end). 
+ if_tracker(Module, fun(Pid) -> ?tracker:add_local(Pid, Tuple), ok end). record_local(Tuple, _Module, Function) - when Function == nil; Function == Tuple -> false; + when Function == nil; Function == Tuple -> ok; record_local(Tuple, Module, Function) -> - if_tracker(Module, fun(Pid) -> - ?tracker:add_local(Pid, Function, Tuple), - true - end). + if_tracker(Module, fun(Pid) -> ?tracker:add_local(Pid, Function, Tuple), ok end). record_import(_Tuple, Receiver, Module, _Function) when Module == nil; Module == Receiver -> false; record_import(Tuple, Receiver, Module, Function) -> - if_tracker(Module, fun(Pid) -> - ?tracker:add_import(Pid, Function, Receiver, Tuple), - true - end). + if_tracker(Module, fun(Pid) -> ?tracker:add_import(Pid, Function, Receiver, Tuple), ok end). record_definition(Tuple, Kind, Module) -> - if_tracker(Module, fun(Pid) -> - ?tracker:add_definition(Pid, Kind, Tuple), - true - end). + if_tracker(Module, fun(Pid) -> ?tracker:add_definition(Pid, Kind, Tuple), ok end). record_defaults(_Tuple, _Kind, _Module, 0) -> - true; + ok; record_defaults(Tuple, Kind, Module, Defaults) -> - if_tracker(Module, fun(Pid) -> - ?tracker:add_defaults(Pid, Kind, Tuple, Defaults), - true - end). + if_tracker(Module, fun(Pid) -> ?tracker:add_defaults(Pid, Kind, Tuple, Defaults), ok end). if_tracker(Module, Callback) -> - try ets:lookup_element(Module, ?attr, 2) of + if_tracker(Module, ok, Callback). + +if_tracker(Module, Default, Callback) -> + try ets:lookup_element(elixir_module:data_table(Module), ?attr, 2) of Pid -> Callback(Pid) catch - error:badarg -> false + error:badarg -> Default end. %% CACHING -cache_env(#{module := Module} = RE) -> - E = RE#{line := nil,vars := []}, - try ets:lookup_element(Module, ?attr, 2) of +cache_env(#{module := Module, line := Line} = E) -> + try ets:lookup_element(elixir_module:data_table(Module), ?attr, 2) of Pid -> - {Pid, ?tracker:cache_env(Pid, E)} + {Pid, {Line, ?tracker:cache_env(Pid, E#{line := nil, vars := []})}} catch error:badarg -> - {Escaped, _} = elixir_quote:escape(E, false), + {Escaped, _} = elixir_quote:escape(E#{vars := []}, false), Escaped end. -get_cached_env({Pid,Ref}) -> ?tracker:get_cached_env(Pid, Ref); +get_cached_env({Pid, {Line, Ref}}) -> (?tracker:get_cached_env(Pid, Ref))#{line := Line}; get_cached_env(Env) -> Env. %% ERROR HANDLING -ensure_no_function_conflict(Meta, File, Module, AllDefined) -> - if_tracker(Module, fun(Pid) -> - [ begin - elixir_errors:form_error(Meta, File, ?MODULE, {function_conflict, Error}) - end || Error <- ?tracker:collect_imports_conflicts(Pid, AllDefined) ] - end), - ok. +ensure_no_import_conflict(_File, 'Elixir.Kernel', _All) -> + ok; +ensure_no_import_conflict(File, Module, All) -> + if_tracker(Module, ok, fun(Pid) -> + [elixir_errors:form_error(Meta, File, ?MODULE, {function_conflict, Error}) + || {Meta, Error} <- ?tracker:collect_imports_conflicts(Pid, All)], + ok + end). warn_unused_local(File, Module, Private) -> - if_tracker(Module, fun(Pid) -> - Args = [ {Fun, Kind, Defaults} || - {Fun, Kind, _Line, true, Defaults} <- Private], - - Unused = ?tracker:collect_unused_locals(Pid, Args), - - [ begin - {_, _, Line, _, _} = lists:keyfind(element(2, Error), 1, Private), - elixir_errors:handle_file_warning(File, {Line, ?MODULE, Error}) - end || Error <- Unused ] + if_tracker(Module, [], fun(Pid) -> + {Unreachable, Warnings} = ?tracker:collect_unused_locals(Pid, Private), + [elixir_errors:form_warn(Meta, File, ?MODULE, Error) || {Meta, Error} <- Warnings], + Unreachable end). 
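Throughout elixir_locals the tracker pid is now fetched with if_tracker/3 above, which looks the pid up in the module's data table and falls back to a caller-supplied default when the key or the whole table is gone (ets raises badarg in both cases). The same lookup-or-default idiom in isolation, with invented table and key names:

    %% Illustrative sketch only - the ets lookup/fallback used by if_tracker/3.
    -module(lookup_or_default_sketch).
    -export([demo/0]).

    demo() ->
      Table = ets:new(sketch_data, [set, public]),
      ets:insert(Table, {tracker_pid, self()}),
      Present = lookup_or_default(Table, tracker_pid, undefined),
      ets:delete(Table, tracker_pid),
      Missing = lookup_or_default(Table, tracker_pid, undefined),
      {Present, Missing}.   %% {Pid, undefined}

    lookup_or_default(Table, Key, Default) ->
      try ets:lookup_element(Table, Key, 2) of
        Value -> Value
      catch
        error:badarg -> Default
      end.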
-format_error({function_conflict,{Receivers, Name, Arity}}) -> +format_error({function_conflict, {Receivers, Name, Arity}}) -> io_lib:format("imported ~ts.~ts/~B conflicts with local function", [elixir_aliases:inspect(hd(Receivers)), Name, Arity]); -format_error({unused_args,{Name, Arity}}) -> +format_error({unused_args, {Name, Arity}}) -> io_lib:format("default arguments in ~ts/~B are never used", [Name, Arity]); -format_error({unused_args,{Name, Arity},1}) -> +format_error({unused_args, {Name, Arity}, 1}) -> io_lib:format("the first default argument in ~ts/~B is never used", [Name, Arity]); -format_error({unused_args,{Name, Arity},Count}) -> +format_error({unused_args, {Name, Arity}, Count}) -> io_lib:format("the first ~B default arguments in ~ts/~B are never used", [Count, Name, Arity]); -format_error({unused_def,{Name, Arity},defp}) -> +format_error({unused_def, {Name, Arity}, defp}) -> io_lib:format("function ~ts/~B is unused", [Name, Arity]); -format_error({unused_def,{Name, Arity},defmacrop}) -> +format_error({unused_def, {Name, Arity}, defmacrop}) -> io_lib:format("macro ~ts/~B is unused", [Name, Arity]). diff --git a/lib/elixir/src/elixir_map.erl b/lib/elixir/src/elixir_map.erl index 957530e8761..abab061488e 100644 --- a/lib/elixir/src/elixir_map.erl +++ b/lib/elixir/src/elixir_map.erl @@ -1,175 +1,226 @@ -module(elixir_map). --export([expand_map/3, translate_map/3, expand_struct/4, translate_struct/4]). --import(elixir_errors, [compile_error/4]). +-export([expand_map/3, expand_struct/4, format_error/1]). +-import(elixir_errors, [form_error/4, form_warn/4]). -include("elixir.hrl"). -expand_map(Meta, [{'|', UpdateMeta, [Left, Right]}], E) -> - {[ELeft|ERight], EA} = elixir_exp:expand_args([Left|Right], E), - {{'%{}', Meta, [{'|', UpdateMeta, [ELeft, ERight]}]}, EA}; +expand_map(Meta, [{'|', UpdateMeta, [Left, Right]}], #{context := nil} = E) -> + {[ELeft | ERight], EE} = elixir_expand:expand_args([Left | Right], E), + validate_kv(Meta, ERight, Right, E), + {{'%{}', Meta, [{'|', UpdateMeta, [ELeft, ERight]}]}, EE}; +expand_map(Meta, [{'|', _, [_, _]}] = Args, #{context := Context, file := File}) -> + form_error(Meta, File, ?MODULE, {update_syntax_in_wrong_context, Context, {'%{}', Meta, Args}}); +expand_map(Meta, Args, #{context := match} = E) -> + {EArgs, EE} = + lists:mapfoldl(fun + ({Key, Value}, EA) -> + {EKey, EK} = elixir_expand:expand(Key, EA), + validate_match_key(Meta, EKey, EK), + {EValue, EV} = elixir_expand:expand(Value, EK), + {{EKey, EValue}, EV}; + (Other, EA) -> + elixir_expand:expand(Other, EA) + end, E, Args), + validate_kv(Meta, EArgs, Args, E), + {{'%{}', Meta, EArgs}, EE}; expand_map(Meta, Args, E) -> - {EArgs, EA} = elixir_exp:expand_args(Args, E), - {{'%{}', Meta, EArgs}, EA}. + {EArgs, EE} = elixir_expand:expand_args(Args, E), + validate_kv(Meta, EArgs, Args, E), + {{'%{}', Meta, EArgs}, EE}. + +expand_struct(Meta, Left, {'%{}', MapMeta, MapArgs}, #{context := Context} = E) -> + CleanMapArgs = clean_struct_key_from_map_args(Meta, MapArgs, E), + {[ELeft, ERight], EE} = elixir_expand:expand_args([Left, {'%{}', MapMeta, CleanMapArgs}], E), + + case validate_struct(ELeft, Context) of + true when is_atom(ELeft) -> + %% We always record structs when they are expanded + %% as they expect the reference at compile time. + elixir_lexical:record_remote(ELeft, '__struct__', 1, nil, ?line(Meta), ?key(E, lexical_tracker)), + + %% We also include the current module because it won't be present + %% in context module in case the module name is defined dynamically. 
+ InContext = lists:member(ELeft, [?key(E, module) | ?key(E, context_modules)]), + + case extract_struct_assocs(Meta, ERight, E) of + {expand, MapMeta, Assocs} when Context /= match -> %% Expand + Struct = load_struct(Meta, ELeft, [Assocs], InContext, EE), + assert_struct_keys(Meta, ELeft, Struct, Assocs, EE), + Keys = ['__struct__'] ++ [K || {K, _} <- Assocs], + {StructAssocs, _} = elixir_quote:escape(maps:to_list(maps:without(Keys, Struct)), false), + {{'%', Meta, [ELeft, {'%{}', MapMeta, StructAssocs ++ Assocs}]}, EE}; + + {_, _, Assocs} -> %% Update or match + Struct = load_struct(Meta, ELeft, [], InContext, EE), + assert_struct_keys(Meta, ELeft, Struct, Assocs, EE), + {{'%', Meta, [ELeft, ERight]}, EE} + end; -expand_struct(Meta, Left, Right, E) -> - {[ELeft, ERight], EE} = elixir_exp:expand_args([Left, Right], E), - - case is_atom(ELeft) of - true -> ok; - false -> - compile_error(Meta, ?m(E, file), "expected struct name to be a compile " - "time atom or alias, got: ~ts", ['Elixir.Macro':to_string(ELeft)]) - end, - - EMeta = - case lists:member(ELeft, ?m(E, context_modules)) of - true -> - case (ELeft == ?m(E, module)) and - (?m(E, function) == nil) of - true -> - compile_error(Meta, ?m(E, file), - "cannot access struct ~ts in body of the module that defines it as " - "the struct fields are not yet accessible", - [elixir_aliases:inspect(ELeft)]); - false -> - [{struct, context}|Meta] - end; - false -> - Meta - end, - - case ERight of - {'%{}', _, _} -> ok; - _ -> compile_error(Meta, ?m(E, file), - "expected struct to be followed by a map, got: ~ts", - ['Elixir.Macro':to_string(ERight)]) - end, - - {{'%', EMeta, [ELeft, ERight]}, EE}. - -translate_map(Meta, Args, S) -> - {Assocs, TUpdate, US} = extract_assoc_update(Args, S), - translate_map(Meta, Assocs, TUpdate, US). + true -> + {{'%', Meta, [ELeft, ERight]}, EE}; -translate_struct(Meta, Name, {'%{}', MapMeta, Args}, S) -> - {Assocs, TUpdate, US} = extract_assoc_update(Args, S), - Struct = load_struct(Meta, Name, S), + false when Context == match -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_struct_name_in_match, ELeft}); - case is_map(Struct) of - true -> - assert_struct_keys(Meta, Name, Struct, Assocs, S); false -> - compile_error(Meta, S#elixir_scope.file, "expected ~ts.__struct__/0 to " - "return a map, got: ~ts", [elixir_aliases:inspect(Name), 'Elixir.Kernel':inspect(Struct)]) - end, - - if - TUpdate /= nil -> - Line = ?line(Meta), - {VarName, _, VS} = elixir_scope:build_var('_', US), - - Var = {var, Line, VarName}, - Map = {map, Line, [{map_field_exact, Line, {atom, Line, '__struct__'}, {atom, Line, Name}}]}, - - Match = {match, Line, Var, Map}, - Error = {tuple, Line, [{atom, Line, badstruct}, {atom, Line, Name}, Var]}, - - {TMap, TS} = translate_map(MapMeta, Assocs, Var, VS), - - {{'case', Line, TUpdate, [ - {clause, Line, [Match], [], [TMap]}, - {clause, Line, [Var], [], [elixir_utils:erl_call(Line, erlang, error, [Error])]} - ]}, TS}; - S#elixir_scope.context == match -> - translate_map(MapMeta, Assocs ++ [{'__struct__', Name}], nil, US); - true -> - Keys = [K || {K,_} <- Assocs], - {StructAssocs, _} = elixir_quote:escape(maps:to_list(maps:without(Keys, Struct)), false), - translate_map(MapMeta, StructAssocs ++ Assocs ++ [{'__struct__', Name}], nil, US) + form_error(Meta, ?key(E, file), ?MODULE, {invalid_struct_name, ELeft}) + end; +expand_struct(Meta, _Left, Right, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {non_map_after_struct, Right}). 
+ +clean_struct_key_from_map_args(Meta, [{'|', PipeMeta, [Left, MapAssocs]}], E) -> + [{'|', PipeMeta, [Left, clean_struct_key_from_map_assocs(Meta, MapAssocs, E)]}]; +clean_struct_key_from_map_args(Meta, MapAssocs, E) -> + clean_struct_key_from_map_assocs(Meta, MapAssocs, E). + +clean_struct_key_from_map_assocs(Meta, Assocs, E) -> + case lists:keytake('__struct__', 1, Assocs) of + {value, _, CleanAssocs} -> + form_warn(Meta, ?key(E, file), ?MODULE, ignored_struct_key_in_struct), + CleanAssocs; + false -> + Assocs end. -%% Helpers +validate_match_key(_Meta, {'^', _, [_]}, _E) -> + ok; +validate_match_key(Meta, {Name, _, Context}, E) when is_atom(Name), is_atom(Context) -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_variable_in_map_key_match, Name}); +validate_match_key(Meta, {Left, _, Right}, E) -> + validate_match_key(Meta, Left, E), + validate_match_key(Meta, Right, E); +validate_match_key(Meta, {Left, Right}, E) -> + validate_match_key(Meta, Left, E), + validate_match_key(Meta, Right, E); +validate_match_key(Meta, List, E) when is_list(List) -> + [validate_match_key(Meta, Each, E) || Each <- List]; +validate_match_key(_, _, _) -> + ok. + +validate_kv(Meta, KV, Original, E) -> + lists:foldl(fun + ({_K, _V}, Acc) -> + Acc + 1; + (_, Acc) -> + form_error(Meta, ?key(E, file), ?MODULE, {not_kv_pair, lists:nth(Acc, Original)}) + end, 1, KV). + +extract_struct_assocs(_, {'%{}', Meta, [{'|', _, [_, Assocs]}]}, _) -> + {update, Meta, delete_struct_key(Assocs)}; +extract_struct_assocs(_, {'%{}', Meta, Assocs}, _) -> + {expand, Meta, delete_struct_key(Assocs)}; +extract_struct_assocs(Meta, Other, E) -> + form_error(Meta, ?key(E, file), ?MODULE, {non_map_after_struct, Other}). + +delete_struct_key(Assocs) -> + lists:keydelete('__struct__', 1, Assocs). + +validate_struct({'^', _, [{Var, _, Ctx}]}, match) when is_atom(Var), is_atom(Ctx) -> true; +validate_struct({Var, _Meta, Ctx}, match) when is_atom(Var), is_atom(Ctx) -> true; +validate_struct(Atom, _) when is_atom(Atom) -> true; +validate_struct(_, _) -> false. + +load_struct(Meta, Name, Args, InContext, E) -> + Arity = length(Args), -load_struct(Meta, Name, S) -> Local = - elixir_module:is_open(Name) andalso - (case lists:keyfind(struct, 1, Meta) of - {struct, context} -> true; - _ -> wait_for_struct(Name) - end), + not(ensure_loaded(Name)) andalso + (InContext orelse wait_for_struct(Name)), try - case Local of - true -> + case Local andalso elixir_def:local_for(Name, '__struct__', Arity, [def]) of + false -> + apply(Name, '__struct__', Args); + LocalFun -> + %% There is an inherent race condition when using local_for. + %% By the time we got to execute the function, the ets table + %% with temporary definitions for the given module may no longer + %% be available, so any function invocation happening inside the + %% local function will fail. In this case, we need to fallback to + %% the regular dispatching since the module will be available if + %% the table has not been deleted (unless compilation of that + %% module failed which then should cause this call to fail too). 
try - (elixir_locals:local_for(Name, '__struct__', 0, def))() + apply(LocalFun, Args) catch - error:undef -> Name:'__struct__'(); - error:badarg -> Name:'__struct__'() - end; - false -> - Name:'__struct__'() + error:undef -> apply(Name, '__struct__', Args) + end end + of + #{} = Struct -> + Struct; + Other -> + form_error(Meta, ?key(E, file), ?MODULE, {invalid_struct_return_value, Name, Arity, Other}) catch error:undef -> - Inspected = elixir_aliases:inspect(Name), - compile_error(Meta, S#elixir_scope.file, "~ts.__struct__/0 is undefined, " - "cannot expand struct ~ts", [Inspected, Inspected]) + case InContext andalso (?key(E, function) == nil) of + true -> + form_error(Meta, ?key(E, file), ?MODULE, {inaccessible_struct, Name}); + false -> + form_error(Meta, ?key(E, file), ?MODULE, {undefined_struct, Name, Arity}) + end; + + Kind:Reason -> + Stacktrace = erlang:get_stacktrace(), + Info = [{Name, '__struct__', Arity, [{file, "expanding struct"}]}, + elixir_utils:caller(?line(Meta), ?key(E, file), ?key(E, module), ?key(E, function))], + erlang:raise(Kind, Reason, prune_stacktrace(Stacktrace, Name, Arity) ++ Info) end. +prune_stacktrace([{Module, '__struct__', Arity, _} | _], Module, Arity) -> + []; +prune_stacktrace([H | T], Module, Arity) -> + [H | prune_stacktrace(T, Module, Arity)]; +prune_stacktrace([], _Module, _Arity) -> + []. + +ensure_loaded(Module) -> + code:ensure_loaded(Module) == {module, Module}. + wait_for_struct(Module) -> - case erlang:get(elixir_compiler_pid) of - undefined -> - false; - Pid -> - Ref = erlang:make_ref(), - Pid ! {waiting, struct, self(), Ref, Module}, - receive - {Ref, ready} -> - true; - {Ref, release} -> - 'Elixir.Kernel.ErrorHandler':release(), - false - end - end. + is_pid(erlang:get(elixir_compiler_pid)) andalso + 'Elixir.Kernel.ErrorHandler':ensure_compiled(Module, struct). -translate_map(Meta, Assocs, TUpdate, #elixir_scope{extra=Extra} = S) -> - {Op, KeyFun, ValFun} = extract_key_val_op(TUpdate, S), - - Line = ?line(Meta), - - {TArgs, SA} = lists:mapfoldl(fun - ({Key, Value}, Acc) -> - {TKey, Acc1} = KeyFun(Key, Acc), - {TValue, Acc2} = ValFun(Value, Acc1#elixir_scope{extra=Extra}), - {{Op, ?line(Meta), TKey, TValue}, Acc2}; - (Other, _Acc) -> - compile_error(Meta, S#elixir_scope.file, "expected key-value pairs in map, got: ~ts", - ['Elixir.Macro':to_string(Other)]) - end, S, Assocs), - - build_map(Line, TUpdate, TArgs, SA). - -extract_assoc_update([{'|', _Meta, [Update, Args]}], S) -> - {TArg, SA} = elixir_translator:translate_arg(Update, S, S), - {Args, TArg, SA}; -extract_assoc_update(Args, SA) -> {Args, nil, SA}. - -extract_key_val_op(_TUpdate, #elixir_scope{context=match}) -> - {map_field_exact, - fun(X, Acc) -> elixir_translator:translate(X, Acc#elixir_scope{extra=map_key}) end, - fun elixir_translator:translate/2}; -extract_key_val_op(TUpdate, S) -> - KS = S#elixir_scope{extra=map_key}, - Op = if TUpdate == nil -> map_field_assoc; true -> map_field_exact end, - {Op, - fun(X, Acc) -> elixir_translator:translate_arg(X, Acc, KS) end, - fun(X, Acc) -> elixir_translator:translate_arg(X, Acc, S) end}. - -build_map(Line, nil, TArgs, SA) -> {{map, Line, TArgs}, SA}; -build_map(Line, TUpdate, TArgs, SA) -> {{map, Line, TUpdate, TArgs}, SA}. 
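When __struct__/0 or __struct__/1 raises during expansion, load_struct/5 above re-raises with prune_stacktrace/3 applied, so the struct callback's frame and its callers are dropped and a synthetic "expanding struct" entry plus the caller location are appended. A sketch of the pruning step on a made-up stacktrace (frames and module names invented):

    %% Illustrative sketch only - same recursion as prune_stacktrace/3 above.
    -module(prune_sketch).
    -export([demo/0]).

    demo() ->
      Stack = [{lists, nth, 2, []},                    %% raised inside __struct__/1
               {'Elixir.User', '__struct__', 1, []},
               {elixir_map, load_struct, 5, []}],
      prune(Stack, 'Elixir.User', 1).
      %% => [{lists, nth, 2, []}] - the __struct__/1 frame and everything
      %% after it are removed before the extra info is appended.

    prune([{Module, '__struct__', Arity, _} | _], Module, Arity) -> [];
    prune([H | T], Module, Arity) -> [H | prune(T, Module, Arity)];
    prune([], _Module, _Arity) -> [].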
- -assert_struct_keys(Meta, Name, Struct, Assocs, S) -> +assert_struct_keys(Meta, Name, Struct, Assocs, E) -> [begin - compile_error(Meta, S#elixir_scope.file, "unknown key ~ts for struct ~ts", - ['Elixir.Kernel':inspect(Key), elixir_aliases:inspect(Name)]) + form_error(Meta, ?key(E, file), ?MODULE, {unknown_key_for_struct, Name, Key}) end || {Key, _} <- Assocs, not maps:is_key(Key, Struct)]. + +format_error({update_syntax_in_wrong_context, Context, Expr}) -> + io_lib:format("cannot use map/struct update syntax in ~ts, got: ~ts", + [Context, 'Elixir.Macro':to_string(Expr)]); +format_error({invalid_struct_name_in_match, Expr}) -> + Message = + "expected struct name in a match to be a compile time atom, alias or a " + "variable, got: ~ts", + io_lib:format(Message, ['Elixir.Macro':to_string(Expr)]); +format_error({invalid_struct_name, Expr}) -> + Message = "expected struct name to be a compile time atom or alias, got: ~ts", + io_lib:format(Message, ['Elixir.Macro':to_string(Expr)]); +format_error({invalid_variable_in_map_key_match, Name}) -> + Message = + "illegal use of variable ~ts inside map key match, maps can only match on " + "existing variables by using ^~ts", + io_lib:format(Message, [Name, Name]); +format_error({not_kv_pair, Expr}) -> + io_lib:format("expected key-value pairs in a map, got: ~ts", + ['Elixir.Macro':to_string(Expr)]); +format_error({non_map_after_struct, Expr}) -> + io_lib:format("expected struct to be followed by a map, got: ~ts", + ['Elixir.Macro':to_string(Expr)]); +format_error({invalid_struct_return_value, Module, Arity, Expr}) -> + io_lib:format("expected ~ts.__struct__/~p to return a map, got: ~ts", + ['Elixir.Macro':to_string(Module), Arity, 'Elixir.Macro':to_string(Expr)]); +format_error({inaccessible_struct, Module}) -> + Message = + "cannot access struct ~ts, the struct was not yet defined or the struct is " + "being accessed in the same context that defines it", + io_lib:format(Message, ['Elixir.Macro':to_string(Module)]); +format_error({undefined_struct, Module, Arity}) -> + StringName = 'Elixir.Macro':to_string(Module), + io_lib:format("~ts.__struct__/~p is undefined, cannot expand struct ~ts", + [StringName, Arity, StringName]); +format_error({unknown_key_for_struct, Module, Key}) -> + io_lib:format("unknown key ~ts for struct ~ts", + ['Elixir.Macro':to_string(Key), 'Elixir.Macro':to_string(Module)]); +format_error(ignored_struct_key_in_struct) -> + "key :__struct__ is ignored when using structs". diff --git a/lib/elixir/src/elixir_module.erl b/lib/elixir/src/elixir_module.erl index 971b50eb472..df4345b4120 100644 --- a/lib/elixir/src/elixir_module.erl +++ b/lib/elixir/src/elixir_module.erl @@ -1,25 +1,43 @@ -module(elixir_module). --export([compile/4, data_table/1, docs_table/1, is_open/1, - expand_callback/6, add_beam_chunk/3, format_error/1]). +-export([data_table/1, defs_table/1, is_open/1, delete_doc/6, + compile/4, expand_callback/6, format_error/1, + compiler_modules/0, delete_impl/6]). -include("elixir.hrl"). --define(acc_attr, '__acc_attributes'). --define(docs_attr, '__docs_table'). --define(lexical_attr, '__lexical_tracker'). --define(persisted_attr, '__persisted_attributes'). --define(overridable_attr, '__overridable'). --define(location_attr, '__location'). +-define(lexical_attr, {elixir, lexical_tracker}). +-define(persisted_attr, {elixir, persisted_attributes}). 
-%% TABLE METHODS +%% Stores modules currently being defined by the compiler + +compiler_modules() -> + case erlang:get(elixir_compiler_modules) of + undefined -> []; + M when is_list(M) -> M + end. + +put_compiler_modules([]) -> + erlang:erase(elixir_compiler_modules); +put_compiler_modules(M) when is_list(M) -> + erlang:put(elixir_compiler_modules, M). + +%% Table functions data_table(Module) -> - Module. + ets:lookup_element(elixir_modules, Module, 2). -docs_table(Module) -> - ets:lookup_element(Module, ?docs_attr, 2). +defs_table(Module) -> + ets:lookup_element(elixir_modules, Module, 3). is_open(Module) -> - Module == ets:info(Module, name). + ets:lookup(elixir_modules, Module) /= []. + +delete_doc(#{module := Module}, _, _, _, _, _) -> + ets:delete(data_table(Module), doc), + ok. + +delete_impl(#{module := Module}, _, _, _, _, _) -> + ets:delete(data_table(Module), impl), + ok. %% Compilation hook @@ -27,331 +45,102 @@ compile(Module, Block, Vars, #{line := Line} = Env) when is_atom(Module) -> %% In case we are generating a module from inside a function, %% we get rid of the lexical tracker information as, at this %% point, the lexical tracker process is long gone. - LexEnv = case ?m(Env, function) of - nil -> Env#{module := Module, local := nil}; - _ -> Env#{lexical_tracker := nil, function := nil, module := Module, local := nil} + LexEnv = case ?key(Env, function) of + nil -> Env#{module := Module}; + _ -> Env#{lexical_tracker := nil, function := nil, module := Module} end, - case ?m(LexEnv, lexical_tracker) of + case ?key(LexEnv, lexical_tracker) of nil -> - elixir_lexical:run(?m(LexEnv, file), fun(Pid) -> - do_compile(Line, Module, Block, Vars, LexEnv#{lexical_tracker := Pid}) + elixir_lexical:run(?key(LexEnv, file), nil, fun(Pid) -> + compile(Line, Module, Block, Vars, LexEnv#{lexical_tracker := Pid}) end); _ -> - do_compile(Line, Module, Block, Vars, LexEnv) + compile(Line, Module, Block, Vars, LexEnv) end; - compile(Module, _Block, _Vars, #{line := Line, file := File}) -> - elixir_errors:form_error(Line, File, ?MODULE, {invalid_module, Module}). + elixir_errors:form_error([{line, Line}], File, ?MODULE, {invalid_module, Module}). 
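compiler_modules/0 and put_compiler_modules/1 above keep the stack of modules currently being compiled in the process dictionary; compile/5 below pushes the new module before evaluating its body and restores the previous stack in its after clause. A self-contained sketch of that push/restore discipline (key and function names invented):

    %% Illustrative sketch only - the push/restore pattern used around compilation.
    -module(module_stack_sketch).
    -export([with_module/2, current/0]).

    current() ->
      case erlang:get(compiling_modules) of
        undefined -> [];
        Modules when is_list(Modules) -> Modules
      end.

    with_module(Module, Fun) ->
      Previous = current(),
      erlang:put(compiling_modules, [Module | Previous]),
      try
        Fun()
      after
        %% Restore the outer stack even if Fun() raises, mirroring the
        %% after clause in compile/5.
        erlang:put(compiling_modules, Previous)
      end.

Nesting works as expected: with_module(a, fun() -> with_module(b, fun current/0) end) returns [b, a].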
-do_compile(Line, Module, Block, Vars, E) -> - File = ?m(E, file), +compile(Line, Module, Block, Vars, E) -> + File = ?key(E, file), check_module_availability(Line, File, Module), - build(Line, File, Module, ?m(E, lexical_tracker)), - try - {Result, NE} = eval_form(Line, Module, Block, Vars, E), - {Base, Export, Private, Def, Defmacro, Functions} = elixir_def:unwrap_definitions(Module), - - {All, Forms0} = functions_form(Line, File, Module, Base, Export, Def, Defmacro, Functions), - Forms1 = specs_form(Module, Private, Defmacro, Forms0), - Forms2 = types_form(Module, Forms1), - Forms3 = attributes_form(Line, File, Module, Forms2), - - case ets:lookup(data_table(Module), 'on_load') of - [] -> ok; - [{on_load,OnLoad}] -> - [elixir_locals:record_local(Tuple, Module) || Tuple <- OnLoad] - end, + CompilerModules = compiler_modules(), + {Data, Defs, Ref} = build(Line, File, Module, ?key(E, lexical_tracker)), - AllFunctions = Def ++ [T || {T, defp, _, _, _} <- Private], - elixir_locals:ensure_no_function_conflict(Line, File, Module, AllFunctions), - elixir_locals:warn_unused_local(File, Module, Private), - warn_invalid_clauses(Line, File, Module, All), - warn_unused_docs(Line, File, Module), - - Location = {elixir_utils:relative_to_cwd(elixir_utils:characters_to_list(File)), Line}, - - Final = [ - {attribute, Line, file, Location}, - {attribute, Line, module, Module} | Forms3 - ], - - Binary = load_form(Line, Final, compile_opts(Module), NE), + try + put_compiler_modules([Module | CompilerModules]), + {Result, NE, OverridablePairs} = eval_form(Line, Module, Data, Block, Vars, E), + + PersistedAttributes = ets:lookup_element(Data, ?persisted_attr, 2), + Attributes = attributes(Line, File, Data, PersistedAttributes), + OnLoad = ets:lookup_element(Data, 'on_load', 2), + [elixir_locals:record_local(Tuple, Module) || Tuple <- OnLoad], + + {AllDefinitions, Unreachable} = elixir_def:fetch_definitions(File, Module), + + (not elixir_compiler:get_opt(internal)) andalso + 'Elixir.Module':check_behaviours_and_impls(E, Data, AllDefinitions, OverridablePairs), + + CompileOpts = lists:flatten(ets:lookup_element(Data, compile, 2)), + + ModuleMap = #{ + module => Module, + line => Line, + file => File, + attributes => Attributes, + definitions => AllDefinitions, + unreachable => Unreachable, + compile_opts => CompileOpts + }, + + Binary = elixir_erl:compile(ModuleMap), + warn_unused_attributes(File, Data, PersistedAttributes), + autoload_module(Module, Binary, CompileOpts, NE), + eval_callbacks(Line, Data, after_compile, [NE, Binary], NE), + make_module_available(Module, Binary), {module, Module, Binary, Result} + catch + error:undef -> + case erlang:get_stacktrace() of + [{Module, Fun, Args, _Info} | _] = Stack when is_list(Args) -> + compile_undef(Module, Fun, length(Args), Stack); + [{Module, Fun, Arity, _Info} | _] = Stack -> + compile_undef(Module, Fun, Arity, Stack); + Stack -> + erlang:raise(error, undef, Stack) + end after + put_compiler_modules(CompilerModules), elixir_locals:cleanup(Module), - elixir_def:cleanup(Module), - ets:delete(docs_table(Module)), - ets:delete(data_table(Module)) + ets:delete(Data), + ets:delete(Defs), + elixir_code_server:call({undefmodule, Ref}) end. -%% Hook that builds both attribute and functions and set up common hooks. - -build(Line, File, Module, Lexical) -> - %% Table with meta information about the module. 
- DataTable = data_table(Module), - - OldTable = ets:info(DataTable, name), - case OldTable == DataTable of - true -> - [{OldFile, OldLine}] = ets:lookup_element(OldTable, ?location_attr, 2), - Error = {module_in_definition, Module, OldFile, OldLine}, - elixir_errors:form_error(Line, File, ?MODULE, Error); +%% An undef error for a function in the module being compiled might result in an +%% exception message suggesting the current module is not loaded. This is +%% misleading so use a custom reason. +compile_undef(Module, Fun, Arity, Stack) -> + ExMod = 'Elixir.UndefinedFunctionError', + case code:is_loaded(ExMod) of false -> - [] - end, - - ets:new(DataTable, [set, named_table, public]), - ets:insert(DataTable, {before_compile, []}), - ets:insert(DataTable, {after_compile, []}), - - case elixir_compiler:get_opt(docs) of - true -> ets:insert(DataTable, {on_definition, [{'Elixir.Module', compile_doc}]}); - _ -> ets:insert(DataTable, {on_definition, []}) - end, - - Attributes = [behaviour, on_load, spec, type, typep, opaque, callback, compile, external_resource], - ets:insert(DataTable, {?acc_attr, [before_compile, after_compile, on_definition, derive|Attributes]}), - ets:insert(DataTable, {?persisted_attr, [vsn|Attributes]}), - ets:insert(DataTable, {?docs_attr, ets:new(DataTable, [ordered_set, public])}), - ets:insert(DataTable, {?lexical_attr, Lexical}), - ets:insert(DataTable, {?overridable_attr, []}), - ets:insert(DataTable, {?location_attr, [{File, Line}]}), - - %% Setup other modules - elixir_def:setup(Module), - elixir_locals:setup(Module). - -%% Receives the module representation and evaluates it. - -eval_form(Line, Module, Block, Vars, E) -> - {Value, EE} = elixir_compiler:eval_forms(Block, Vars, E), - elixir_def_overridable:store_pending(Module), - EV = elixir_env:linify({Line, EE#{vars := [], export_vars := nil}}), - EC = eval_callbacks(Line, Module, before_compile, [EV], EV), - elixir_def_overridable:store_pending(Module), - {Value, EC}. - -eval_callbacks(Line, Module, Name, Args, E) -> - Callbacks = lists:reverse(ets:lookup_element(data_table(Module), Name, 2)), - - lists:foldl(fun({M,F}, Acc) -> - expand_callback(Line, M, F, Args, Acc#{vars := [], export_vars := nil}, - fun(AM, AF, AA) -> apply(AM, AF, AA) end) - end, E, Callbacks). - -%% Return the form with exports and function declarations. - -functions_form(Line, File, Module, BaseAll, BaseExport, Def, Defmacro, BaseFunctions) -> - {InfoSpec, Info} = add_info_function(Line, File, Module, BaseExport, Def, Defmacro), - - All = [{'__info__', 1}|BaseAll], - Export = [{'__info__', 1}|BaseExport], - Functions = [InfoSpec,Info|BaseFunctions], - - {All, [ - {attribute, Line, export, lists:sort(Export)} | Functions - ]}. - -%% Add attributes handling to the form - -attributes_form(Line, File, Module, Current) -> - Table = data_table(Module), - - AccAttrs = ets:lookup_element(Table, '__acc_attributes', 2), - PersistedAttrs = ets:lookup_element(Table, '__persisted_attributes', 2), - - Transform = fun({Key, Value}, Acc) -> - case lists:member(Key, PersistedAttrs) of - false -> Acc; - true -> - Values = - case lists:member(Key, AccAttrs) of - true -> Value; - false -> [Value] - end, - - lists:foldl(fun(X, Final) -> - [{attribute, Line, Key, X}|Final] - end, Acc, process_attribute(Line, File, Key, Values)) - end - end, - - ets:foldl(Transform, Current, Table). 
- -process_attribute(Line, File, external_resource, Values) -> - lists:usort([process_external_resource(Line, File, Value) || Value <- Values]); -process_attribute(_Line, _File, _Key, Values) -> - Values. - -process_external_resource(_Line, _File, Value) when is_binary(Value) -> - Value; -process_external_resource(Line, File, Value) -> - elixir_errors:handle_file_error(File, - {Line, ?MODULE, {invalid_external_resource, Value}}). - -%% Types - -types_form(Module, Forms0) -> - case code:ensure_loaded('Elixir.Kernel.Typespec') of - {module, 'Elixir.Kernel.Typespec'} -> - Types0 = 'Elixir.Module':get_attribute(Module, type) ++ - 'Elixir.Module':get_attribute(Module, typep) ++ - 'Elixir.Module':get_attribute(Module, opaque), - - Types1 = ['Elixir.Kernel.Typespec':translate_type(Kind, Expr, Doc, Caller) || - {Kind, Expr, Doc, Caller} <- Types0], - - 'Elixir.Module':delete_attribute(Module, type), - 'Elixir.Module':delete_attribute(Module, typep), - 'Elixir.Module':delete_attribute(Module, opaque), - - Forms1 = types_attributes(Types1, Forms0), - Forms2 = export_types_attributes(Types1, Forms1), - typedocs_attributes(Types1, Forms2); - - {error, _} -> - Forms0 - end. - -types_attributes(Types, Forms) -> - Fun = fun({{Kind, _NameArity, Expr}, Line, _Export, _Doc}, Acc) -> - [{attribute, Line, Kind, Expr}|Acc] - end, - lists:foldl(Fun, Forms, Types). - -export_types_attributes(Types, Forms) -> - Fun = fun - ({{_Kind, NameArity, _Expr}, Line, true, _Doc}, Acc) -> - [{attribute, Line, export_type, [NameArity]}|Acc]; - ({_Type, _Line, false, _Doc}, Acc) -> - Acc - end, - lists:foldl(Fun, Forms, Types). - -typedocs_attributes(Types, Forms) -> - Fun = fun - ({{_Kind, NameArity, _Expr}, Line, true, Doc}, Acc) when Doc =/= nil -> - [{attribute, Line, typedoc, {NameArity, Doc}}|Acc]; - ({_Type, _Line, _Export, _Doc}, Acc) -> - Acc - end, - lists:foldl(Fun, Forms, Types). - -%% Specs - -specs_form(Module, Private, Defmacro, Forms) -> - case code:ensure_loaded('Elixir.Kernel.Typespec') of - {module, 'Elixir.Kernel.Typespec'} -> - Defmacrop = [Tuple || {Tuple, defmacrop, _, _, _} <- Private], - - Specs0 = 'Elixir.Module':get_attribute(Module, spec) ++ - 'Elixir.Module':get_attribute(Module, callback), - - Specs1 = ['Elixir.Kernel.Typespec':translate_spec(Kind, Expr, Caller) || - {Kind, Expr, Caller} <- Specs0], - Specs2 = lists:flatmap(fun(Spec) -> - translate_macro_spec(Spec, Defmacro, Defmacrop) - end, Specs1), - - 'Elixir.Module':delete_attribute(Module, spec), - 'Elixir.Module':delete_attribute(Module, callback), - specs_attributes(Forms, Specs2); - - {error, _} -> - Forms - end. - -specs_attributes(Forms, Specs) -> - Dict = lists:foldl(fun({{Kind, NameArity, Spec}, Line}, Acc) -> - dict:append({Kind, NameArity}, {Spec, Line}, Acc) - end, dict:new(), Specs), - dict:fold(fun({Kind, NameArity}, ExprsLines, Acc) -> - {Exprs, Lines} = lists:unzip(ExprsLines), - Line = lists:min(Lines), - [{attribute, Line, Kind, {NameArity, Exprs}}|Acc] - end, Forms, Dict). - -translate_macro_spec({{spec, NameArity, Spec}, Line}, Defmacro, Defmacrop) -> - case ordsets:is_element(NameArity, Defmacrop) of - true -> []; - false -> - case ordsets:is_element(NameArity, Defmacro) of - true -> - {Name, Arity} = NameArity, - [{{spec, {elixir_utils:macro_name(Name), Arity + 1}, spec_for_macro(Spec)}, Line}]; - false -> - [{{spec, NameArity, Spec}, Line}] - end - end; - -translate_macro_spec({{callback, NameArity, Spec}, Line}, _Defmacro, _Defmacrop) -> - [{{callback, NameArity, Spec}, Line}]. 
- -spec_for_macro({type, Line, 'fun', [{type, _, product, Args}|T]}) -> - NewArgs = [{type, Line, term, []}|Args], - {type, Line, 'fun', [{type, Line, product, NewArgs}|T]}; - -spec_for_macro(Else) -> Else. - -%% Loads the form into the code server. - -compile_opts(Module) -> - case ets:lookup(data_table(Module), compile) of - [{compile,Opts}] when is_list(Opts) -> Opts; - [] -> [] + erlang:raise(error, undef, Stack); + _ -> + Opts = [{module, Module}, {function, Fun}, {arity, Arity}, + {reason, 'function not available'}], + Exception = 'Elixir.UndefinedFunctionError':exception(Opts), + erlang:raise(error, Exception, Stack) end. -load_form(Line, Forms, Opts, #{file := File} = E) -> - elixir_compiler:module(Forms, File, Opts, fun(Module, Binary0) -> - Docs = elixir_compiler:get_opt(docs), - Binary = add_docs_chunk(Binary0, Module, Line, Docs), - eval_callbacks(Line, Module, after_compile, [E, Binary], E), - - case get(elixir_compiled) of - Current when is_list(Current) -> - put(elixir_compiled, [{Module,Binary}|Current]), - - case get(elixir_compiler_pid) of - undefined -> []; - PID -> - Ref = make_ref(), - PID ! {module_available, self(), Ref, File, Module, Binary}, - receive {Ref, ack} -> ok end - end; - _ -> - [] - end, - - Binary - end). - -add_docs_chunk(Bin, Module, Line, true) -> - ChunkData = term_to_binary({elixir_docs_v1, [ - {docs, get_docs(Module)}, - {moduledoc, get_moduledoc(Line, Module)} - ]}), - add_beam_chunk(Bin, "ExDc", ChunkData); - -add_docs_chunk(Bin, _, _, _) -> Bin. - -get_docs(Module) -> - ordsets:from_list( - [{Tuple, Line, Kind, Sig, Doc} || - {Tuple, Line, Kind, Sig, Doc} <- ets:tab2list(docs_table(Module)), - Kind =/= type, Kind =/= opaque]). - -get_moduledoc(Line, Module) -> - {Line, 'Elixir.Module':get_attribute(Module, moduledoc)}. - +%% Handle reserved modules and duplicates. check_module_availability(Line, File, Module) -> Reserved = ['Elixir.Any', 'Elixir.BitString', 'Elixir.Function', 'Elixir.PID', 'Elixir.Reference', 'Elixir.Elixir', 'Elixir'], case lists:member(Module, Reserved) of - true -> elixir_errors:handle_file_error(File, {Line, ?MODULE, {module_reserved, Module}}); + true -> elixir_errors:form_error([{line, Line}], File, ?MODULE, {module_reserved, Module}); false -> ok end, @@ -359,7 +148,7 @@ check_module_availability(Line, File, Module) -> false -> case code:ensure_loaded(Module) of {module, _} -> - elixir_errors:handle_file_warning(File, {Line, ?MODULE, {module_defined, Module}}); + elixir_errors:form_warn([{line, Line}], File, ?MODULE, {module_defined, Module}); {error, _} -> ok end; @@ -367,80 +156,94 @@ check_module_availability(Line, File, Module) -> ok end. -warn_invalid_clauses(_Line, _File, 'Elixir.Kernel.SpecialForms', _All) -> ok; -warn_invalid_clauses(_Line, File, Module, All) -> - ets:foldl(fun - ({_, _, Kind, _, _}, _) when Kind == type; Kind == opaque -> - ok; - ({Tuple, Line, _, _, _}, _) -> - case lists:member(Tuple, All) of - false -> - elixir_errors:handle_file_warning(File, {Line, ?MODULE, {invalid_clause, Tuple}}); - true -> - ok - end - end, ok, docs_table(Module)). - -warn_unused_docs(Line, File, Module) -> - lists:foreach(fun(Attribute) -> - case ets:member(data_table(Module), Attribute) of - true -> - elixir_errors:handle_file_warning(File, {Line, ?MODULE, {unused_doc, Attribute}}); - _ -> - ok - end - end, [typedoc]). 
- -% EXTRA FUNCTIONS - -add_info_function(Line, File, Module, Export, Def, Defmacro) -> - Pair = {'__info__', 1}, - case lists:member(Pair, Export) of - true -> - elixir_errors:form_error(Line, File, ?MODULE, {internal_function_overridden, Pair}); - false -> - { - {attribute, Line, spec, {{'__info__', 1}, - [{type, Line, 'fun', [{type, Line, product, [ {type, Line, atom, []}]}, {type, Line, term, []} ]}] - }}, - {function, 0, '__info__', 1, [ - functions_clause(Def), - macros_clause(Defmacro), - module_clause(Module), - else_clause() - ]} - } - end. - -functions_clause(Def) -> - {clause, 0, [{atom, 0, functions}], [], [elixir_utils:elixir_to_erl(Def)]}. +%% Hook that builds both attribute and functions and set up common hooks. -macros_clause(Defmacro) -> - {clause, 0, [{atom, 0, macros}], [], [elixir_utils:elixir_to_erl(Defmacro)]}. +build(Line, File, Module, Lexical) -> + case elixir_code_server:call({lookup, Module}) of + [{Module, _, _, OldLine, OldFile}] -> + Error = {module_in_definition, Module, OldFile, OldLine}, + elixir_errors:form_error([{line, Line}], File, ?MODULE, Error); + _ -> + [] + end, -module_clause(Module) -> - {clause, 0, [{atom, 0, module}], [], [{atom, 0, Module}]}. + Data = ets:new(Module, [set, public]), + Defs = ets:new(Module, [duplicate_bag, public]), + Ref = elixir_code_server:call({defmodule, self(), + {Module, Data, Defs, Line, File}}), + + DocsOnDefinition = + case elixir_compiler:get_opt(docs) of + true -> [{'Elixir.Module', compile_doc}]; + _ -> [{elixir_module, delete_doc}] + end, + + ImplOnDefinition = + case elixir_compiler:get_opt(internal) of + true -> [{elixir_module, delete_impl}]; + _ -> [{'Elixir.Module', compile_impl}] + end, + + %% Docs must come first as they read the impl callback. + OnDefinition = DocsOnDefinition ++ ImplOnDefinition, + + ets:insert(Data, [ + % {Key, Value, Accumulate?, UnreadLine} + {after_compile, [], true, nil}, + {before_compile, [], true, nil}, + {behaviour, [], true, nil}, + {compile, [], true, nil}, + {derive, [], true, nil}, + {dialyzer, [], true, nil}, + {external_resource, [], true, nil}, + {moduledoc, nil, false, nil}, + {on_definition, OnDefinition, true, nil}, + {on_load, [], true, nil}, + + % Types + {callback, [], true, nil}, + {opaque, [], true, nil}, + {optional_callbacks, [], true, nil}, + {macrocallback, [], true, nil}, + {spec, [], true, nil}, + {type, [], true, nil}, + {typep, [], true, nil}, + + % Internal + {{elixir, impls}, []} + ]), + + Persisted = [behaviour, on_load, compile, external_resource, dialyzer, vsn], + ets:insert(Data, {?persisted_attr, Persisted}), + ets:insert(Data, {?lexical_attr, Lexical}), + + %% Setup definition related modules + elixir_def:setup(Module), + elixir_locals:setup(Module), + elixir_overridable:setup(Module), -else_clause() -> - Info = {call, 0, {atom, 0, module_info}, [{var, 0, atom}]}, - {clause, 0, [{var, 0, atom}], [], [Info]}. + {Data, Defs, Ref}. -% HELPERS +%% Handles module and callback evaluations. -%% Adds custom chunk to a .beam binary -add_beam_chunk(Bin, Id, ChunkData) - when is_binary(Bin), is_list(Id), is_binary(ChunkData) -> - {ok, _, Chunks0} = beam_lib:all_chunks(Bin), - NewChunk = {Id, ChunkData}, - Chunks = [NewChunk|Chunks0], - {ok, NewBin} = beam_lib:build_module(Chunks), - NewBin. 
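build/4 above seeds the module's data table with one row per known attribute in the shape {Key, Value, Accumulate?, UnreadLine}, and lookup_attribute/4 below reads accumulating attributes as lists and plain attributes as single values. A small sketch of reading both shapes from such a table (attribute values are only examples):

    %% Illustrative sketch only - reading {Key, Value, Accumulate?, UnreadLine} rows.
    -module(attr_rows_sketch).
    -export([demo/0]).

    demo() ->
      Data = ets:new(sketch_attrs, [set, public]),
      ets:insert(Data, [{behaviour, [gen_server], true, nil},    %% accumulating
                        {moduledoc, <<"docs">>, false, nil}]),   %% single value
      {attribute_values(Data, behaviour), attribute_values(Data, moduledoc)}.
      %% => {[gen_server], [<<"docs">>]}

    attribute_values(Data, Key) ->
      case ets:lookup(Data, Key) of
        [{Key, Values, true, _}] -> Values;
        [{Key, Value, false, _}] -> [Value];
        [] -> []
      end.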
+eval_form(Line, Module, Data, Block, Vars, E) -> + {Value, EE} = elixir_compiler:eval_forms(Block, Vars, E), + Pairs1 = elixir_overridable:store_pending(Module), + EV = elixir_env:linify({Line, reset_env(EE)}), + EC = eval_callbacks(Line, Data, before_compile, [EV], EV), + Pairs2 = elixir_overridable:store_pending(Module), + OverridablePairs = Pairs1 ++ Pairs2, + {Value, EC, OverridablePairs}. + +eval_callbacks(Line, Data, Name, Args, E) -> + Callbacks = ets:lookup_element(Data, Name, 2), + lists:foldr(fun({M, F}, Acc) -> + expand_callback(Line, M, F, Args, reset_env(Acc), + fun(AM, AF, AA) -> apply(AM, AF, AA) end) + end, E, Callbacks). -%% Expands a callback given by M:F(Args). In case -%% the callback can't be expanded, invokes the given -%% fun passing a possibly expanded AM:AF(Args). expand_callback(Line, M, F, Args, E, Fun) -> - Meta = [{line,Line},{require,false}], + Meta = [{line, Line}, {required, true}], {EE, ET} = elixir_dispatch:dispatch_require(Meta, M, F, Args, E, fun(AM, AF, AA) -> Fun(AM, AF, AA), @@ -456,43 +259,122 @@ expand_callback(Line, M, F, Args, E, Fun) -> EF catch Kind:Reason -> + Stacktrace = erlang:get_stacktrace(), Info = {M, F, length(Args), location(Line, E)}, - erlang:raise(Kind, Reason, prune_stacktrace(Info, erlang:get_stacktrace())) + erlang:raise(Kind, Reason, prune_stacktrace(Info, Stacktrace)) end end. -location(Line, E) -> - [{file, elixir_utils:characters_to_list(?m(E, file))}, {line, Line}]. +reset_env(Env) -> + Env#{vars := [], export_vars := nil}. + +%% Add attributes handling to the form + +attributes(Line, File, Data, PersistedAttributes) -> + [{Key, Value} || Key <- PersistedAttributes, + Value <- lookup_attribute(Line, File, Data, Key)]. + +lookup_attribute(Line, File, Data, Key) when is_atom(Key) -> + case ets:lookup(Data, Key) of + [{resource, Values, true, _}] -> + lists:usort([validate_external_resource(Line, File, Value) || Value <- Values]); + [{Key, Values, true, _}] -> + Values; + [{Key, Value, false, _}] -> + [Value]; + [] -> + [] + end. + +validate_external_resource(_Line, _File, Value) when is_binary(Value) -> + Value; +validate_external_resource(Line, File, Value) -> + elixir_errors:form_error([{line, Line}], File, ?MODULE, {invalid_external_resource, Value}). + +%% Takes care of autoloading the module if configured. + +autoload_module(Module, Binary, Opts, E) -> + case proplists:get_value(autoload, Opts, true) of + true -> code:load_binary(Module, beam_location(E), Binary); + false -> ok + end. + +beam_location(#{lexical_tracker := Pid, module := Module}) -> + case elixir_lexical:dest(Pid) of + nil -> in_memory; + Dest -> + filename:join(elixir_utils:characters_to_list(Dest), + atom_to_list(Module) ++ ".beam") + end. + +%% Handle unused attributes warnings and special cases. + +warn_unused_attributes(File, Data, PersistedAttrs) -> + ReservedAttrs = [after_compile, before_compile, moduledoc, on_definition | PersistedAttrs], + Keys = ets:select(Data, [{{'$1', '_', '_', '$2'}, [{is_atom, '$1'}, {is_integer, '$2'}], [['$1', '$2']]}]), + [elixir_errors:form_warn([{line, Line}], File, ?MODULE, {unused_attribute, Key}) || + [Key, Line] <- Keys, not lists:member(Key, ReservedAttrs)]. + +%% Integration with elixir_compiler that makes the module available + +make_module_available(Module, Binary) -> + case get(elixir_module_binaries) of + Current when is_list(Current) -> + put(elixir_module_binaries, [{Module, Binary} | Current]), + + case get(elixir_compiler_pid) of + undefined -> + ok; + PID -> + Ref = make_ref(), + PID ! 
{module_available, self(), Ref, get(elixir_compiler_file), Module, Binary}, + receive {Ref, ack} -> ok end + end; + _ -> + ok + end. + +%% Error handling and helpers. %% We've reached the elixir_module or eval internals, skip it with the rest -prune_stacktrace(Info, [{elixir, eval_forms, _, _}|_]) -> +prune_stacktrace(Info, [{elixir, eval_forms, _, _} | _]) -> [Info]; -prune_stacktrace(Info, [{elixir_module, _, _, _}|_]) -> +prune_stacktrace(Info, [{elixir_module, _, _, _} | _]) -> [Info]; -prune_stacktrace(Info, [H|T]) -> - [H|prune_stacktrace(Info, T)]; +prune_stacktrace(Info, [H | T]) -> + [H | prune_stacktrace(Info, T)]; prune_stacktrace(Info, []) -> [Info]. -% ERROR HANDLING +location(Line, E) -> + [{file, elixir_utils:characters_to_list(?key(E, file))}, {line, Line}]. -format_error({invalid_clause, {Name, Arity}}) -> - io_lib:format("empty clause provided for nonexistent function or macro ~ts/~B", [Name, Arity]); format_error({invalid_external_resource, Value}) -> io_lib:format("expected a string value for @external_resource, got: ~p", ['Elixir.Kernel':inspect(Value)]); -format_error({unused_doc, typedoc}) -> - "@typedoc provided but no type follows it"; -format_error({unused_doc, doc}) -> - "@doc provided but no definition follows it"; -format_error({internal_function_overridden, {Name, Arity}}) -> - io_lib:format("function ~ts/~B is internal and should not be overridden", [Name, Arity]); +format_error({unused_attribute, typedoc}) -> + "module attribute @typedoc was set but no type follows it"; +format_error({unused_attribute, doc}) -> + "module attribute @doc was set but no definition follows it"; +format_error({unused_attribute, impl}) -> + "module attribute @impl was set but no definition follows it"; +format_error({unused_attribute, Attr}) -> + io_lib:format("module attribute @~ts was set but never used", [Attr]); format_error({invalid_module, Module}) -> io_lib:format("invalid module name: ~ts", ['Elixir.Kernel':inspect(Module)]); format_error({module_defined, Module}) -> - io_lib:format("redefining module ~ts", [elixir_aliases:inspect(Module)]); + Extra = + case code:which(Module) of + Path when is_list(Path) -> + io_lib:format(" (current version loaded from ~ts)", [elixir_utils:relative_to_cwd(Path)]); + in_memory -> + " (current version defined in memory)"; + _ -> + "" + end, + io_lib:format("redefining module ~ts~ts", [elixir_aliases:inspect(Module), Extra]); format_error({module_reserved, Module}) -> io_lib:format("module ~ts is reserved and cannot be defined", [elixir_aliases:inspect(Module)]); format_error({module_in_definition, Module, File, Line}) -> io_lib:format("cannot define module ~ts because it is currently being defined in ~ts:~B", - [elixir_aliases:inspect(Module), 'Elixir.Path':relative_to_cwd(File), Line]). + [elixir_aliases:inspect(Module), elixir_utils:relative_to_cwd(File), Line]). diff --git a/lib/elixir/src/elixir_overridable.erl b/lib/elixir/src/elixir_overridable.erl new file mode 100644 index 00000000000..4f752865585 --- /dev/null +++ b/lib/elixir/src/elixir_overridable.erl @@ -0,0 +1,88 @@ +% Holds the logic responsible for defining overridable functions and handling super. +-module(elixir_overridable). +-export([setup/1, overridable/1, overridable/2, super/4, store_pending/1, format_error/1]). +-include("elixir.hrl"). +-define(attr, {elixir, overridable}). + +setup(Module) -> + overridable(Module, #{}). + +overridable(Module) -> + ets:lookup_element(elixir_module:data_table(Module), ?attr, 2). 
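Reviewer note: since `elixir_overridable` is a new file, here is the Elixir-level feature it implements, as an illustrative usage sketch (not part of the patch; module name is made up):

    defmodule Overridable.Example do
      def hello, do: "hello"
      defoverridable hello: 0

      # store_pending/1 keeps the original clause around (renamed to a private
      # function by super/4) so the new definition can reach it via super/0.
      def hello, do: super() <> " world"
    end

    Overridable.Example.hello()
    # => "hello world"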
+ +overridable(Module, Value) -> + ets:insert(elixir_module:data_table(Module), {?attr, Value}). + +super(Meta, File, Module, Function) -> + case store(Module, Function, true) of + {ok, Name} -> + Name; + error -> + elixir_errors:form_error(Meta, File, ?MODULE, {no_super, Module, Function}) + end. + +store_pending(Module) -> + [begin + {ok, _} = store(Module, Pair, false), + Pair + end || {Pair, {_, _, _, false}} <- maps:to_list(overridable(Module)), + not 'Elixir.Module':'defines?'(Module, Pair)]. + +%% Private + +store(Module, Function, Hidden) -> + Overridable = overridable(Module), + case maps:find(Function, Overridable) of + {ok, {Count, Def, Neighbours, Overridden}} -> + {{{def, {Name, Arity}}, Kind, Meta, File, _Check, + {Defaults, _HasBody, _LastDefaults}}, Clauses} = Def, + + {FinalKind, FinalName, FinalArity, FinalClauses} = + case Hidden of + false -> + {Kind, Name, Arity, Clauses}; + true when Kind == defmacro; Kind == defmacrop -> + {defp, name(Name, Count), Arity + 1, rewrite_clauses(Clauses)}; + true -> + {defp, name(Name, Count), Arity, Clauses} + end, + + Tuple = {FinalName, FinalArity}, + + case Overridden of + false -> + overridable(Module, maps:put(Function, {Count, Def, Neighbours, true}, Overridable)), + (not elixir_compiler:get_opt(internal)) andalso + 'Elixir.Module.LocalsTracker':reattach(Module, Kind, Function, Neighbours), + elixir_def:store_definition(false, FinalKind, Meta, FinalName, FinalArity, + File, Module, Defaults, FinalClauses), + elixir_locals:record_definition(Tuple, FinalKind, Module), + elixir_locals:record_local(Tuple, Module, Function); + true -> + ok + end, + + {ok, Tuple}; + error -> + error + end. + +rewrite_clauses(Clauses) -> + [{Meta, [{'__CALLER__', [], nil} | Args], Guards, Body} || + {Meta, Args, Guards, Body} <- Clauses]. + +name(Name, Count) when is_integer(Count) -> + list_to_atom(atom_to_list(Name) ++ " (overridable " ++ integer_to_list(Count) ++ ")"). + +%% Error handling + +format_error({no_super, Module, {Name, Arity}}) -> + Bins = [format_fa(X) || {X, {_, _, _, _}} <- maps:to_list(overridable(Module))], + Joined = 'Elixir.Enum':join(Bins, <<", ">>), + io_lib:format("no super defined for ~ts/~B in module ~ts. Overridable functions available are: ~ts", + [Name, Arity, elixir_aliases:inspect(Module), Joined]). + +format_fa({Name, Arity}) -> + A = atom_to_binary(Name, utf8), + B = integer_to_binary(Arity), + <>. 
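Reviewer note: the renaming scheme in `name/2` and the error path in `super/4` above translate roughly to the following observable behaviour (hedged, illustrative, not part of the patch; module name is made up):

    # The hidden copy of an overridden function becomes a private function whose
    # name embeds the counter, e.g. :"hello (overridable 1)" for the sketch above.

    # Calling super/0 where the function was never marked overridable fails at
    # compile time with the message built by format_error({no_super, ...}):
    defmodule NoSuper.Example do
      def go, do: super()
    end
    # => ** (CompileError) no super defined for go/0 in module NoSuper.Example. ...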
diff --git a/lib/elixir/src/elixir_parser.yrl b/lib/elixir/src/elixir_parser.yrl index e8d4c9f45aa..560fa42520c 100644 --- a/lib/elixir/src/elixir_parser.yrl +++ b/lib/elixir/src/elixir_parser.yrl @@ -1,45 +1,50 @@ Nonterminals grammar expr_list - expr container_expr block_expr no_parens_expr no_parens_one_expr access_expr + expr container_expr block_expr access_expr + no_parens_expr no_parens_zero_expr no_parens_one_expr no_parens_one_ambig_expr bracket_expr bracket_at_expr bracket_arg matched_expr unmatched_expr max_expr - op_expr matched_op_expr no_parens_op_expr + unmatched_op_expr matched_op_expr no_parens_op_expr no_parens_many_expr comp_op_eol at_op_eol unary_op_eol and_op_eol or_op_eol capture_op_eol - add_op_eol mult_op_eol hat_op_eol two_op_eol pipe_op_eol stab_op_eol + add_op_eol mult_op_eol two_op_eol three_op_eol pipe_op_eol stab_op_eol arrow_op_eol match_op_eol when_op_eol in_op_eol in_match_op_eol type_op_eol rel_op_eol - open_paren close_paren empty_paren + open_paren close_paren empty_paren eoe list list_args open_bracket close_bracket tuple open_curly close_curly bit_string open_bit close_bit map map_op map_close map_args map_expr struct_op assoc_op_eol assoc_expr assoc_base assoc_update assoc_update_kw assoc - container_args_base container_args call_args_parens_base call_args_parens parens_call - call_args_no_parens_one call_args_no_parens_expr call_args_no_parens_comma_expr - call_args_no_parens_all call_args_no_parens_many call_args_no_parens_many_strict - stab stab_eol stab_expr stab_maybe_expr stab_parens_many + container_args_base container_args + call_args_parens_expr call_args_parens_base call_args_parens parens_call + call_args_no_parens_one call_args_no_parens_ambig call_args_no_parens_expr + call_args_no_parens_comma_expr call_args_no_parens_all call_args_no_parens_many + call_args_no_parens_many_strict + stab stab_eoe stab_expr stab_op_eol_and_expr stab_parens_many kw_eol kw_base kw call_args_no_parens_kw_expr call_args_no_parens_kw - dot_op dot_alias dot_identifier dot_op_identifier dot_do_identifier + dot_op dot_alias dot_alias_container + dot_identifier dot_op_identifier dot_do_identifier dot_paren_identifier dot_bracket_identifier - do_block fn_eol do_eol end_eol block_eol block_item block_list + do_block fn_eoe do_eoe end_eoe block_eoe block_item block_list . Terminals identifier kw_identifier kw_identifier_safe kw_identifier_unsafe bracket_identifier paren_identifier do_identifier block_identifier fn 'end' aliases - number signed_number atom atom_safe atom_unsafe bin_string list_string sigil + number char atom atom_safe atom_unsafe bin_string list_string sigil dot_call_op op_identifier comp_op at_op unary_op and_op or_op arrow_op match_op in_op in_match_op - type_op dual_op add_op mult_op hat_op two_op pipe_op stab_op when_op assoc_op + type_op dual_op add_op mult_op two_op three_op pipe_op stab_op when_op assoc_op capture_op rel_op - 'true' 'false' 'nil' 'do' eol ',' '.' + 'true' 'false' 'nil' 'do' eol ';' ',' '.' '(' ')' '[' ']' '{' '}' '<<' '>>' '%{}' '%' . Rootsymbol grammar. -%% There are two shift/reduce conflicts coming from call_args_parens. -Expect 2. +%% Two shift/reduce conflicts coming from call_args_parens and +%% one coming from empty_paren on stab. +Expect 3. %% Changes in ops and precedence should be reflected on lib/elixir/lib/macro.ex %% Note though the operator => in practice has lower precedence than all others, @@ -54,16 +59,16 @@ Right 60 type_op_eol. %% :: Right 70 pipe_op_eol. %% | Right 80 assoc_op_eol. 
%% => Right 90 match_op_eol. %% = -Left 130 or_op_eol. %% ||, |||, or, xor +Left 130 or_op_eol. %% ||, |||, or Left 140 and_op_eol. %% &&, &&&, and Left 150 comp_op_eol. %% ==, !=, =~, ===, !== Left 160 rel_op_eol. %% <, >, <=, >= -Left 170 arrow_op_eol. %% < (op), (op) > (e.g |>, <<<, >>>) -Left 180 in_op_eol. %% in +Left 170 arrow_op_eol. %% |>, <<<, >>>, ~>>, <<~, ~>, <~, <~>, <|> +Left 180 in_op_eol. %% in, not in +Left 190 three_op_eol. %% ^^^ Right 200 two_op_eol. %% ++, --, .., <> -Left 210 add_op_eol. %% + (op), - (op) -Left 220 mult_op_eol. %% * (op), / (op) -Left 250 hat_op_eol. %% ^ (op) (e.g ^^^) +Left 210 add_op_eol. %% +, - +Left 220 mult_op_eol. %% *, / Nonassoc 300 unary_op_eol. %% +, -, !, ^, not, ~~~ Left 310 dot_call_op. Left 310 dot_op. %% . @@ -72,18 +77,17 @@ Nonassoc 330 dot_identifier. %%% MAIN FLOW OF EXPRESSIONS -grammar -> eol : nil. +grammar -> eoe : nil. grammar -> expr_list : to_block('$1'). -grammar -> eol expr_list : to_block('$2'). -grammar -> expr_list eol : to_block('$1'). -grammar -> eol expr_list eol : to_block('$2'). +grammar -> eoe expr_list : to_block('$2'). +grammar -> expr_list eoe : to_block('$1'). +grammar -> eoe expr_list eoe : to_block('$2'). grammar -> '$empty' : nil. % Note expressions are on reverse order expr_list -> expr : ['$1']. -expr_list -> expr_list eol expr : ['$3'|'$1']. +expr_list -> expr_list eoe expr : ['$3' | '$1']. -expr -> empty_paren : nil. expr -> matched_expr : '$1'. expr -> no_parens_expr : '$1'. expr -> unmatched_expr : '$1'. @@ -92,6 +96,23 @@ expr -> unmatched_expr : '$1'. %% without parentheses and with do blocks. They are represented %% in the AST as matched, no_parens and unmatched. %% +%% Calls without parentheses are further divided according to how +%% problematic they are: +%% +%% (a) no_parens_one: a call with one unproblematic argument +%% (e.g. `f a` or `f g a` and similar) (includes unary operators) +%% +%% (b) no_parens_many: a call with several arguments (e.g. `f a, b`) +%% +%% (c) no_parens_one_ambig: a call with one argument which is +%% itself a no_parens_many or no_parens_one_ambig (e.g. `f g a, b` +%% or `f g h a, b` and similar) +%% +%% Note, in particular, that no_parens_one_ambig expressions are +%% ambiguous and are interpreted such that the outer function has +%% arity 1 (e.g. `f g a, b` is interpreted as `f(g(a, b))` rather +%% than `f(g(a), b)`). Hence the name, no_parens_one_ambig. +%% %% The distinction is required because we can't, for example, have %% a function call with a do block as argument inside another do %% block call, unless there are parentheses: @@ -104,94 +125,108 @@ expr -> unmatched_expr : '$1'. %% %% foo a, bar b, c #=> invalid %% foo(a, bar b, c) #=> invalid -%% foo a, bar b #=> valid +%% foo bar a, b #=> valid %% foo a, bar(b, c) #=> valid %% %% So the different grammar rules need to take into account %% if calls without parentheses are do blocks in particular %% segments and act accordingly. matched_expr -> matched_expr matched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')). -matched_expr -> matched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')). matched_expr -> unary_op_eol matched_expr : build_unary_op('$1', '$2'). -matched_expr -> unary_op_eol no_parens_expr : build_unary_op('$1', '$2'). matched_expr -> at_op_eol matched_expr : build_unary_op('$1', '$2'). -matched_expr -> at_op_eol no_parens_expr : build_unary_op('$1', '$2'). matched_expr -> capture_op_eol matched_expr : build_unary_op('$1', '$2'). 
-matched_expr -> capture_op_eol no_parens_expr : build_unary_op('$1', '$2'). matched_expr -> no_parens_one_expr : '$1'. +matched_expr -> no_parens_zero_expr : '$1'. matched_expr -> access_expr : '$1'. +matched_expr -> access_expr kw_identifier : throw_invalid_kw_identifier('$2'). -no_parens_expr -> dot_op_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2'). -no_parens_expr -> dot_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2'). - -unmatched_expr -> empty_paren op_expr : build_op(element(1, '$2'), nil, element(2, '$2')). -unmatched_expr -> matched_expr op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')). -unmatched_expr -> unmatched_expr op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')). +unmatched_expr -> matched_expr unmatched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')). +unmatched_expr -> unmatched_expr matched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')). +unmatched_expr -> unmatched_expr unmatched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')). +unmatched_expr -> unmatched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')). unmatched_expr -> unary_op_eol expr : build_unary_op('$1', '$2'). unmatched_expr -> at_op_eol expr : build_unary_op('$1', '$2'). unmatched_expr -> capture_op_eol expr : build_unary_op('$1', '$2'). unmatched_expr -> block_expr : '$1'. +no_parens_expr -> matched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')). +no_parens_expr -> unary_op_eol no_parens_expr : build_unary_op('$1', '$2'). +no_parens_expr -> at_op_eol no_parens_expr : build_unary_op('$1', '$2'). +no_parens_expr -> capture_op_eol no_parens_expr : build_unary_op('$1', '$2'). +no_parens_expr -> no_parens_one_ambig_expr : '$1'. +no_parens_expr -> no_parens_many_expr : '$1'. + block_expr -> parens_call call_args_parens do_block : build_identifier('$1', '$2' ++ '$3'). block_expr -> parens_call call_args_parens call_args_parens do_block : build_nested_parens('$1', '$2', '$3' ++ '$4'). block_expr -> dot_do_identifier do_block : build_identifier('$1', '$2'). block_expr -> dot_identifier call_args_no_parens_all do_block : build_identifier('$1', '$2' ++ '$3'). -op_expr -> match_op_eol expr : {'$1', '$2'}. -op_expr -> add_op_eol expr : {'$1', '$2'}. -op_expr -> mult_op_eol expr : {'$1', '$2'}. -op_expr -> hat_op_eol expr : {'$1', '$2'}. -op_expr -> two_op_eol expr : {'$1', '$2'}. -op_expr -> and_op_eol expr : {'$1', '$2'}. -op_expr -> or_op_eol expr : {'$1', '$2'}. -op_expr -> in_op_eol expr : {'$1', '$2'}. -op_expr -> in_match_op_eol expr : {'$1', '$2'}. -op_expr -> type_op_eol expr : {'$1', '$2'}. -op_expr -> when_op_eol expr : {'$1', '$2'}. -op_expr -> pipe_op_eol expr : {'$1', '$2'}. -op_expr -> comp_op_eol expr : {'$1', '$2'}. -op_expr -> rel_op_eol expr : {'$1', '$2'}. -op_expr -> arrow_op_eol expr : {'$1', '$2'}. +matched_op_expr -> match_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> add_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> mult_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> two_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> three_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> and_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> or_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> in_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> in_match_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> type_op_eol matched_expr : {'$1', '$2'}. 
+matched_op_expr -> when_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> pipe_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> comp_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> rel_op_eol matched_expr : {'$1', '$2'}. +matched_op_expr -> arrow_op_eol matched_expr : {'$1', '$2'}. +%% Warn for no parens subset +matched_op_expr -> arrow_op_eol no_parens_one_expr : warn_pipe('$1', '$2'), {'$1', '$2'}. + +unmatched_op_expr -> match_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> add_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> mult_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> two_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> three_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> and_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> or_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> in_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> in_match_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> type_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> when_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> pipe_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> comp_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> rel_op_eol unmatched_expr : {'$1', '$2'}. +unmatched_op_expr -> arrow_op_eol unmatched_expr : {'$1', '$2'}. no_parens_op_expr -> match_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> add_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> mult_op_eol no_parens_expr : {'$1', '$2'}. -no_parens_op_expr -> hat_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> two_op_eol no_parens_expr : {'$1', '$2'}. +no_parens_op_expr -> three_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> and_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> or_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> in_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> in_match_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> type_op_eol no_parens_expr : {'$1', '$2'}. +no_parens_op_expr -> when_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> pipe_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> comp_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> rel_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> arrow_op_eol no_parens_expr : {'$1', '$2'}. +%% Warn for no parens subset +no_parens_op_expr -> arrow_op_eol no_parens_one_ambig_expr : warn_pipe('$1', '$2'), {'$1', '$2'}. +no_parens_op_expr -> arrow_op_eol no_parens_many_expr : warn_pipe('$1', '$2'), {'$1', '$2'}. %% Allow when (and only when) with keywords -no_parens_op_expr -> when_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> when_op_eol call_args_no_parens_kw : {'$1', '$2'}. -matched_op_expr -> match_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> add_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> mult_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> hat_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> two_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> and_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> or_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> in_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> in_match_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> type_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> when_op_eol matched_expr : {'$1', '$2'}. 
-matched_op_expr -> pipe_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> comp_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> rel_op_eol matched_expr : {'$1', '$2'}. -matched_op_expr -> arrow_op_eol matched_expr : {'$1', '$2'}. +no_parens_one_ambig_expr -> dot_op_identifier call_args_no_parens_ambig : build_identifier('$1', '$2'). +no_parens_one_ambig_expr -> dot_identifier call_args_no_parens_ambig : build_identifier('$1', '$2'). + +no_parens_many_expr -> dot_op_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2'). +no_parens_many_expr -> dot_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2'). no_parens_one_expr -> dot_op_identifier call_args_no_parens_one : build_identifier('$1', '$2'). no_parens_one_expr -> dot_identifier call_args_no_parens_one : build_identifier('$1', '$2'). -no_parens_one_expr -> dot_do_identifier : build_identifier('$1', nil). -no_parens_one_expr -> dot_identifier : build_identifier('$1', nil). +no_parens_zero_expr -> dot_do_identifier : build_identifier('$1', nil). +no_parens_zero_expr -> dot_identifier : build_identifier('$1', nil). %% From this point on, we just have constructs that can be %% used with the access syntax. Notice that (dot_)identifier @@ -199,13 +234,16 @@ no_parens_one_expr -> dot_identifier : build_identifier('$1', nil). %% marks identifiers followed by brackets as bracket_identifier. access_expr -> bracket_at_expr : '$1'. access_expr -> bracket_expr : '$1'. -access_expr -> at_op_eol number : build_unary_op('$1', ?exprs('$2')). -access_expr -> unary_op_eol number : build_unary_op('$1', ?exprs('$2')). access_expr -> capture_op_eol number : build_unary_op('$1', ?exprs('$2')). -access_expr -> fn_eol stab end_eol : build_fn('$1', build_stab(reverse('$2'))). +access_expr -> fn_eoe stab end_eoe : build_fn('$1', reverse('$2')). access_expr -> open_paren stab close_paren : build_stab(reverse('$2')). +access_expr -> open_paren stab ';' close_paren : build_stab(reverse('$2')). +access_expr -> open_paren ';' stab ';' close_paren : build_stab(reverse('$3')). +access_expr -> open_paren ';' stab close_paren : build_stab(reverse('$3')). +access_expr -> open_paren ';' close_paren : build_stab([]). +access_expr -> empty_paren : warn_empty_paren('$1'), nil. access_expr -> number : ?exprs('$1'). -access_expr -> signed_number : {element(4, '$1'), meta('$1'), ?exprs('$1')}. +access_expr -> char : ?exprs('$1'). access_expr -> list : element(1, '$1'). access_expr -> map : '$1'. access_expr -> tuple : '$1'. @@ -226,7 +264,6 @@ max_expr -> parens_call call_args_parens : build_identifier('$1', '$2'). max_expr -> parens_call call_args_parens call_args_parens : build_nested_parens('$1', '$2', '$3'). max_expr -> dot_alias : '$1'. -bracket_arg -> open_bracket ']' : build_list('$1', []). bracket_arg -> open_bracket kw close_bracket : build_list('$1', '$2'). bracket_arg -> open_bracket container_expr close_bracket : build_list('$1', '$2'). bracket_arg -> open_bracket container_expr ',' close_bracket : build_list('$1', '$2'). @@ -241,46 +278,58 @@ bracket_at_expr -> at_op_eol access_expr bracket_arg : %% Blocks -do_block -> do_eol 'end' : [[{do,nil}]]. -do_block -> do_eol stab end_eol : [[{do, build_stab(reverse('$2'))}]]. -do_block -> do_eol block_list 'end' : [[{do, nil}|'$2']]. -do_block -> do_eol stab_eol block_list 'end' : [[{do, build_stab(reverse('$2'))}|'$3']]. - -fn_eol -> 'fn' : '$1'. -fn_eol -> 'fn' eol : '$1'. - -do_eol -> 'do' : '$1'. -do_eol -> 'do' eol : '$1'. - -end_eol -> 'end' : '$1'. 
-end_eol -> eol 'end' : '$2'. +do_block -> do_eoe 'end' : [[{do, nil}]]. +do_block -> do_eoe stab end_eoe : [[{do, build_stab(reverse('$2'))}]]. +do_block -> do_eoe block_list 'end' : [[{do, nil} | '$2']]. +do_block -> do_eoe stab_eoe block_list 'end' : [[{do, build_stab(reverse('$2'))} | '$3']]. -block_eol -> block_identifier : '$1'. -block_eol -> block_identifier eol : '$1'. +eoe -> eol : '$1'. +eoe -> ';' : '$1'. +eoe -> eol ';' : '$1'. -stab -> stab_expr : ['$1']. -stab -> stab eol stab_expr : ['$3'|'$1']. +fn_eoe -> 'fn' : '$1'. +fn_eoe -> 'fn' eoe : '$1'. -stab_eol -> stab : '$1'. -stab_eol -> stab eol : '$1'. +do_eoe -> 'do' : '$1'. +do_eoe -> 'do' eoe : '$1'. -stab_expr -> expr : '$1'. -stab_expr -> stab_op_eol stab_maybe_expr : build_op('$1', [], '$2'). -stab_expr -> call_args_no_parens_all stab_op_eol stab_maybe_expr : - build_op('$2', unwrap_when(unwrap_splice('$1')), '$3'). -stab_expr -> stab_parens_many stab_op_eol stab_maybe_expr : - build_op('$2', unwrap_splice('$1'), '$3'). -stab_expr -> stab_parens_many when_op expr stab_op_eol stab_maybe_expr : - build_op('$4', [{'when', meta('$2'), unwrap_splice('$1') ++ ['$3']}], '$5'). +end_eoe -> 'end' : '$1'. +end_eoe -> eoe 'end' : '$2'. -stab_maybe_expr -> 'expr' : '$1'. -stab_maybe_expr -> '$empty' : nil. +block_eoe -> block_identifier : '$1'. +block_eoe -> block_identifier eoe : '$1'. -block_item -> block_eol stab_eol : {?exprs('$1'), build_stab(reverse('$2'))}. -block_item -> block_eol : {?exprs('$1'), nil}. +stab -> stab_expr : ['$1']. +stab -> stab eoe stab_expr : ['$3' | '$1']. + +stab_eoe -> stab : '$1'. +stab_eoe -> stab eoe : '$1'. + +%% Here, `element(1, Token)` is the stab operator, +%% while `element(2, Token)` is the expression. +stab_expr -> expr : + '$1'. +stab_expr -> stab_op_eol_and_expr : + build_op(element(1, '$1'), [], element(2, '$1')). +stab_expr -> empty_paren stab_op_eol_and_expr : + build_op(element(1, '$2'), [], element(2, '$2')). +stab_expr -> empty_paren when_op expr stab_op_eol_and_expr : + build_op(element(1, '$4'), [{'when', meta_from_token('$2'), ['$3']}], element(2, '$4')). +stab_expr -> call_args_no_parens_all stab_op_eol_and_expr : + build_op(element(1, '$2'), unwrap_when(unwrap_splice('$1')), element(2, '$2')). +stab_expr -> stab_parens_many stab_op_eol_and_expr : + build_op(element(1, '$2'), unwrap_splice('$1'), element(2, '$2')). +stab_expr -> stab_parens_many when_op expr stab_op_eol_and_expr : + build_op(element(1, '$4'), [{'when', meta_from_token('$2'), unwrap_splice('$1') ++ ['$3']}], element(2, '$4')). + +stab_op_eol_and_expr -> stab_op_eol expr : {'$1', '$2'}. +stab_op_eol_and_expr -> stab_op_eol : warn_empty_stab_clause('$1'), {'$1', nil}. + +block_item -> block_eoe stab_eoe : {?exprs('$1'), build_stab(reverse('$2'))}. +block_item -> block_eoe : {?exprs('$1'), nil}. block_list -> block_item : ['$1']. -block_list -> block_item block_list : ['$1'|'$2']. +block_list -> block_item block_list : ['$1' | '$2']. %% Helpers @@ -291,20 +340,20 @@ close_paren -> eol ')' : '$2'. empty_paren -> open_paren ')' : '$1'. -open_bracket -> '[' : '$1'. -open_bracket -> '[' eol : '$1'. -close_bracket -> ']' : '$1'. -close_bracket -> eol ']' : '$2'. +open_bracket -> '[' : '$1'. +open_bracket -> '[' eol : '$1'. +close_bracket -> ']' : '$1'. +close_bracket -> eol ']' : '$2'. -open_bit -> '<<' : '$1'. -open_bit -> '<<' eol : '$1'. -close_bit -> '>>' : '$1'. -close_bit -> eol '>>' : '$2'. +open_bit -> '<<' : '$1'. +open_bit -> '<<' eol : '$1'. +close_bit -> '>>' : '$1'. +close_bit -> eol '>>' : '$2'. 
open_curly -> '{' : '$1'. open_curly -> '{' eol : '$1'. -close_curly -> '}' : '$1'. -close_curly -> eol '}' : '$2'. +close_curly -> '}' : '$1'. +close_curly -> eol '}' : '$2'. % Operators @@ -316,12 +365,12 @@ add_op_eol -> dual_op eol : '$1'. mult_op_eol -> mult_op : '$1'. mult_op_eol -> mult_op eol : '$1'. -hat_op_eol -> hat_op : '$1'. -hat_op_eol -> hat_op eol : '$1'. - two_op_eol -> two_op : '$1'. two_op_eol -> two_op eol : '$1'. +three_op_eol -> three_op : '$1'. +three_op_eol -> three_op eol : '$1'. + pipe_op_eol -> pipe_op : '$1'. pipe_op_eol -> pipe_op eol : '$1'. @@ -377,8 +426,12 @@ dot_op -> '.' eol : '$1'. dot_identifier -> identifier : '$1'. dot_identifier -> matched_expr dot_op identifier : build_dot('$2', '$1', '$3'). -dot_alias -> aliases : {'__aliases__', meta('$1', 0), ?exprs('$1')}. +dot_alias -> aliases : {'__aliases__', meta_from_token('$1', 0), ?exprs('$1')}. dot_alias -> matched_expr dot_op aliases : build_dot_alias('$2', '$1', '$3'). +dot_alias -> matched_expr dot_op dot_alias_container : build_dot_container('$2', '$1', '$3'). + +dot_alias_container -> open_curly '}' : []. +dot_alias_container -> open_curly container_args close_curly : '$2'. dot_op_identifier -> op_identifier : '$1'. dot_op_identifier -> matched_expr dot_op op_identifier : build_dot('$2', '$1', '$3'). @@ -393,59 +446,63 @@ dot_paren_identifier -> paren_identifier : '$1'. dot_paren_identifier -> matched_expr dot_op paren_identifier : build_dot('$2', '$1', '$3'). parens_call -> dot_paren_identifier : '$1'. -parens_call -> matched_expr dot_call_op : {'.', meta('$2'), ['$1']}. % Fun/local calls +parens_call -> matched_expr dot_call_op : {'.', meta_from_token('$2'), ['$1']}. % Fun/local calls % Function calls with no parentheses call_args_no_parens_expr -> matched_expr : '$1'. -call_args_no_parens_expr -> empty_paren : nil. call_args_no_parens_expr -> no_parens_expr : throw_no_parens_many_strict('$1'). call_args_no_parens_comma_expr -> matched_expr ',' call_args_no_parens_expr : ['$3', '$1']. -call_args_no_parens_comma_expr -> call_args_no_parens_comma_expr ',' call_args_no_parens_expr : ['$3'|'$1']. +call_args_no_parens_comma_expr -> call_args_no_parens_comma_expr ',' call_args_no_parens_expr : ['$3' | '$1']. call_args_no_parens_all -> call_args_no_parens_one : '$1'. +call_args_no_parens_all -> call_args_no_parens_ambig : '$1'. call_args_no_parens_all -> call_args_no_parens_many : '$1'. call_args_no_parens_one -> call_args_no_parens_kw : ['$1']. call_args_no_parens_one -> matched_expr : ['$1']. -call_args_no_parens_one -> no_parens_expr : ['$1']. + +call_args_no_parens_ambig -> no_parens_expr : ['$1']. call_args_no_parens_many -> matched_expr ',' call_args_no_parens_kw : ['$1', '$3']. call_args_no_parens_many -> call_args_no_parens_comma_expr : reverse('$1'). -call_args_no_parens_many -> call_args_no_parens_comma_expr ',' call_args_no_parens_kw : reverse(['$3'|'$1']). +call_args_no_parens_many -> call_args_no_parens_comma_expr ',' call_args_no_parens_kw : reverse(['$3' | '$1']). call_args_no_parens_many_strict -> call_args_no_parens_many : '$1'. -call_args_no_parens_many_strict -> empty_paren : throw_no_parens_strict('$1'). call_args_no_parens_many_strict -> open_paren call_args_no_parens_kw close_paren : throw_no_parens_strict('$1'). call_args_no_parens_many_strict -> open_paren call_args_no_parens_many close_paren : throw_no_parens_strict('$1'). -stab_parens_many -> empty_paren : []. stab_parens_many -> open_paren call_args_no_parens_kw close_paren : ['$2']. 
stab_parens_many -> open_paren call_args_no_parens_many close_paren : '$2'. -% Containers and function calls with parentheses +% Containers -container_expr -> empty_paren : nil. container_expr -> matched_expr : '$1'. container_expr -> unmatched_expr : '$1'. -container_expr -> no_parens_expr : throw_no_parens_many_strict('$1'). +container_expr -> no_parens_expr : throw_no_parens_container_strict('$1'). container_args_base -> container_expr : ['$1']. -container_args_base -> container_args_base ',' container_expr : ['$3'|'$1']. +container_args_base -> container_args_base ',' container_expr : ['$3' | '$1']. container_args -> container_args_base : lists:reverse('$1'). container_args -> container_args_base ',' : lists:reverse('$1'). -container_args -> container_args_base ',' kw : lists:reverse(['$3'|'$1']). +container_args -> container_args_base ',' kw : lists:reverse(['$3' | '$1']). + +% Function calls with parentheses + +call_args_parens_expr -> matched_expr : '$1'. +call_args_parens_expr -> unmatched_expr : '$1'. +call_args_parens_expr -> no_parens_expr : throw_no_parens_many_strict('$1'). -call_args_parens_base -> container_expr : ['$1']. -call_args_parens_base -> call_args_parens_base ',' container_expr : ['$3'|'$1']. +call_args_parens_base -> call_args_parens_expr : ['$1']. +call_args_parens_base -> call_args_parens_base ',' call_args_parens_expr : ['$3' | '$1']. call_args_parens -> empty_paren : []. call_args_parens -> open_paren no_parens_expr close_paren : ['$2']. call_args_parens -> open_paren kw close_paren : ['$2']. call_args_parens -> open_paren call_args_parens_base close_paren : reverse('$2'). -call_args_parens -> open_paren call_args_parens_base ',' kw close_paren : reverse(['$4'|'$2']). +call_args_parens -> open_paren call_args_parens_base ',' kw close_paren : reverse(['$4' | '$2']). % KV @@ -457,14 +514,16 @@ kw_eol -> kw_identifier_unsafe : build_quoted_atom('$1', false). kw_eol -> kw_identifier_unsafe eol : build_quoted_atom('$1', false). kw_base -> kw_eol container_expr : [{'$1', '$2'}]. -kw_base -> kw_base ',' kw_eol container_expr : [{'$3', '$4'}|'$1']. +kw_base -> kw_base ',' kw_eol container_expr : [{'$3', '$4'} | '$1']. kw -> kw_base : reverse('$1'). kw -> kw_base ',' : reverse('$1'). -call_args_no_parens_kw_expr -> kw_eol call_args_no_parens_expr : {'$1','$2'}. +call_args_no_parens_kw_expr -> kw_eol matched_expr : {'$1', '$2'}. +call_args_no_parens_kw_expr -> kw_eol no_parens_expr : {'$1', '$2'}. + call_args_no_parens_kw -> call_args_no_parens_kw_expr : ['$1']. -call_args_no_parens_kw -> call_args_no_parens_kw_expr ',' call_args_no_parens_kw : ['$1'|'$3']. +call_args_no_parens_kw -> call_args_no_parens_kw_expr ',' call_args_no_parens_kw : ['$1' | '$3']. % Lists @@ -488,25 +547,29 @@ bit_string -> open_bit container_args close_bit : build_bit('$1', '$2'). % Map and structs +%% Allow unquote/@something/aliases inside maps and structs. map_expr -> max_expr : '$1'. map_expr -> dot_identifier : build_identifier('$1', nil). +map_expr -> unary_op_eol map_expr : build_unary_op('$1', '$2'). map_expr -> at_op_eol map_expr : build_unary_op('$1', '$2'). assoc_op_eol -> assoc_op : '$1'. assoc_op_eol -> assoc_op eol : '$1'. -assoc_expr -> container_expr assoc_op_eol container_expr : {'$1', '$3'}. +assoc_expr -> matched_expr assoc_op_eol matched_expr : {'$1', '$3'}. +assoc_expr -> unmatched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}. +assoc_expr -> matched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}. +assoc_expr -> unmatched_expr assoc_op_eol matched_expr : {'$1', '$3'}. 
assoc_expr -> map_expr : '$1'. -assoc_update -> matched_expr pipe_op_eol matched_expr assoc_op_eol matched_expr : {'$2', '$1', [{'$3', '$5'}]}. -assoc_update -> unmatched_expr pipe_op_eol expr assoc_op_eol expr : {'$2', '$1', [{'$3', '$5'}]}. -assoc_update -> matched_expr pipe_op_eol map_expr : {'$2', '$1', ['$3']}. +assoc_update -> matched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}. +assoc_update -> unmatched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}. assoc_update_kw -> matched_expr pipe_op_eol kw : {'$2', '$1', '$3'}. assoc_update_kw -> unmatched_expr pipe_op_eol kw : {'$2', '$1', '$3'}. assoc_base -> assoc_expr : ['$1']. -assoc_base -> assoc_base ',' assoc_expr : ['$3'|'$1']. +assoc_base -> assoc_base ',' assoc_expr : ['$3' | '$1']. assoc -> assoc_base : reverse('$1'). assoc -> assoc_base ',' : reverse('$1'). @@ -526,74 +589,87 @@ map_args -> open_curly assoc_update ',' map_close : build_map_update('$1', '$2', map_args -> open_curly assoc_update_kw close_curly : build_map_update('$1', '$2', []). struct_op -> '%' : '$1'. -struct_op -> '%' eol : '$1'. map -> map_op map_args : '$2'. -map -> struct_op map_expr map_args : {'%', meta('$1'), ['$2', '$3']}. -map -> struct_op map_expr eol map_args : {'%', meta('$1'), ['$2', '$4']}. +map -> struct_op map_expr map_args : {'%', meta_from_token('$1'), ['$2', '$3']}. +map -> struct_op map_expr eol map_args : {'%', meta_from_token('$1'), ['$2', '$4']}. Erlang code. --define(id(Node), element(1, Node)). --define(line(Node), element(2, Node)). --define(exprs(Node), element(3, Node)). +-define(file(), get(elixir_parser_file)). +-define(id(Token), element(1, Token)). +-define(location(Token), element(2, Token)). +-define(exprs(Token), element(3, Token)). +-define(meta(Node), element(2, Node)). -define(rearrange_uop(Op), (Op == 'not' orelse Op == '!')). %% The following directive is needed for (significantly) faster %% compilation of the generated .erl file by the HiPE compiler --compile([{hipe,[{regalloc,linear_scan}]}]). +-compile([{hipe, [{regalloc, linear_scan}]}]). -import(lists, [reverse/1, reverse/2]). -meta(Line, Counter) -> [{counter,Counter}|meta(Line)]. -meta(Line) when is_integer(Line) -> [{line,Line}]; -meta(Node) -> meta(?line(Node)). +meta_from_token(Token, Counter) -> [{counter, Counter} | meta_from_token(Token)]. +meta_from_token(Token) -> meta_from_location(?location(Token)). -%% Operators +meta_from_location({Line, Column, EndColumn}) + when is_integer(Line), is_integer(Column), is_integer(EndColumn) -> [{line, Line}]. -build_op({_Kind, Line, 'in'}, {UOp, _, [Left]}, Right) when ?rearrange_uop(UOp) -> - {UOp, meta(Line), [{'in', meta(Line), [Left, Right]}]}; +%% Operators -build_op({_Kind, Line, Op}, Left, Right) -> - {Op, meta(Line), [Left, Right]}. +build_op({_Kind, Location, 'in'}, {UOp, _, [Left]}, Right) when ?rearrange_uop(UOp) -> + %% TODO: Deprecate "not left in right" rearrangement on 1.7 + {UOp, meta_from_location(Location), [{'in', meta_from_location(Location), [Left, Right]}]}; +build_op({_Kind, Location, 'not in'}, Left, Right) -> + {'not', meta_from_location(Location), [{'in', meta_from_location(Location), [Left, Right]}]}; +build_op({_Kind, Location, Op}, Left, Right) -> + {Op, meta_from_location(Location), [Left, Right]}. -build_unary_op({_Kind, Line, Op}, Expr) -> - {Op, meta(Line), [Expr]}. +build_unary_op({_Kind, Location, Op}, Expr) -> + {Op, meta_from_location(Location), [Expr]}. build_list(Marker, Args) -> - {Args, ?line(Marker)}. + {Args, ?location(Marker)}. 
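Reviewer note: `build_op/3` above now handles `'not in'` directly and keeps the old `not ... in` rearrangement until it is deprecated (see the TODO). The resulting AST can be checked from IEx; an illustrative session, assuming only what the clauses show:

    iex> Code.string_to_quoted!("a not in [1, 2]")
    {:not, [line: 1], [{:in, [line: 1], [{:a, [line: 1], nil}, [1, 2]]}]}

    # Still rearranged into the same shape for now (see TODO above):
    iex> Code.string_to_quoted!("not a in [1, 2]")
    {:not, [line: 1], [{:in, [line: 1], [{:a, [line: 1], nil}, [1, 2]]}]}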
build_tuple(_Marker, [Left, Right]) -> {Left, Right}; build_tuple(Marker, Args) -> - {'{}', meta(Marker), Args}. + {'{}', meta_from_token(Marker), Args}. build_bit(Marker, Args) -> - {'<<>>', meta(Marker), Args}. + {'<<>>', meta_from_token(Marker), Args}. build_map(Marker, Args) -> - {'%{}', meta(Marker), Args}. + {'%{}', meta_from_token(Marker), Args}. build_map_update(Marker, {Pipe, Left, Right}, Extra) -> - {'%{}', meta(Marker), [build_op(Pipe, Left, Right ++ Extra)]}. + {'%{}', meta_from_token(Marker), [build_op(Pipe, Left, Right ++ Extra)]}. %% Blocks -build_block([{Op,_,[_]}]=Exprs) when ?rearrange_uop(Op) -> {'__block__', [], Exprs}; -build_block([{unquote_splicing,_,Args}]=Exprs) when - length(Args) =< 2 -> {'__block__', [], Exprs}; -build_block([Expr]) -> Expr; -build_block(Exprs) -> {'__block__', [], Exprs}. +build_block([{Op, _, [_]}]=Exprs) when ?rearrange_uop(Op) -> + {'__block__', [], Exprs}; +build_block([{unquote_splicing, _, Args}]=Exprs) when length(Args) =< 2 -> + {'__block__', [], Exprs}; +build_block([Expr]) -> + Expr; +build_block(Exprs) -> + {'__block__', [], Exprs}. %% Dots build_dot_alias(Dot, {'__aliases__', _, Left}, {'aliases', _, Right}) -> - {'__aliases__', meta(Dot), Left ++ Right}; - + {'__aliases__', meta_from_token(Dot), Left ++ Right}; +build_dot_alias(_Dot, Atom, {'aliases', _, _} = Token) when is_atom(Atom) -> + throw_bad_atom(Token); build_dot_alias(Dot, Other, {'aliases', _, Right}) -> - {'__aliases__', meta(Dot), [Other|Right]}. + {'__aliases__', meta_from_token(Dot), [Other | Right]}. + +build_dot_container(Dot, Left, Right) -> + Meta = meta_from_token(Dot), + {{'.', Meta, [Left, '{}']}, Meta, Right}. build_dot(Dot, Left, Right) -> - {'.', meta(Dot), [Left, extract_identifier(Right)]}. + {'.', meta_from_token(Dot), [Left, extract_identifier(Right)]}. extract_identifier({Kind, _, Identifier}) when Kind == identifier; Kind == bracket_identifier; Kind == paren_identifier; @@ -604,7 +680,7 @@ extract_identifier({Kind, _, Identifier}) when build_nested_parens(Dot, Args1, Args2) -> Identifier = build_identifier(Dot, Args1), - Meta = element(2, Identifier), + Meta = ?meta(Identifier), {Identifier, Meta, Args2}. build_identifier({'.', Meta, _} = Dot, Args) -> @@ -614,47 +690,46 @@ build_identifier({'.', Meta, _} = Dot, Args) -> end, {Dot, Meta, FArgs}; -build_identifier({Keyword, Line}, Args) when Keyword == fn -> - {fn, meta(Line), Args}; - -build_identifier({op_identifier, Line, Identifier}, [Arg]) -> - {Identifier, [{ambiguous_op,nil}|meta(Line)], [Arg]}; +build_identifier({op_identifier, Location, Identifier}, [Arg]) -> + {Identifier, [{ambiguous_op, nil} | meta_from_location(Location)], [Arg]}; -build_identifier({_, Line, Identifier}, Args) -> - {Identifier, meta(Line), Args}. +build_identifier({_, Location, Identifier}, Args) -> + {Identifier, meta_from_location(Location), Args}. %% Fn -build_fn(Op, Stab) -> - {fn, meta(Op), Stab}. +build_fn(Op, [{'->', _, [_, _]} | _] = Stab) -> + {fn, meta_from_token(Op), build_stab(Stab)}; +build_fn(Op, _Stab) -> + throw(meta_from_token(Op), "expected clauses to be defined with -> inside: ", "'fn'"). %% Access -build_access(Expr, {List, Line}) -> - Meta = meta(Line), +build_access(Expr, {List, Location}) -> + Meta = meta_from_location(Location), {{'.', Meta, ['Elixir.Access', get]}, Meta, [Expr, List]}. %% Interpolation aware -build_sigil({sigil, Line, Sigil, Parts, Modifiers}) -> - Meta = meta(Line), - {list_to_atom("sigil_" ++ [Sigil]), Meta, [ {'<<>>', Meta, string_parts(Parts)}, Modifiers ]}. 
+build_sigil({sigil, Location, Sigil, Parts, Modifiers}) -> + Meta = meta_from_location(Location), + {list_to_atom("sigil_" ++ [Sigil]), Meta, [{'<<>>', Meta, string_parts(Parts)}, Modifiers]}. -build_bin_string({bin_string, _Line, [H]}) when is_binary(H) -> +build_bin_string({bin_string, _Location, [H]}) when is_binary(H) -> H; -build_bin_string({bin_string, Line, Args}) -> - {'<<>>', meta(Line), string_parts(Args)}. +build_bin_string({bin_string, Location, Args}) -> + {'<<>>', meta_from_location(Location), string_parts(Args)}. -build_list_string({list_string, _Line, [H]}) when is_binary(H) -> +build_list_string({list_string, _Location, [H]}) when is_binary(H) -> elixir_utils:characters_to_list(H); -build_list_string({list_string, Line, Args}) -> - Meta = meta(Line), - {{'.', Meta, ['Elixir.String', to_char_list]}, Meta, [{'<<>>', Meta, string_parts(Args)}]}. +build_list_string({list_string, Location, Args}) -> + Meta = meta_from_location(Location), + {{'.', Meta, ['Elixir.String', to_charlist]}, Meta, [{'<<>>', Meta, string_parts(Args)}]}. -build_quoted_atom({_, _Line, [H]}, Safe) when is_binary(H) -> +build_quoted_atom({_, _Location, [H]}, Safe) when is_binary(H) -> Op = binary_to_atom_op(Safe), erlang:Op(H, utf8); -build_quoted_atom({_, Line, Args}, Safe) -> - Meta = meta(Line), +build_quoted_atom({_, Location, Args}, Safe) -> + Meta = meta_from_location(Location), {{'.', Meta, [erlang, binary_to_atom_op(Safe)]}, Meta, [{'<<>>', Meta, string_parts(Args)}, utf8]}. binary_to_atom_op(true) -> binary_to_existing_atom; @@ -664,9 +739,9 @@ string_parts(Parts) -> [string_part(Part) || Part <- Parts]. string_part(Binary) when is_binary(Binary) -> Binary; -string_part({Line, Tokens}) -> +string_part({Location, Tokens}) -> Form = string_tokens_parse(Tokens), - Meta = meta(Line), + Meta = meta_from_location(Location), {'::', Meta, [{{'.', Meta, ['Elixir.Kernel', to_string]}, Meta, [Form]}, {binary, Meta, nil}]}. string_tokens_parse(Tokens) -> @@ -677,27 +752,27 @@ string_tokens_parse(Tokens) -> %% Keywords -build_stab([{'->', Meta, [Left, Right]}|T]) -> +build_stab([{'->', Meta, [Left, Right]} | T]) -> build_stab(Meta, T, Left, [Right], []); build_stab(Else) -> build_block(Else). -build_stab(Old, [{'->', New, [Left, Right]}|T], Marker, Temp, Acc) -> +build_stab(Old, [{'->', New, [Left, Right]} | T], Marker, Temp, Acc) -> H = {'->', Old, [Marker, build_block(reverse(Temp))]}, - build_stab(New, T, Left, [Right], [H|Acc]); + build_stab(New, T, Left, [Right], [H | Acc]); -build_stab(Meta, [H|T], Marker, Temp, Acc) -> - build_stab(Meta, T, Marker, [H|Temp], Acc); +build_stab(Meta, [H | T], Marker, Temp, Acc) -> + build_stab(Meta, T, Marker, [H | Temp], Acc); build_stab(Meta, [], Marker, Temp, Acc) -> H = {'->', Meta, [Marker, build_block(reverse(Temp))]}, - reverse([H|Acc]). + reverse([H | Acc]). %% Every time the parser sees a (unquote_splicing()) %% it assumes that a block is being spliced, wrapping %% the splicing in a __block__. But in the stab clause, -%% we can have (unquote_splicing(1,2,3)) -> :ok, in such +%% we can have (unquote_splicing(1, 2, 3)) -> :ok, in such %% case, we don't actually want the block, since it is %% an arg style call. unwrap_splice unwraps the splice %% from such blocks. @@ -717,22 +792,81 @@ unwrap_when(Args) -> to_block([One]) -> One; to_block(Other) -> {'__block__', [], reverse(Other)}. -%% Errors - -throw(Line, Error, Token) -> - throw({error, {Line, ?MODULE, [Error, Token]}}). - -throw_no_parens_strict(Token) -> - throw(?line(Token), "unexpected parenthesis. 
If you are making a " - "function call, do not insert spaces in between the function name and the " - "opening parentheses. Syntax error before: ", "'('"). +%% Warnings and errors -throw_no_parens_many_strict(Token) -> +throw(Meta, Error, Token) -> Line = - case lists:keyfind(line, 1, element(2, Token)) of + case lists:keyfind(line, 1, Meta) of {line, L} -> L; false -> 0 end, + throw({error, {Line, ?MODULE, [Error, Token]}}). + +throw_bad_atom(Token) -> + throw(meta_from_token(Token), "atom cannot be followed by an alias. If the '.' was meant to be " + "part of the atom's name, the atom name must be quoted. Syntax error before: ", "'.'"). + +throw_no_parens_strict(Token) -> + throw(meta_from_token(Token), "unexpected parentheses. If you are making a " + "function call, do not insert spaces between the function name and the " + "opening parentheses. Syntax error before: ", "'('"). - throw(Line, "unexpected comma. Parentheses are required to solve ambiguity " - "in nested calls. Syntax error before: ", "','"). +throw_no_parens_many_strict(Node) -> + throw(?meta(Node), + "unexpected comma. Parentheses are required to solve ambiguity in nested calls.\n\n" + "This error happens when you have nested function calls without parentheses. " + "For example:\n\n" + " one a, two b, c, d\n\n" + "In the example above, we don't know if the parameters \"c\" and \"d\" apply " + "to the function \"one\" or \"two\". You can solve this by explicitly adding " + "parentheses:\n\n" + " one a, two(b, c, d)\n\n" + "Or by adding commas (in case a nested call is not intended):\n\n" + " one, a, two, b, c, d\n\n" + "Elixir cannot compile otherwise. Syntax error before: ", "','"). + +throw_no_parens_container_strict(Node) -> + throw(?meta(Node), + "unexpected comma. Parentheses are required to solve ambiguity inside containers.\n\n" + "This error may happen when you forget a comma in a list or other container:\n\n" + " [a, b c, d]\n\n" + "Or when you have ambiguous calls:\n\n" + " [one, two three, four, five]\n\n" + "In the example above, we don't know if the parameters \"four\" and \"five\" " + "belongs to the list or the function \"two\". You can solve this by explicitly " + "adding parentheses:\n\n" + " [one, two(three, four), five]\n\n" + "Elixir cannot compile otherwise. Syntax error before: ", "','"). + +throw_invalid_kw_identifier({_, _, do} = Token) -> + throw(meta_from_token(Token), elixir_tokenizer:invalid_do_error("unexpected keyword \"do:\""), "'do:'"); +throw_invalid_kw_identifier({_, _, KW} = Token) -> + throw(meta_from_token(Token), "syntax error before: ", "'" ++ atom_to_list(KW) ++ "':"). + +%% TODO: Make this an error on Elixir v2.0. +warn_empty_paren({_, {Line, _, _}}) -> + elixir_errors:warn(Line, ?file(), + "invalid expression (). " + "If you want to invoke or define a function, make sure there are " + "no spaces between the function name and its arguments. If you wanted " + "to pass an empty block, pass a value instead, such as a nil or an atom"). + +%% TODO: Make this an error on Elixir v2.0. +warn_empty_stab_clause({stab_op, {Line, _Begin, _End}, '->'}) -> + elixir_errors:warn(Line, ?file(), + "an expression is always required on the right side of ->. " + "Please provide a value after ->"). + +warn_pipe({arrow_op, {Line, _Begin, _End}, Op}, {_, [_ | _], [_ | _]}) -> + elixir_errors:warn(Line, ?file(), + io_lib:format( + "parentheses are required when piping into a function call. 
For example:\n\n" + " foo 1 ~ts bar 2 ~ts baz 3\n\n" + "is ambiguous and should be written as\n\n" + " foo(1) ~ts bar(2) ~ts baz(3)\n\n" + "Ambiguous pipe found at:", + [Op, Op, Op, Op] + ) + ); +warn_pipe(_Token, _) -> + ok. diff --git a/lib/elixir/src/elixir_quote.erl b/lib/elixir/src/elixir_quote.erl index 9e8cbc7bd9f..5263b9bdd58 100644 --- a/lib/elixir/src/elixir_quote.erl +++ b/lib/elixir/src/elixir_quote.erl @@ -1,10 +1,10 @@ -module(elixir_quote). -export([escape/2, linify/2, linify/3, linify_with_context_counter/3, quote/4]). --export([dot/6, tail_list/3, list/2]). %% Quote callbacks +-export([dot/5, tail_list/3, list/2]). %% Quote callbacks -include("elixir.hrl"). --define(defs(Kind), Kind == def; Kind == defp; Kind == defmacro; Kind == defmacrop). --define(lexical(Kind), Kind == import; Kind == alias; Kind == '__aliases__'). +-define(defs(Kind), Kind == def; Kind == defp; Kind == defmacro; Kind == defmacrop; Kind == '@'). +-define(lexical(Kind), Kind == import; Kind == alias; Kind == require). -compile({inline, [keyfind/2, keystore/3, keydelete/2, keyreplace/3, keynew/3]}). %% Apply the line from site call on quoted contents. @@ -23,7 +23,8 @@ do_linify(Line, Key, {Receiver, Counter} = Var, {Left, Meta, Receiver}) when is_atom(Left), is_list(Meta), Left /= '_' -> do_tuple_linify(Line, Key, Var, keynew(counter, Meta, Counter), Left, Receiver); -do_linify(Line, Key, {_, Counter} = Var, {Lexical, [_|_] = Meta, [_|_] = Args}) when ?lexical(Lexical) -> +do_linify(Line, Key, {_, Counter} = Var, {Lexical, [_ | _] = Meta, [_ | _] = Args}) + when ?lexical(Lexical); Lexical == '__aliases__' -> do_tuple_linify(Line, Key, Var, keynew(counter, Meta, Counter), Lexical, Args); do_linify(Line, Key, Var, {Left, Meta, Right}) when is_list(Meta) -> @@ -51,21 +52,21 @@ do_linify_meta(Line, line, Meta) -> _ -> keystore(line, Meta, Line) end; -do_linify_meta(Line, Key, Meta) -> - case keyfind(Key, Meta) of - {Key, Int} when is_integer(Int), Int /= 0 -> - keyreplace(Key, Meta, {line, Int}); +do_linify_meta(Line, keep, Meta) -> + case keyfind(keep, Meta) of + {keep, Int} when is_integer(Int), Int /= 0 -> + keyreplace(keep, keydelete(line, Meta), {line, Int}); _ -> do_linify_meta(Line, line, Meta) end. %% Some expressions cannot be unquoted at compilation time. %% This function is responsible for doing runtime unquoting. -dot(Meta, Left, Right, Args, Context, File) -> - annotate(dot(Meta, Left, Right, Args), Context, File). +dot(Meta, Left, Right, Args, Context) -> + annotate(dot(Meta, Left, Right, Args), Context). dot(Meta, Left, {'__aliases__', _, Args}, nil) -> - {'__aliases__', Meta, [Left|Args]}; + {'__aliases__', Meta, [Left | Args]}; dot(Meta, Left, Right, nil) when is_atom(Right) -> case atom_to_list(Right) of @@ -105,8 +106,8 @@ tail_list(Left, Right, Tail) when is_list(Right), is_list(Tail) -> tail_list(Left, Right, Tail) when is_list(Left) -> validate_list(Left), - [H|T] = lists:reverse(Tail ++ Left), - lists:reverse([{'|', [], [H, Right]}|T]). + [H | T] = lists:reverse(Tail ++ Left), + lists:reverse([{'|', [], [H, Right]} | T]). validate_list(List) when is_list(List) -> ok; @@ -115,30 +116,31 @@ validate_list(List) when not is_list(List) -> ('Elixir.Kernel':inspect(List))/binary>>). argument_error(Message) -> - error('Elixir.ArgumentError':exception([{message,Message}])). + error('Elixir.ArgumentError':exception([{message, Message}])). -%% Annotates the AST with context and other info +%% Annotates the AST with context and other info. 
+%% +%% Note we need to delete the counter because linify +%% adds the counter recursively, even inside quoted +%% expressions, so we need to clean up the forms to +%% allow them to get a new counter on the next expansion. -annotate({Def, Meta, [{H, M, A}|T]}, Context, File) when ?defs(Def) -> - %% Store the context information in the first element of the - %% definition tuple so we can access it later on. - MM = keystore(context, keystore(file, M, File), Context), - {Def, Meta, [{H, MM, A}|T]}; -annotate({{'.', _, [_, Def]} = Target, Meta, [{H, M, A}|T]}, Context, File) when ?defs(Def) -> - MM = keystore(context, keystore(file, M, File), Context), - {Target, Meta, [{H, MM, A}|T]}; +annotate({Def, Meta, [{H, M, A} | T]}, Context) when ?defs(Def) -> + {Def, Meta, [{H, keystore(context, M, Context), A} | T]}; +annotate({{'.', _, [_, Def]} = Target, Meta, [{H, M, A} | T]}, Context) when ?defs(Def) -> + {Target, Meta, [{H, keystore(context, M, Context), A} | T]}; -annotate({Lexical, Meta, [_|_] = Args}, Context, _File) when Lexical == import; Lexical == alias -> +annotate({Lexical, Meta, [_ | _] = Args}, Context) when ?lexical(Lexical) -> NewMeta = keystore(context, keydelete(counter, Meta), Context), {Lexical, NewMeta, Args}; -annotate(Tree, _Context, _File) -> Tree. +annotate(Tree, _Context) -> Tree. %% Escapes the given expression. It is similar to quote, but %% lines are kept and hygiene mechanisms are disabled. escape(Expr, Unquote) -> {Res, Q} = quote(Expr, nil, #elixir_quote{ line=true, - keep=false, + file=nil, vars_hygiene=false, aliases_hygiene=false, imports_hygiene=false, @@ -149,6 +151,10 @@ escape(Expr, Unquote) -> %% Quotes an expression and return its quoted Elixir AST. +quote({unquote_splicing, _, [_]}, _Binding, #elixir_quote{unquote=true}, _) -> + argument_error(<<"unquote_splicing only works inside arguments and block contexts, " + "wrap it in parens if you want it to work with one-liners">>); + quote(Expr, nil, Q, E) -> do_quote(Expr, Q, E); @@ -163,7 +169,7 @@ quote(Expr, Binding, Q, E) -> } || {K, V} <- Binding], {TExprs, TQ} = do_quote(Expr, Q, E), - {{'{}',[], ['__block__',[], Vars ++ [TExprs] ]}, TQ}. + {{'{}', [], ['__block__', [], Vars ++ [TExprs] ]}, TQ}. 
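Reviewer note: the new top-level `unquote_splicing` clause in `quote/4` above turns a previously confusing expansion into an explicit error. An illustrative IEx session (not part of the patch):

    iex> quote do: unquote_splicing([1, 2, 3])
    ** (ArgumentError) unquote_splicing only works inside arguments and block contexts, wrap it in parens if you want it to work with one-liners

    # Wrapping it in parens makes it a block context, as the message suggests:
    iex> quote do: (unquote_splicing([1, 2, 3]))
    {:__block__, [], [1, 2, 3]}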
%% Actual quoting and helpers @@ -176,19 +182,22 @@ do_quote({unquote, _Meta, [Expr]}, #elixir_quote{unquote=true} = Q, _) -> %% Aliases -do_quote({'__aliases__', Meta, [H|T]} = Alias, #elixir_quote{aliases_hygiene=true} = Q, E) when is_atom(H) and (H /= 'Elixir') -> - Annotation = case elixir_aliases:expand(Alias, ?m(E, aliases), - ?m(E, macro_aliases), ?m(E, lexical_tracker)) of - Atom when is_atom(Atom) -> Atom; - Aliases when is_list(Aliases) -> false - end, +do_quote({'__aliases__', Meta, [H | T]} = Alias, #elixir_quote{aliases_hygiene=true} = Q, E) when is_atom(H) and (H /= 'Elixir') -> + Annotation = + case elixir_aliases:expand(Alias, ?key(E, aliases), ?key(E, macro_aliases), ?key(E, lexical_tracker)) of + Atom when is_atom(Atom) -> Atom; + Aliases when is_list(Aliases) -> false + end, AliasMeta = keystore(alias, keydelete(counter, Meta), Annotation), - do_quote_tuple({'__aliases__', AliasMeta, [H|T]}, Q, E); + do_quote_tuple('__aliases__', AliasMeta, [H | T], Q, E); %% Vars +do_quote({Left, Meta, nil}, #elixir_quote{vars_hygiene=true, imports_hygiene=true} = Q, E) when is_atom(Left) -> + do_quote_import(Left, Meta, Q#elixir_quote.context, Q, E); + do_quote({Left, Meta, nil}, #elixir_quote{vars_hygiene=true} = Q, E) when is_atom(Left) -> - do_quote_tuple({Left, Meta, Q#elixir_quote.context}, Q, E); + do_quote_tuple(Left, Meta, Q#elixir_quote.context, Q, E); %% Unquote @@ -201,31 +210,14 @@ do_quote({{'.', Meta, [Left, unquote]}, _, [Expr]}, #elixir_quote{unquote=true} %% Imports do_quote({'&', Meta, [{'/', _, [{F, _, C}, A]}] = Args}, - #elixir_quote{imports_hygiene=true} = Q, E) when is_atom(F), is_integer(A), is_atom(C) -> + #elixir_quote{imports_hygiene=true} = Q, E) when is_atom(F), is_integer(A), is_atom(C) -> do_quote_fa('&', Meta, Args, F, A, Q, E); do_quote({Name, Meta, ArgsOrAtom}, #elixir_quote{imports_hygiene=true} = Q, E) when is_atom(Name) -> - Arity = case is_atom(ArgsOrAtom) of - true -> 0; - false -> length(ArgsOrAtom) - end, - - NewMeta = case (keyfind(import, Meta) == false) andalso - elixir_dispatch:find_import(Meta, Name, Arity, E) of - false -> - case (Arity == 1) andalso keyfind(ambiguous_op, Meta) of - {ambiguous_op, nil} -> keystore(ambiguous_op, Meta, Q#elixir_quote.context); - _ -> Meta - end; - Receiver -> - keystore(import, keystore(context, Meta, Q#elixir_quote.context), Receiver) - end, - - Annotated = annotate({Name, NewMeta, ArgsOrAtom}, Q#elixir_quote.context, file(E, Q)), - do_quote_tuple(Annotated, Q, E); + do_quote_import(Name, Meta, ArgsOrAtom, Q, E); do_quote({_, _, _} = Tuple, #elixir_quote{escape=false} = Q, E) -> - Annotated = annotate(Tuple, Q#elixir_quote.context, file(E, Q)), + Annotated = annotate(Tuple, Q#elixir_quote.context), do_quote_tuple(Annotated, Q, E); %% Literals @@ -240,6 +232,15 @@ do_quote({Left, Right}, Q, E) -> {TRight, RQ} = do_quote(Right, LQ, E), {{TLeft, TRight}, RQ}; +do_quote(BitString, #elixir_quote{escape=true} = Q, _) when is_bitstring(BitString) -> + case bit_size(BitString) rem 8 of + 0 -> + {BitString, Q}; + Size -> + <<Bits:Size, Bytes/binary>> = BitString, + {{'<<>>', [], [{'::', [], [Bits, {size, [], [Size]}]}, {'::', [], [Bytes, {binary, [], []}]}]}, Q} + end; + do_quote(Map, #elixir_quote{escape=true} = Q, E) when is_map(Map) -> {TT, TQ} = do_quote(maps:to_list(Map), Q, E), {{'%{}', [], TT}, TQ}; @@ -249,7 +250,7 @@ do_quote(Tuple, #elixir_quote{escape=true} = Q, E) when is_tuple(Tuple) -> {{'{}', [], TT}, TQ}; do_quote(List, #elixir_quote{escape=true} = Q, E) when is_list(List) -> - % The improper case is pretty inefficient, but
improper lists are are. + %% The improper case is a bit inefficient, but improper lists are rare. case reverse_improper(List) of {L} -> do_splice(L, Q, E); {L, R} -> @@ -257,17 +258,57 @@ do_quote(List, #elixir_quote{escape=true} = Q, E) when is_list(List) -> {TR, QR} = do_quote(R, QL, E), {update_last(TL, fun(X) -> {'|', [], [X, TR]} end), QR} end; + +do_quote(Other, #elixir_quote{escape=true} = Q, _) + when is_number(Other); is_pid(Other); is_atom(Other) -> + {Other, Q}; + +do_quote(Fun, #elixir_quote{escape=true} = Q, _) when is_function(Fun) -> + case (erlang:fun_info(Fun, env) == {env, []}) andalso + (erlang:fun_info(Fun, type) == {type, external}) of + true -> {Fun, Q}; + false -> bad_escape(Fun) + end; + +do_quote(Other, #elixir_quote{escape=true}, _) -> + bad_escape(Other); + do_quote(List, Q, E) when is_list(List) -> - do_splice(lists:reverse(List), Q, E); + do_splice(lists:reverse(List), Q, E); do_quote(Other, Q, _) -> {Other, Q}. %% Quote helpers +bad_escape(Arg) -> + argument_error(<<"cannot escape ", ('Elixir.Kernel':inspect(Arg, []))/binary, ". ", + "The supported values are: lists, tuples, maps, atoms, numbers, bitstrings, ", + "PIDs and remote functions in the format &Mod.fun/arity">>). + +do_quote_import(Name, Meta, ArgsOrAtom, #elixir_quote{imports_hygiene=true} = Q, E) -> + Arity = case is_atom(ArgsOrAtom) of + true -> 0; + false -> length(ArgsOrAtom) + end, + + NewMeta = case (keyfind(import, Meta) == false) andalso + elixir_dispatch:find_import(Meta, Name, Arity, E) of + false -> + case (Arity == 1) andalso keyfind(ambiguous_op, Meta) of + {ambiguous_op, nil} -> keystore(ambiguous_op, Meta, Q#elixir_quote.context); + _ -> Meta + end; + Receiver -> + keystore(import, keystore(context, Meta, Q#elixir_quote.context), Receiver) + end, + + Annotated = annotate({Name, NewMeta, ArgsOrAtom}, Q#elixir_quote.context), + do_quote_tuple(Annotated, Q, E). + do_quote_call(Left, Meta, Expr, Args, Q, E) -> All = [meta(Meta, Q), Left, {unquote, Meta, [Expr]}, Args, - Q#elixir_quote.context, file(E, Q)], + Q#elixir_quote.context], {TAll, TQ} = lists:mapfoldl(fun(X, Acc) -> do_quote(X, Acc, E) end, Q, All), {{{'.', Meta, [elixir_quote, dot]}, Meta, TAll}, TQ}. @@ -278,33 +319,54 @@ do_quote_fa(Target, Meta, Args, F, A, Q, E) -> false -> Meta; Receiver -> keystore(import_fa, Meta, {Receiver, Q#elixir_quote.context}) end, - do_quote_tuple({Target, NewMeta, Args}, Q, E). + do_quote_tuple(Target, NewMeta, Args, Q, E). do_quote_tuple({Left, Meta, Right}, Q, E) -> + do_quote_tuple(Left, Meta, Right, Q, E). + +% In a def unquote(name)(args) expression name will be an atom literal, +% thus location: :keep will not have enough information to generate the proper file/line annotation. +% This alters metadata to force Elixir to show the file to which the definition is added +% instead of the file where definition is quoted (i.e. we behave the opposite to location: :keep). +do_quote_tuple(Left, Meta, [{{unquote, _, _}, _, _}, _] = Right, Q, E) when ?defs(Left) -> + {TLeft, LQ} = do_quote(Left, Q, E), + {[Head, Body], RQ} = do_quote(Right, LQ, E), + {'{}', [], [HLeft, HMeta, HRight]} = Head, + NewMeta = lists:keydelete(file, 1, HMeta), + NewHead = {'{}', [], [HLeft, NewMeta, HRight]}, + {{'{}', [], [TLeft, meta(Meta, Q), [NewHead, Body]]}, RQ}; + +do_quote_tuple(Left, Meta, Right, Q, E) -> {TLeft, LQ} = do_quote(Left, Q, E), {TRight, RQ} = do_quote(Right, LQ, E), {{'{}', [], [TLeft, meta(Meta, Q), TRight]}, RQ}. -file(#{file := File}, #elixir_quote{keep=true}) -> File; -file(_, _) -> nil. 
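Editorial note, not part of the patch: a hedged sketch of how the new escape clauses and bad_escape/1 above surface through Macro.escape/1, the usual user-facing entry point into this code. Printed outputs are approximate.

    # Remote captures are external funs with an empty environment, so they escape as-is:
    Macro.escape(&String.upcase/1)
    #=> &String.upcase/1

    # Anonymous functions fail the fun_info checks and raise via bad_escape/1:
    Macro.escape(fn x -> x end)
    #=> ** (ArgumentError) cannot escape #Function<...>. The supported values are:
    #=>    lists, tuples, maps, atoms, numbers, bitstrings, PIDs and remote functions
    #=>    in the format &Mod.fun/arity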
+meta(Meta, Q) -> + generated(file(line(Meta, Q), Q), Q). + +generated(Meta, #elixir_quote{generated=true}) -> [{generated, true} | Meta]; +generated(Meta, #elixir_quote{generated=false}) -> Meta. + +file(Meta, #elixir_quote{file=nil}) -> Meta; +file(Meta, #elixir_quote{file=File}) -> [{file, File} | Meta]. -meta(Meta, #elixir_quote{keep=true}) -> +line(Meta, #elixir_quote{file=File}) when File /= nil -> [case KV of {line, V} -> {keep, V}; _ -> KV end || KV <- Meta]; -meta(Meta, #elixir_quote{line=true}) -> +line(Meta, #elixir_quote{line=true}) -> Meta; -meta(Meta, #elixir_quote{line=false}) -> +line(Meta, #elixir_quote{line=false}) -> keydelete(line, Meta); -meta(Meta, #elixir_quote{line=Line}) -> +line(Meta, #elixir_quote{line=Line}) -> keystore(line, Meta, Line). reverse_improper(L) -> reverse_improper(L, []). reverse_improper([], Acc) -> {Acc}; -reverse_improper([H|T], Acc) when is_list(T) -> reverse_improper(T, [H|Acc]); -reverse_improper([H|T], Acc) -> {[H|Acc], T}. +reverse_improper([H | T], Acc) when is_list(T) -> reverse_improper(T, [H | Acc]); +reverse_improper([H | T], Acc) -> {[H | Acc], T}. update_last([], _) -> []; update_last([H], F) -> [F(H)]; -update_last([H|T], F) -> [H|update_last(T,F)]. +update_last([H | T], F) -> [H | update_last(T, F)]. keyfind(Key, Meta) -> lists:keyfind(Key, 1, Meta). @@ -326,9 +388,9 @@ keynew(Key, Meta, Value) -> %% Quote splicing -do_splice([{'|', Meta, [{unquote_splicing, _, [Left]}, Right]}|T], #elixir_quote{unquote=true} = Q, E) -> +do_splice([{'|', Meta, [{unquote_splicing, _, [Left]}, Right]} | T], #elixir_quote{unquote=true} = Q, E) -> %% Process the remaining entries on the list. - %% For [1, 2, 3, unquote_splicing(arg)|tail], this will quote + %% For [1, 2, 3, unquote_splicing(arg) | tail], this will quote %% 1, 2 and 3, which could even be unquotes. {TT, QT} = do_splice(T, Q, E, [], []), {TR, QR} = do_quote(Right, QT, E), @@ -337,12 +399,12 @@ do_splice([{'|', Meta, [{unquote_splicing, _, [Left]}, Right]}|T], #elixir_quote do_splice(List, Q, E) -> do_splice(List, Q, E, [], []). -do_splice([{unquote_splicing, Meta, [Expr]}|T], #elixir_quote{unquote=true} = Q, E, Buffer, Acc) -> +do_splice([{unquote_splicing, Meta, [Expr]} | T], #elixir_quote{unquote=true} = Q, E, Buffer, Acc) -> do_splice(T, Q#elixir_quote{unquoted=true}, E, [], do_runtime_list(Meta, list, [Expr, do_join(Buffer, Acc)])); -do_splice([H|T], Q, E, Buffer, Acc) -> +do_splice([H | T], Q, E, Buffer, Acc) -> {TH, TQ} = do_quote(H, Q, E), - do_splice(T, TQ, E, [TH|Buffer], Acc); + do_splice(T, TQ, E, [TH | Buffer], Acc); do_splice([], Q, _E, Buffer, Acc) -> {do_join(Buffer, Acc), Q}. diff --git a/lib/elixir/src/elixir_rewrite.erl b/lib/elixir/src/elixir_rewrite.erl new file mode 100644 index 00000000000..ca8ae6bb9c8 --- /dev/null +++ b/lib/elixir/src/elixir_rewrite.erl @@ -0,0 +1,227 @@ +-module(elixir_rewrite). +-export([rewrite/5, inline/3]). +-include("elixir.hrl"). + +%% Convenience variables + +-define(atom, 'Elixir.Atom'). +-define(access, 'Elixir.Access'). +-define(enum, 'Elixir.Enum'). +-define(io, 'Elixir.IO'). +-define(integer, 'Elixir.Integer'). +-define(kernel, 'Elixir.Kernel'). +-define(list, 'Elixir.List'). +-define(list_chars, 'Elixir.List.Chars'). +-define(map, 'Elixir.Map'). +-define(node, 'Elixir.Node'). +-define(port, 'Elixir.Port'). +-define(process, 'Elixir.Process'). +-define(string, 'Elixir.String'). +-define(string_chars, 'Elixir.String.Chars'). +-define(system, 'Elixir.System'). +-define(tuple, 'Elixir.Tuple'). 
+ +%% Inline + +%% Inline rules are straightforward, they keep the same +%% number and order of arguments and show up on captures. + +inline(?atom, to_charlist, 1) -> {erlang, atom_to_list}; +inline(?io, iodata_length, 1) -> {erlang, iolist_size}; +inline(?io, iodata_to_binary, 1) -> {erlang, iolist_to_binary}; +inline(?integer, to_string, 1) -> {erlang, integer_to_binary}; +inline(?integer, to_string, 2) -> {erlang, integer_to_binary}; +inline(?integer, to_charlist, 1) -> {erlang, integer_to_list}; +inline(?integer, to_charlist, 2) -> {erlang, integer_to_list}; +inline(?list, to_atom, 1) -> {erlang, list_to_atom}; +inline(?list, to_existing_atom, 1) -> {erlang, list_to_existing_atom}; +inline(?list, to_float, 1) -> {erlang, list_to_float}; +inline(?list, to_integer, 1) -> {erlang, list_to_integer}; +inline(?list, to_integer, 2) -> {erlang, list_to_integer}; +inline(?list, to_tuple, 1) -> {erlang, list_to_tuple}; + +inline(?kernel, '+', 2) -> {erlang, '+'}; +inline(?kernel, '-', 2) -> {erlang, '-'}; +inline(?kernel, '+', 1) -> {erlang, '+'}; +inline(?kernel, '-', 1) -> {erlang, '-'}; +inline(?kernel, '*', 2) -> {erlang, '*'}; +inline(?kernel, '/', 2) -> {erlang, '/'}; +inline(?kernel, '++', 2) -> {erlang, '++'}; +inline(?kernel, '--', 2) -> {erlang, '--'}; +inline(?kernel, 'not', 1) -> {erlang, 'not'}; +inline(?kernel, '<', 2) -> {erlang, '<'}; +inline(?kernel, '>', 2) -> {erlang, '>'}; +inline(?kernel, '<=', 2) -> {erlang, '=<'}; +inline(?kernel, '>=', 2) -> {erlang, '>='}; +inline(?kernel, '==', 2) -> {erlang, '=='}; +inline(?kernel, '!=', 2) -> {erlang, '/='}; +inline(?kernel, '===', 2) -> {erlang, '=:='}; +inline(?kernel, '!==', 2) -> {erlang, '=/='}; +inline(?kernel, abs, 1) -> {erlang, abs}; +inline(?kernel, apply, 2) -> {erlang, apply}; +inline(?kernel, apply, 3) -> {erlang, apply}; +inline(?kernel, binary_part, 3) -> {erlang, binary_part}; +inline(?kernel, bit_size, 1) -> {erlang, bit_size}; +inline(?kernel, byte_size, 1) -> {erlang, byte_size}; +inline(?kernel, 'div', 2) -> {erlang, 'div'}; +inline(?kernel, exit, 1) -> {erlang, exit}; +inline(?kernel, hd, 1) -> {erlang, hd}; +inline(?kernel, is_atom, 1) -> {erlang, is_atom}; +inline(?kernel, is_binary, 1) -> {erlang, is_binary}; +inline(?kernel, is_bitstring, 1) -> {erlang, is_bitstring}; +inline(?kernel, is_boolean, 1) -> {erlang, is_boolean}; +inline(?kernel, is_float, 1) -> {erlang, is_float}; +inline(?kernel, is_function, 1) -> {erlang, is_function}; +inline(?kernel, is_function, 2) -> {erlang, is_function}; +inline(?kernel, is_integer, 1) -> {erlang, is_integer}; +inline(?kernel, is_list, 1) -> {erlang, is_list}; +inline(?kernel, is_map, 1) -> {erlang, is_map}; +inline(?kernel, is_number, 1) -> {erlang, is_number}; +inline(?kernel, is_pid, 1) -> {erlang, is_pid}; +inline(?kernel, is_port, 1) -> {erlang, is_port}; +inline(?kernel, is_reference, 1) -> {erlang, is_reference}; +inline(?kernel, is_tuple, 1) -> {erlang, is_tuple}; +inline(?kernel, length, 1) -> {erlang, length}; +inline(?kernel, make_ref, 0) -> {erlang, make_ref}; +inline(?kernel, map_size, 1) -> {erlang, map_size}; +inline(?kernel, max, 2) -> {erlang, max}; +inline(?kernel, min, 2) -> {erlang, min}; +inline(?kernel, node, 0) -> {erlang, node}; +inline(?kernel, node, 1) -> {erlang, node}; +inline(?kernel, 'rem', 2) -> {erlang, 'rem'}; +inline(?kernel, round, 1) -> {erlang, round}; +inline(?kernel, self, 0) -> {erlang, self}; +inline(?kernel, send, 2) -> {erlang, send}; +inline(?kernel, spawn, 1) -> {erlang, spawn}; +inline(?kernel, spawn, 3) -> {erlang, spawn}; 
+inline(?kernel, spawn_link, 1) -> {erlang, spawn_link}; +inline(?kernel, spawn_link, 3) -> {erlang, spawn_link}; +inline(?kernel, spawn_monitor, 1) -> {erlang, spawn_monitor}; +inline(?kernel, spawn_monitor, 3) -> {erlang, spawn_monitor}; +inline(?kernel, throw, 1) -> {erlang, throw}; +inline(?kernel, tl, 1) -> {erlang, tl}; +inline(?kernel, trunc, 1) -> {erlang, trunc}; +inline(?kernel, tuple_size, 1) -> {erlang, tuple_size}; + +inline(?map, keys, 1) -> {maps, keys}; +inline(?map, merge, 2) -> {maps, merge}; +inline(?map, size, 1) -> {maps, size}; +inline(?map, values, 1) -> {maps, values}; +inline(?map, to_list, 1) -> {maps, to_list}; + +inline(?node, list, 0) -> {erlang, nodes}; +inline(?node, list, 1) -> {erlang, nodes}; +inline(?node, spawn, 2) -> {erlang, spawn}; +inline(?node, spawn, 3) -> {erlang, spawn_opt}; +inline(?node, spawn, 4) -> {erlang, spawn}; +inline(?node, spawn, 5) -> {erlang, spawn_opt}; +inline(?node, spawn_link, 2) -> {erlang, spawn_link}; +inline(?node, spawn_link, 4) -> {erlang, spawn_link}; + +inline(?process, 'alive?', 1) -> {erlang, is_process_alive}; +inline(?process, cancel_timer, 1) -> {erlang, cancel_timer}; +inline(?process, cancel_timer, 2) -> {erlang, cancel_timer}; +inline(?process, exit, 2) -> {erlang, exit}; +inline(?process, get, 0) -> {erlang, get}; +inline(?process, get_keys, 0) -> {erlang, get_keys}; +inline(?process, get_keys, 1) -> {erlang, get_keys}; +inline(?process, group_leader, 0) -> {erlang, group_leader}; +inline(?process, hibernate, 3) -> {erlang, hibernate}; +inline(?process, demonitor, 1) -> {erlang, demonitor}; +inline(?process, demonitor, 2) -> {erlang, demonitor}; +inline(?process, flag, 2) -> {erlang, process_flag}; +inline(?process, flag, 3) -> {erlang, process_flag}; +inline(?process, link, 1) -> {erlang, link}; +inline(?process, list, 0) -> {erlang, processes}; +inline(?process, read_timer, 1) -> {erlang, read_timer}; +inline(?process, registered, 0) -> {erlang, registered}; +inline(?process, send, 3) -> {erlang, send}; +inline(?process, spawn, 2) -> {erlang, spawn_opt}; +inline(?process, spawn, 4) -> {erlang, spawn_opt}; +inline(?process, unlink, 1) -> {erlang, unlink}; +inline(?process, unregister, 1) -> {erlang, unregister}; + +inline(?port, open, 2) -> {erlang, open_port}; +inline(?port, close, 1) -> {erlang, port_close}; +inline(?port, command, 2) -> {erlang, port_command}; +inline(?port, command, 3) -> {erlang, port_command}; +inline(?port, connect, 2) -> {erlang, port_connect}; +inline(?port, list, 0) -> {erlang, ports}; + +inline(?string, to_float, 1) -> {erlang, binary_to_float}; +inline(?string, to_integer, 1) -> {erlang, binary_to_integer}; +inline(?string, to_integer, 2) -> {erlang, binary_to_integer}; + +inline(?system, stacktrace, 0) -> {erlang, get_stacktrace}; +inline(?system, monotonic_time, 0) -> {erlang, monotonic_time}; +inline(?system, os_time, 0) -> {os, system_time}; +inline(?system, system_time, 0) -> {erlang, system_time}; +inline(?system, time_offset, 0) -> {erlang, time_offset}; +inline(?system, unique_integer, 0) -> {erlang, unique_integer}; +inline(?system, unique_integer, 1) -> {erlang, unique_integer}; + +inline(?tuple, to_list, 1) -> {erlang, tuple_to_list}; +inline(?tuple, append, 2) -> {erlang, append_element}; + +inline(_, _, _) -> false. + +%% Rewrite rules +%% +%% Rewrite rules are more complex than regular inlining code +%% as they may change the number of arguments. However, they +%% don't add new code (such as case statements), at best they +%% perform dead code removal. 
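Editorial note, not part of the patch: an informal example contrasting the two rule sets, based on the inline/3 clauses above and the rewrite/3 clauses that follow. The compiled forms are shown as comments and are a best-effort reading of those clauses.

    # Inlined: same arguments in the same order, so captures are resolved too.
    Integer.to_string(x)     # compiles to :erlang.integer_to_binary(x)
    &Integer.to_string/1     # the capture resolves to :erlang.integer_to_binary/1

    # Rewritten: arguments may be reordered or adjusted, which plain inlining cannot express.
    elem(tuple, 0)           # compiles to :erlang.element(1, tuple)
    Map.delete(map, key)     # compiles to :maps.remove(key, map)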
+ +rewrite(?string_chars, _DotMeta, 'to_string', _Meta, [String]) when is_binary(String) -> + String; +rewrite(?string_chars, _, 'to_string', _, [{{'.', _, [?kernel, inspect]}, _, _} = Call]) -> + Call; +rewrite(Receiver, DotMeta, Right, Meta, Args) -> + {EReceiver, ERight, EArgs} = rewrite(Receiver, Right, Args), + {{'.', DotMeta, [EReceiver, ERight]}, Meta, EArgs}. + +rewrite(?atom, to_string, [Arg]) -> + {erlang, atom_to_binary, [Arg, utf8]}; +rewrite(?enum, into, [Arg, {'%{}', _, []}]) -> + {?map, new, [Arg]}; +rewrite(?enum, into, [Arg, {'%{}', _, []}, Fun]) -> + {?map, new, [Arg, Fun]}; +rewrite(?kernel, elem, [Tuple, Index]) -> + {erlang, element, [increment(Index), Tuple]}; +rewrite(?kernel, put_elem, [Tuple, Index, Value]) -> + {erlang, setelement, [increment(Index), Tuple, Value]}; +rewrite(?map, delete, [Map, Key]) -> + {maps, remove, [Key, Map]}; +rewrite(?map, fetch, [Map, Key]) -> + {maps, find, [Key, Map]}; +rewrite(?map, 'has_key?', [Map, Key]) -> + {maps, is_key, [Key, Map]}; +rewrite(?map, put, [Map, Key, Value]) -> + {maps, put, [Key, Value, Map]}; +rewrite(?map, 'replace!', [Map, Key, Value]) -> + {maps, update, [Key, Value, Map]}; +rewrite(?process, monitor, [Arg]) -> + {erlang, monitor, [process, Arg]}; +rewrite(?process, group_leader, [Pid, Leader]) -> + {erlang, group_leader, [Leader, Pid]}; +rewrite(?process, send_after, [Dest, Msg, Time]) -> + {erlang, send_after, [Time, Dest, Msg]}; +rewrite(?process, send_after, [Dest, Msg, Time, Opts]) -> + {erlang, send_after, [Time, Dest, Msg, Opts]}; +rewrite(?string, to_atom, [Arg]) -> + {erlang, binary_to_atom, [Arg, utf8]}; +rewrite(?string, to_existing_atom, [Arg]) -> + {erlang, binary_to_existing_atom, [Arg, utf8]}; +rewrite(?tuple, insert_at, [Tuple, Index, Term]) -> + {erlang, insert_element, [increment(Index), Tuple, Term]}; +rewrite(?tuple, delete_at, [Tuple, Index]) -> + {erlang, delete_element, [increment(Index), Tuple]}; +rewrite(?tuple, duplicate, [Data, Size]) -> + {erlang, make_tuple, [Size, Data]}; +rewrite(Receiver, Fun, Args) -> + {Receiver, Fun, Args}. + +increment(Number) when is_number(Number) -> + Number + 1; +increment(Other) -> + {{'.', [], [erlang, '+']}, [], [Other, 1]}. diff --git a/lib/elixir/src/elixir_scope.erl b/lib/elixir/src/elixir_scope.erl deleted file mode 100644 index c9f02e3dd0f..00000000000 --- a/lib/elixir/src/elixir_scope.erl +++ /dev/null @@ -1,140 +0,0 @@ -%% Convenience functions used to manipulate scope and its variables. --module(elixir_scope). --export([translate_var/4, build_var/2, - load_binding/2, dump_binding/2, - mergev/2, mergec/2, mergef/2, - merge_vars/2, merge_opt_vars/2 -]). --include("elixir.hrl"). - -%% VAR HANDLING - -translate_var(Meta, Name, Kind, S) when is_atom(Kind); is_integer(Kind) -> - Line = ?line(Meta), - Tuple = {Name, Kind}, - Vars = S#elixir_scope.vars, - - case orddict:find({Name, Kind}, Vars) of - {ok, {Current, _}} -> Exists = true; - error -> Current = nil, Exists = false - end, - - case S#elixir_scope.context of - match -> - MatchVars = S#elixir_scope.match_vars, - - case Exists andalso ordsets:is_element(Tuple, MatchVars) of - true -> - {{var, Line, Current}, S}; - false -> - %% We attempt to give vars a nice name because we - %% still use the unused vars warnings from erl_lint. - %% - %% Once we move the warning to Elixir compiler, we - %% can name vars as _@COUNTER. 
- {NewVar, Counter, NS} = - if - Kind /= nil -> - build_var('_', S); - true -> - build_var(Name, S) - end, - - FS = NS#elixir_scope{ - vars=orddict:store(Tuple, {NewVar, Counter}, Vars), - match_vars=ordsets:add_element(Tuple, MatchVars), - export_vars=case S#elixir_scope.export_vars of - nil -> nil; - EV -> orddict:store(Tuple, {NewVar, Counter}, EV) - end - }, - - {{var, Line, NewVar}, FS} - end; - _ when Exists -> - {{var, Line, Current}, S} - end. - -build_var(Key, S) -> - New = orddict:update_counter(Key, 1, S#elixir_scope.counter), - Cnt = orddict:fetch(Key, New), - {elixir_utils:atom_concat([Key, "@", Cnt]), Cnt, S#elixir_scope{counter=New}}. - -%% SCOPE MERGING - -%% Receives two scopes and return a new scope based on -%% the second with their variables merged. - -mergev(S1, S2) -> - S2#elixir_scope{ - vars=merge_vars(S1#elixir_scope.vars, S2#elixir_scope.vars), - export_vars=merge_opt_vars(S1#elixir_scope.export_vars, S2#elixir_scope.export_vars) - }. - -%% Receives two scopes and return the first scope with -%% counters and flags from the later. - -mergec(S1, S2) -> - S1#elixir_scope{ - counter=S2#elixir_scope.counter, - super=S2#elixir_scope.super, - caller=S2#elixir_scope.caller - }. - -%% Similar to mergec but does not merge the user vars counter. - -mergef(S1, S2) -> - S1#elixir_scope{ - super=S2#elixir_scope.super, - caller=S2#elixir_scope.caller - }. - -%% Mergers. - -merge_vars(V, V) -> V; -merge_vars(V1, V2) -> - orddict:merge(fun var_merger/3, V1, V2). - -merge_opt_vars(nil, _C2) -> nil; -merge_opt_vars(_C1, nil) -> nil; -merge_opt_vars(C, C) -> C; -merge_opt_vars(C1, C2) -> - orddict:merge(fun var_merger/3, C1, C2). - -var_merger(_Var, {_, V1} = K1, {_, V2}) when V1 > V2 -> K1; -var_merger(_Var, _K1, K2) -> K2. - -%% BINDINGS - -load_binding(Binding, Scope) -> - {NewBinding, NewVars, NewCounter} = load_binding(Binding, [], [], 0), - {NewBinding, Scope#elixir_scope{ - vars=NewVars, - counter=[{'_',NewCounter}] - }}. - -load_binding([{Key,Value}|T], Binding, Vars, Counter) -> - Actual = case Key of - {_Name, _Kind} -> Key; - Name when is_atom(Name) -> {Name, nil} - end, - InternalName = elixir_utils:atom_concat(["_@", Counter]), - load_binding(T, - orddict:store(InternalName, Value, Binding), - orddict:store(Actual, {InternalName, 0}, Vars), Counter + 1); -load_binding([], Binding, Vars, Counter) -> - {Binding, Vars, Counter}. - -dump_binding(Binding, #elixir_scope{vars=Vars}) -> - dump_binding(Vars, Binding, []). - -dump_binding([{{Var, Kind} = Key, {InternalName,_}}|T], Binding, Acc) when is_atom(Kind) -> - Actual = case Kind of - nil -> Var; - _ -> Key - end, - Value = proplists:get_value(InternalName, Binding, nil), - dump_binding(T, Binding, orddict:store(Actual, Value, Acc)); -dump_binding([_|T], Binding, Acc) -> - dump_binding(T, Binding, Acc); -dump_binding([], _Binding, Acc) -> Acc. 
diff --git a/lib/elixir/src/elixir_sup.erl b/lib/elixir/src/elixir_sup.erl index 80767df73c1..bc111dc5698 100644 --- a/lib/elixir/src/elixir_sup.erl +++ b/lib/elixir/src/elixir_sup.erl @@ -8,24 +8,24 @@ start_link() -> init(ok) -> Workers = [ { - elixir_code_server, - {elixir_code_server, start_link, []}, + elixir_config, + {elixir_config, start_link, []}, permanent, % Restart = permanent | transient | temporary 2000, % Shutdown = brutal_kill | int() >= 0 | infinity worker, % Type = worker | supervisor - [elixir_code_server] % Modules = [Module] | dynamic - }, + [elixir_config] % Modules = [Module] | dynamic + }, { - elixir_counter, - {elixir_counter, start_link, []}, + elixir_code_server, + {elixir_code_server, start_link, []}, permanent, % Restart = permanent | transient | temporary 2000, % Shutdown = brutal_kill | int() >= 0 | infinity worker, % Type = worker | supervisor - [elixir_counter] % Modules = [Module] | dynamic - } + [elixir_code_server] % Modules = [Module] | dynamic + } ], {ok, {{one_for_one, 3, 10}, Workers}}. diff --git a/lib/elixir/src/elixir_tokenizer.erl b/lib/elixir/src/elixir_tokenizer.erl index 84310831eb6..6a81e77589d 100644 --- a/lib/elixir/src/elixir_tokenizer.erl +++ b/lib/elixir/src/elixir_tokenizer.erl @@ -1,8 +1,29 @@ -module(elixir_tokenizer). -include("elixir.hrl"). --export([tokenize/3]). +-export([tokenize/1, tokenize/3, tokenize/4, invalid_do_error/1]). -import(elixir_interpolation, [unescape_tokens/1]). +%% Numbers +-define(is_hex(S), (?is_digit(S) orelse (S >= $A andalso S =< $F) orelse (S >= $a andalso S =< $f))). +-define(is_bin(S), (S >= $0 andalso S =< $1)). +-define(is_octal(S), (S >= $0 andalso S =< $7)). + +%% Digits and letters +-define(is_digit(S), (S >= $0 andalso S =< $9)). +-define(is_upcase(S), (S >= $A andalso S =< $Z)). +-define(is_downcase(S), (S >= $a andalso S =< $z)). + +%% Others +-define(is_quote(S), (S == $" orelse S == $')). +-define(is_sigil(S), ((S == $/) orelse (S == $<) orelse (S == $") orelse (S == $') orelse + (S == $[) orelse (S == $() orelse (S == ${) orelse (S == $|))). + +%% Spaces +-define(is_horizontal_space(S), ((S == $\s) orelse (S == $\t))). +-define(is_vertical_space(S), ((S == $\r) orelse (S == $\n))). +-define(is_space(S), (?is_horizontal_space(S) orelse ?is_vertical_space(S))). + +%% Operators -define(at_op(T), T == $@). @@ -16,15 +37,15 @@ -define(unary_op3(T1, T2, T3), T1 == $~, T2 == $~, T3 == $~). --define(hat_op3(T1, T2, T3), - T1 == $^, T2 == $^, T3 == $^). - -define(two_op(T1, T2), T1 == $+, T2 == $+; T1 == $-, T2 == $-; T1 == $<, T2 == $>; T1 == $., T2 == $.). +-define(three_op(T1, T2, T3), + T1 == $^, T2 == $^, T3 == $^). + -define(mult_op(T), T == $* orelse T == $/). @@ -33,10 +54,16 @@ -define(arrow_op3(T1, T2, T3), T1 == $<, T2 == $<, T3 == $<; - T1 == $>, T2 == $>, T3 == $>). + T1 == $>, T2 == $>, T3 == $>; + T1 == $~, T2 == $>, T3 == $>; + T1 == $<, T2 == $<, T3 == $~; + T1 == $<, T2 == $~, T3 == $>; + T1 == $<, T2 == $|, T3 == $>). -define(arrow_op(T1, T2), - T1 == $|, T2 == $>). + T1 == $|, T2 == $>; + T1 == $~, T2 == $>; + T1 == $<, T2 == $~). -define(rel_op(T), T == $<; @@ -80,13 +107,13 @@ -define(type_op(T1, T2), T1 == $:, T2 == $:). --define(pipe_op(T1), +-define(pipe_op(T), T == $|). 
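Editorial note, not part of the patch: the widened ?arrow_op/?arrow_op3 macros above are what allow tokens such as ~>, <~, ~>>, <<~, <~> and <|> to be used as user-definable operators. A minimal sketch, assuming a throwaway Pipes module:

    defmodule Pipes do
      # ~> now tokenizes as an arrow operator, so it can be given a definition
      def a ~> b, do: b.(a)
    end

    import Pipes
    1 ~> (&Integer.to_string/1)
    #=> "1"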
-tokenize(String, Line, #elixir_tokenizer{} = Scope) -> - tokenize(String, Line, Scope, []); +tokenize(String, Line, Column, #elixir_tokenizer{} = Scope) -> + tokenize(String, Line, Column, Scope, []); -tokenize(String, Line, Opts) -> +tokenize(String, Line, Column, Opts) -> File = case lists:keyfind(file, 1, Opts) of {file, V1} -> V1; false -> <<"nofile">> @@ -102,480 +129,480 @@ tokenize(String, Line, Opts) -> false -> true end, - tokenize(String, Line, #elixir_tokenizer{ + tokenize(String, Line, Column, #elixir_tokenizer{ file=File, existing_atoms_only=Existing, - check_terminators=Check - }). + check_terminators=Check, + identifier_tokenizer=elixir_config:get(identifier_tokenizer) + }). + +tokenize(String, Line, Opts) -> + tokenize(String, Line, 1, Opts). -tokenize([], Line, #elixir_tokenizer{terminators=[]}, Tokens) -> - {ok, Line, lists:reverse(Tokens)}; +tokenize([], Line, Column, #elixir_tokenizer{terminators=[]}, Tokens) -> + {ok, Line, Column, lists:reverse(Tokens)}; -tokenize([], EndLine, #elixir_tokenizer{terminators=[{Start, StartLine}|_]}, Tokens) -> +tokenize([], EndLine, _Column, #elixir_tokenizer{terminators=[{Start, {StartLine, _, _}} | _]}, Tokens) -> End = terminator(Start), Message = io_lib:format("missing terminator: ~ts (for \"~ts\" starting at line ~B)", [End, Start, StartLine]), {error, {EndLine, Message, []}, [], Tokens}; +% VC merge conflict + +tokenize(("<<<<<<<" ++ _) = Original, Line, 1, _Scope, Tokens) -> + FirstLine = lists:takewhile(fun(C) -> C =/= $\n andalso C =/= $\r end, Original), + {error, {Line, "found an unexpected version control marker, please resolve the conflicts: ", FirstLine}, Original, Tokens}; + % Base integers -tokenize([$0,X,H|T], Line, Scope, Tokens) when (X == $x orelse X == $X), ?is_hex(H) -> - {Rest, Number} = tokenize_hex([H|T], []), - tokenize(Rest, Line, Scope, [{number, Line, Number}|Tokens]); +tokenize([$0, $x, H | T], Line, Column, Scope, Tokens) when ?is_hex(H) -> + {Rest, Number, Length} = tokenize_hex(T, [H], 1), + tokenize(Rest, Line, Column + 2 + Length, Scope, [{number, {Line, Column, Column + 2 + Length}, Number} | Tokens]); -tokenize([$0,B,H|T], Line, Scope, Tokens) when (B == $b orelse B == $B), ?is_bin(H) -> - {Rest, Number} = tokenize_bin([H|T], []), - tokenize(Rest, Line, Scope, [{number, Line, Number}|Tokens]); +tokenize([$0, $b, H | T], Line, Column, Scope, Tokens) when ?is_bin(H) -> + {Rest, Number, Length} = tokenize_bin(T, [H], 1), + tokenize(Rest, Line, Column + 2 + Length, Scope, [{number, {Line, Column, Column + 2 + Length}, Number} | Tokens]); -tokenize([$0,H|T], Line, Scope, Tokens) when ?is_octal(H) -> - {Rest, Number} = tokenize_octal([H|T], []), - tokenize(Rest, Line, Scope, [{number, Line, Number}|Tokens]); +tokenize([$0, $o, H | T], Line, Column, Scope, Tokens) when ?is_octal(H) -> + {Rest, Number, Length} = tokenize_octal(T, [H], 1), + tokenize(Rest, Line, Column + 2 + Length, Scope, [{number, {Line, Column, Column + 2 + Length}, Number} | Tokens]); % Comments -tokenize([$#|String], Line, Scope, Tokens) -> +tokenize([$# | String], Line, Column, Scope, Tokens) -> Rest = tokenize_comment(String), - tokenize(Rest, Line, Scope, Tokens); + tokenize(Rest, Line, Column, Scope, Tokens); % Sigils -tokenize([$~,S,H,H,H|T] = Original, Line, Scope, Tokens) when ?is_quote(H), ?is_upcase(S) orelse ?is_downcase(S) -> - case extract_heredoc_with_interpolation(Line, Scope, ?is_downcase(S), T, H) of - {ok, NewLine, Parts, Rest} -> +tokenize([$~, S, H, H, H | T] = Original, Line, Column, Scope, Tokens) when ?is_quote(H), 
?is_upcase(S) orelse ?is_downcase(S) -> + case extract_heredoc_with_interpolation(Line, Column, Scope, ?is_downcase(S), T, H) of + {ok, NewLine, NewColumn, Parts, Rest} -> {Final, Modifiers} = collect_modifiers(Rest, []), - tokenize(Final, NewLine, Scope, [{sigil, Line, S, Parts, Modifiers}|Tokens]); + tokenize(Final, NewLine, NewColumn, Scope, [{sigil, {Line, Column, NewColumn}, S, Parts, Modifiers} | Tokens]); {error, Reason} -> {error, Reason, Original, Tokens} end; -tokenize([$~,S,H|T] = Original, Line, Scope, Tokens) when ?is_sigil(H), ?is_upcase(S) orelse ?is_downcase(S) -> - case elixir_interpolation:extract(Line, Scope, ?is_downcase(S), T, sigil_terminator(H)) of - {NewLine, Parts, Rest} -> +tokenize([$~, S, H | T] = Original, Line, Column, Scope, Tokens) when ?is_sigil(H), ?is_upcase(S) orelse ?is_downcase(S) -> + case elixir_interpolation:extract(Line, Column + 3, Scope, ?is_downcase(S), T, sigil_terminator(H)) of + {NewLine, NewColumn, Parts, Rest} -> {Final, Modifiers} = collect_modifiers(Rest, []), - tokenize(Final, NewLine, Scope, [{sigil, Line, S, Parts, Modifiers}|Tokens]); + tokenize(Final, NewLine, NewColumn, Scope, [{sigil, {Line, Column, NewColumn}, S, Parts, Modifiers} | Tokens]); {error, Reason} -> - Sigil = [$~,S,H], + Sigil = [$~, S, H], interpolation_error(Reason, Original, Tokens, " (for sigil ~ts starting at line ~B)", [Sigil, Line]) end; +tokenize([$~, S, H | _] = Original, Line, Column, _Scope, Tokens) when ?is_upcase(S) orelse ?is_downcase(S) -> + MessageString = + "\"~ts\" (column ~p, codepoint U+~4.16.0B). The available delimiters are: " + "//, ||, \"\", '', (), [], {}, <>", + Message = io_lib:format(MessageString, [[H], Column + 2, H]), + {error, {Line, "invalid sigil delimiter: ", Message}, Original, Tokens}; + % Char tokens -tokenize([$?,$\\,P,${,A,B,C,D,E,F,$}|T], Line, Scope, Tokens) - when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E), ?is_hex(F) -> - Char = escape_char([$\\,P,${,A,B,C,D,E,F,$}]), - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); - -tokenize([$?,$\\,P,${,A,B,C,D,E,$}|T], Line, Scope, Tokens) - when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E) -> - Char = escape_char([$\\,P,${,A,B,C,D,E,$}]), - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); - -tokenize([$?,$\\,P,${,A,B,C,D,$}|T], Line, Scope, Tokens) - when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) -> - Char = escape_char([$\\,P,${,A,B,C,D,$}]), - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); - -tokenize([$?,$\\,P,${,A,B,C,$}|T], Line, Scope, Tokens) - when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C) -> - Char = escape_char([$\\,P,${,A,B,C,$}]), - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); - -tokenize([$?,$\\,P,${,A,B,$}|T], Line, Scope, Tokens) - when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B) -> - Char = escape_char([$\\,P,${,A,B,$}]), - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); - -tokenize([$?,$\\,P,${,A,$}|T], Line, Scope, Tokens) - when (P == $x orelse P == $X), ?is_hex(A) -> - Char = escape_char([$\\,P,${,A,$}]), - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); - -tokenize([$?,$\\,P,A,B|T], Line, Scope, Tokens) - when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B) -> - Char = escape_char([$\\,P,A,B]), - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); - -tokenize([$?,$\\,P,A|T], Line, Scope, Tokens) - when (P == $x orelse P == $X), ?is_hex(A) -> - Char = 
escape_char([$\\,P,A]), - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); - -tokenize([$?,$\\,A,B,C|T], Line, Scope, Tokens) - when ?is_octal(A), A =< $3,?is_octal(B), ?is_octal(C) -> - Char = escape_char([$\\,A,B,C]), - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); - -tokenize([$?,$\\,A,B|T], Line, Scope, Tokens) - when ?is_octal(A), ?is_octal(B) -> - Char = escape_char([$\\,A,B]), - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); - -tokenize([$?,$\\,A|T], Line, Scope, Tokens) - when ?is_octal(A) -> - Char = escape_char([$\\,A]), - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); - -tokenize([$?,$\\,H|T], Line, Scope, Tokens) -> - Char = elixir_interpolation:unescape_map(H), - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); +% We tokenize char literals (?a) as {char, _, CharInt} instead of {number, _, +% CharInt}. This is exactly what Erlang does with Erlang char literals +% ($a). This means we'll have to adjust the error message for char literals in +% elixir_errors.erl as by default {char, _, _} tokens are "hijacked" by Erlang +% and printed with Erlang syntax ($a) in the parser's error messages. -tokenize([$?,Char|T], Line, Scope, Tokens) -> - tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]); +tokenize([$?, $\\, H | T], Line, Column, Scope, Tokens) -> + Char = elixir_interpolation:unescape_map(H), + tokenize(T, Line, Column + 3, Scope, [{char, {Line, Column, Column + 3}, Char} | Tokens]); + +tokenize([$?, Char | T], Line, Column, Scope, Tokens) -> + case handle_char(Char) of + {Escape, Name} -> + Msg = io_lib:format("found ? followed by codepoint 0x~.16B (~ts), please use ~ts instead", + [Char, Name, Escape]), + elixir_errors:warn(Line, Scope#elixir_tokenizer.file, Msg); + false -> + ok + end, + tokenize(T, Line, Column + 2, Scope, [{char, {Line, Column, Column + 2}, Char} | Tokens]); % Heredocs -tokenize("\"\"\"" ++ T, Line, Scope, Tokens) -> - handle_heredocs(T, Line, $", Scope, Tokens); +tokenize("\"\"\"" ++ T, Line, Column, Scope, Tokens) -> + handle_heredocs(T, Line, Column, $", Scope, Tokens); -tokenize("'''" ++ T, Line, Scope, Tokens) -> - handle_heredocs(T, Line, $', Scope, Tokens); +tokenize("'''" ++ T, Line, Column, Scope, Tokens) -> + handle_heredocs(T, Line, Column, $', Scope, Tokens); % Strings -tokenize([$"|T], Line, Scope, Tokens) -> - handle_strings(T, Line, $", Scope, Tokens); -tokenize([$'|T], Line, Scope, Tokens) -> - handle_strings(T, Line, $', Scope, Tokens); - -% Atoms - -tokenize([$:,H|T] = Original, Line, Scope, Tokens) when ?is_quote(H) -> - case elixir_interpolation:extract(Line, Scope, true, T, H) of - {NewLine, Parts, Rest} -> - Unescaped = unescape_tokens(Parts), - Key = case Scope#elixir_tokenizer.existing_atoms_only of - true -> atom_safe; - false -> atom_unsafe - end, - tokenize(Rest, NewLine, Scope, [{Key, Line, Unescaped}|Tokens]); - {error, Reason} -> - interpolation_error(Reason, Original, Tokens, " (for atom starting at line ~B)", [Line]) - end; - -tokenize([$:,T|String] = Original, Line, Scope, Tokens) when ?is_atom_start(T) -> - {Rest, Part} = tokenize_atom([T|String], []), - case unsafe_to_atom(Part, Line, Scope) of - {ok, Atom} -> - tokenize(Rest, Line, Scope, [{atom, Line, Atom}|Tokens]); - {error, Reason} -> - {error, Reason, Original, Tokens} - end; - -% %% Special atom identifiers / operators - -tokenize(":..." 
++ Rest, Line, Scope, Tokens) -> - tokenize(Rest, Line, Scope, [{atom, Line, '...'}|Tokens]); -tokenize(":<<>>" ++ Rest, Line, Scope, Tokens) -> - tokenize(Rest, Line, Scope, [{atom, Line, '<<>>'}|Tokens]); -tokenize(":%{}" ++ Rest, Line, Scope, Tokens) -> - tokenize(Rest, Line, Scope, [{atom, Line, '%{}'}|Tokens]); -tokenize(":%" ++ Rest, Line, Scope, Tokens) -> - tokenize(Rest, Line, Scope, [{atom, Line, '%'}|Tokens]); -tokenize(":{}" ++ Rest, Line, Scope, Tokens) -> - tokenize(Rest, Line, Scope, [{atom, Line, '{}'}|Tokens]); - -tokenize("...:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) -> - tokenize(Rest, Line, Scope, [{kw_identifier, Line, '...'}|Tokens]); -tokenize("<<>>:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) -> - tokenize(Rest, Line, Scope, [{kw_identifier, Line, '<<>>'}|Tokens]); -tokenize("%{}:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) -> - tokenize(Rest, Line, Scope, [{kw_identifier, Line, '%{}'}|Tokens]); -tokenize("%:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) -> - tokenize(Rest, Line, Scope, [{kw_identifier, Line, '%'}|Tokens]); -tokenize("{}:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) -> - tokenize(Rest, Line, Scope, [{kw_identifier, Line, '{}'}|Tokens]); +tokenize([$" | T], Line, Column, Scope, Tokens) -> + handle_strings(T, Line, Column + 1, $", Scope, Tokens); +tokenize([$' | T], Line, Column, Scope, Tokens) -> + handle_strings(T, Line, Column + 1, $', Scope, Tokens); + +% Operator atoms + +tokenize("...:" ++ Rest, Line, Column, Scope, Tokens) when ?is_space(hd(Rest)) -> + tokenize(Rest, Line, Column + 4, Scope, [{kw_identifier, {Line, Column, Column + 4}, '...'} | Tokens]); +tokenize("<<>>:" ++ Rest, Line, Column, Scope, Tokens) when ?is_space(hd(Rest)) -> + tokenize(Rest, Line, Column + 5, Scope, [{kw_identifier, {Line, Column, Column + 5}, '<<>>'} | Tokens]); +tokenize("%{}:" ++ Rest, Line, Column, Scope, Tokens) when ?is_space(hd(Rest)) -> + tokenize(Rest, Line, Column + 4, Scope, [{kw_identifier, {Line, Column, Column + 4}, '%{}'} | Tokens]); +tokenize("%:" ++ Rest, Line, Column, Scope, Tokens) when ?is_space(hd(Rest)) -> + tokenize(Rest, Line, Column + 2, Scope, [{kw_identifier, {Line, Column, Column + 2}, '%'} | Tokens]); +tokenize("{}:" ++ Rest, Line, Column, Scope, Tokens) when ?is_space(hd(Rest)) -> + tokenize(Rest, Line, Column + 3, Scope, [{kw_identifier, {Line, Column, Column + 3}, '{}'} | Tokens]); + +tokenize(":..." 
++ Rest, Line, Column, Scope, Tokens) -> + tokenize(Rest, Line, Column + 4, Scope, [{atom, {Line, Column, Column + 4}, '...'} | Tokens]); +tokenize(":<<>>" ++ Rest, Line, Column, Scope, Tokens) -> + tokenize(Rest, Line, Column + 5, Scope, [{atom, {Line, Column, Column + 5}, '<<>>'} | Tokens]); +tokenize(":%{}" ++ Rest, Line, Column, Scope, Tokens) -> + tokenize(Rest, Line, Column + 4, Scope, [{atom, {Line, Column, Column + 4}, '%{}'} | Tokens]); +tokenize(":%" ++ Rest, Line, Column, Scope, Tokens) -> + tokenize(Rest, Line, Column + 2, Scope, [{atom, {Line, Column, Column + 2}, '%'} | Tokens]); +tokenize(":{}" ++ Rest, Line, Column, Scope, Tokens) -> + tokenize(Rest, Line, Column + 3, Scope, [{atom, {Line, Column, Column + 3}, '{}'} | Tokens]); % ## Three Token Operators -tokenize([$:,T1,T2,T3|Rest], Line, Scope, Tokens) when +tokenize([$:, T1, T2, T3 | Rest], Line, Column, Scope, Tokens) when ?unary_op3(T1, T2, T3); ?comp_op3(T1, T2, T3); ?and_op3(T1, T2, T3); ?or_op3(T1, T2, T3); - ?arrow_op3(T1, T2, T3); ?hat_op3(T1, T2, T3) -> - tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T1,T2,T3])}|Tokens]); + ?arrow_op3(T1, T2, T3); ?three_op(T1, T2, T3) -> + tokenize(Rest, Line, Column + 4, Scope, [{atom, {Line, Column, Column + 4}, list_to_atom([T1, T2, T3])} | Tokens]); % ## Two Token Operators -tokenize([$:,T1,T2|Rest], Line, Scope, Tokens) when +tokenize([$:, T1, T2 | Rest], Line, Column, Scope, Tokens) when ?comp_op2(T1, T2); ?rel_op2(T1, T2); ?and_op(T1, T2); ?or_op(T1, T2); ?arrow_op(T1, T2); ?in_match_op(T1, T2); ?two_op(T1, T2); ?stab_op(T1, T2); ?type_op(T1, T2) -> - tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T1,T2])}|Tokens]); + tokenize(Rest, Line, Column + 3, Scope, [{atom, {Line, Column, Column + 3}, list_to_atom([T1, T2])} | Tokens]); % ## Single Token Operators -tokenize([$:,T|Rest], Line, Scope, Tokens) when +tokenize([$:, T | Rest], Line, Column, Scope, Tokens) when ?at_op(T); ?unary_op(T); ?capture_op(T); ?dual_op(T); ?mult_op(T); ?rel_op(T); ?match_op(T); ?pipe_op(T); T == $. -> - tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T])}|Tokens]); - -% End of line - -tokenize(";" ++ Rest, Line, Scope, []) -> - tokenize(Rest, Line, Scope, eol(Line, ';', [])); - -tokenize(";" ++ Rest, Line, Scope, [Top|Tokens]) when element(1, Top) /= eol -> - tokenize(Rest, Line, Scope, eol(Line, ';', [Top|Tokens])); - -tokenize("\\\n" ++ Rest, Line, Scope, Tokens) -> - tokenize(Rest, Line + 1, Scope, Tokens); - -tokenize("\\\r\n" ++ Rest, Line, Scope, Tokens) -> - tokenize(Rest, Line + 1, Scope, Tokens); - -tokenize("\n" ++ Rest, Line, Scope, Tokens) -> - tokenize(Rest, Line + 1, Scope, eol(Line, newline, Tokens)); - -tokenize("\r\n" ++ Rest, Line, Scope, Tokens) -> - tokenize(Rest, Line + 1, Scope, eol(Line, newline, Tokens)); + tokenize(Rest, Line, Column + 2, Scope, [{atom, {Line, Column, Column + 2}, list_to_atom([T])} | Tokens]); % Stand-alone tokens -tokenize("..." ++ Rest, Line, Scope, Tokens) -> - Token = check_call_identifier(identifier, Line, '...', Rest), - tokenize(Rest, Line, Scope, [Token|Tokens]); +tokenize("..." 
++ Rest, Line, Column, Scope, Tokens) -> + Token = check_call_identifier(Line, Column, 3, '...', Rest), + tokenize(Rest, Line, Column + 3, Scope, [Token | Tokens]); -tokenize("=>" ++ Rest, Line, Scope, Tokens) -> - tokenize(Rest, Line, Scope, add_token_with_nl({assoc_op, Line, '=>'}, Tokens)); +tokenize("=>" ++ Rest, Line, Column, Scope, Tokens) -> + tokenize(Rest, Line, Column + 2, Scope, add_token_with_nl({assoc_op, {Line, Column, Column + 2}, '=>'}, Tokens)); % ## Three token operators -tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?unary_op3(T1, T2, T3) -> - handle_unary_op(Rest, Line, unary_op, list_to_atom([T1,T2,T3]), Scope, Tokens); +tokenize([T1, T2, T3 | Rest], Line, Column, Scope, Tokens) when ?unary_op3(T1, T2, T3) -> + handle_unary_op(Rest, Line, Column, unary_op, 3, list_to_atom([T1, T2, T3]), Scope, Tokens); -tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?comp_op3(T1, T2, T3) -> - handle_op(Rest, Line, comp_op, list_to_atom([T1,T2,T3]), Scope, Tokens); +tokenize([T1, T2, T3 | Rest], Line, Column, Scope, Tokens) when ?comp_op3(T1, T2, T3) -> + handle_op(Rest, Line, Column, comp_op, 3, list_to_atom([T1, T2, T3]), Scope, Tokens); -tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?and_op3(T1, T2, T3) -> - handle_op(Rest, Line, and_op, list_to_atom([T1,T2,T3]), Scope, Tokens); +tokenize([T1, T2, T3 | Rest], Line, Column, Scope, Tokens) when ?and_op3(T1, T2, T3) -> + handle_op(Rest, Line, Column, and_op, 3, list_to_atom([T1, T2, T3]), Scope, Tokens); -tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?or_op3(T1, T2, T3) -> - handle_op(Rest, Line, or_op, list_to_atom([T1,T2,T3]), Scope, Tokens); +tokenize([T1, T2, T3 | Rest], Line, Column, Scope, Tokens) when ?or_op3(T1, T2, T3) -> + handle_op(Rest, Line, Column, or_op, 3, list_to_atom([T1, T2, T3]), Scope, Tokens); -tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?arrow_op3(T1, T2, T3) -> - handle_op(Rest, Line, arrow_op, list_to_atom([T1,T2,T3]), Scope, Tokens); +tokenize([T1, T2, T3 | Rest], Line, Column, Scope, Tokens) when ?three_op(T1, T2, T3) -> + handle_op(Rest, Line, Column, three_op, 3, list_to_atom([T1, T2, T3]), Scope, Tokens); -tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?hat_op3(T1, T2, T3) -> - handle_op(Rest, Line, hat_op, list_to_atom([T1,T2,T3]), Scope, Tokens); +tokenize([T1, T2, T3 | Rest], Line, Column, Scope, Tokens) when ?arrow_op3(T1, T2, T3) -> + handle_op(Rest, Line, Column, arrow_op, 3, list_to_atom([T1, T2, T3]), Scope, Tokens); % ## Containers + punctuation tokens -tokenize([T,T|Rest], Line, Scope, Tokens) when T == $<; T == $> -> - Token = {list_to_atom([T,T]), Line}, - handle_terminator(Rest, Line, Scope, Token, Tokens); +tokenize([T, T | Rest], Line, Column, Scope, Tokens) when T == $<; T == $> -> + Token = {list_to_atom([T, T]), {Line, Column, Column + 2}}, + handle_terminator(Rest, Line, Column + 2, Scope, Token, Tokens); -tokenize([T|Rest], Line, Scope, Tokens) when T == $(; +tokenize([T | Rest], Line, Column, Scope, Tokens) when T == $(; T == ${; T == $}; T == $[; T == $]; T == $); T == $, -> - Token = {list_to_atom([T]), Line}, - handle_terminator(Rest, Line, Scope, Token, Tokens); + Token = {list_to_atom([T]), {Line, Column, Column + 1}}, + handle_terminator(Rest, Line, Column + 1, Scope, Token, Tokens); % ## Two Token Operators -tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?two_op(T1, T2) -> - handle_op(Rest, Line, two_op, list_to_atom([T1, T2]), Scope, Tokens); +tokenize([T1, T2 | Rest], Line, Column, Scope, Tokens) when ?two_op(T1, T2) -> + handle_op(Rest, 
Line, Column, two_op, 2, list_to_atom([T1, T2]), Scope, Tokens); -tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?arrow_op(T1, T2) -> - handle_op(Rest, Line, arrow_op, list_to_atom([T1, T2]), Scope, Tokens); +tokenize([T1, T2 | Rest], Line, Column, Scope, Tokens) when ?arrow_op(T1, T2) -> + handle_op(Rest, Line, Column, arrow_op, 2, list_to_atom([T1, T2]), Scope, Tokens); -tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?comp_op2(T1, T2) -> - handle_op(Rest, Line, comp_op, list_to_atom([T1, T2]), Scope, Tokens); +tokenize([T1, T2 | Rest], Line, Column, Scope, Tokens) when ?comp_op2(T1, T2) -> + handle_op(Rest, Line, Column, comp_op, 2, list_to_atom([T1, T2]), Scope, Tokens); -tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?rel_op2(T1, T2) -> - handle_op(Rest, Line, rel_op, list_to_atom([T1, T2]), Scope, Tokens); +tokenize([T1, T2 | Rest], Line, Column, Scope, Tokens) when ?rel_op2(T1, T2) -> + handle_op(Rest, Line, Column, rel_op, 2, list_to_atom([T1, T2]), Scope, Tokens); -tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?and_op(T1, T2) -> - handle_op(Rest, Line, and_op, list_to_atom([T1, T2]), Scope, Tokens); +tokenize([T1, T2 | Rest], Line, Column, Scope, Tokens) when ?and_op(T1, T2) -> + handle_op(Rest, Line, Column, and_op, 2, list_to_atom([T1, T2]), Scope, Tokens); -tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?or_op(T1, T2) -> - handle_op(Rest, Line, or_op, list_to_atom([T1, T2]), Scope, Tokens); +tokenize([T1, T2 | Rest], Line, Column, Scope, Tokens) when ?or_op(T1, T2) -> + handle_op(Rest, Line, Column, or_op, 2, list_to_atom([T1, T2]), Scope, Tokens); -tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?in_match_op(T1, T2) -> - handle_op(Rest, Line, in_match_op, list_to_atom([T1, T2]), Scope, Tokens); +tokenize([T1, T2 | Rest], Line, Column, Scope, Tokens) when ?in_match_op(T1, T2) -> + handle_op(Rest, Line, Column, in_match_op, 2, list_to_atom([T1, T2]), Scope, Tokens); -tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?type_op(T1, T2) -> - handle_op(Rest, Line, type_op, list_to_atom([T1, T2]), Scope, Tokens); +tokenize([T1, T2 | Rest], Line, Column, Scope, Tokens) when ?type_op(T1, T2) -> + handle_op(Rest, Line, Column, type_op, 2, list_to_atom([T1, T2]), Scope, Tokens); -tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?stab_op(T1, T2) -> - handle_op(Rest, Line, stab_op, list_to_atom([T1, T2]), Scope, Tokens); +tokenize([T1, T2 | Rest], Line, Column, Scope, Tokens) when ?stab_op(T1, T2) -> + handle_op(Rest, Line, Column, stab_op, 2, list_to_atom([T1, T2]), Scope, Tokens); % ## Single Token Operators -tokenize([T|Rest], Line, Scope, Tokens) when ?at_op(T) -> - handle_unary_op(Rest, Line, at_op, list_to_atom([T]), Scope, Tokens); +tokenize([T | Rest], Line, Column, Scope, Tokens) when ?at_op(T) -> + handle_unary_op(Rest, Line, Column, at_op, 1, list_to_atom([T]), Scope, Tokens); -tokenize([T|Rest], Line, Scope, Tokens) when ?capture_op(T) -> - handle_unary_op(Rest, Line, capture_op, list_to_atom([T]), Scope, Tokens); +tokenize([T | Rest], Line, Column, Scope, Tokens) when ?capture_op(T) -> + handle_unary_op(Rest, Line, Column, capture_op, 1, list_to_atom([T]), Scope, Tokens); -tokenize([T|Rest], Line, Scope, Tokens) when ?unary_op(T) -> - handle_unary_op(Rest, Line, unary_op, list_to_atom([T]), Scope, Tokens); +tokenize([T | Rest], Line, Column, Scope, Tokens) when ?unary_op(T) -> + handle_unary_op(Rest, Line, Column, unary_op, 1, list_to_atom([T]), Scope, Tokens); -tokenize([T|Rest], Line, Scope, Tokens) when ?rel_op(T) -> - handle_op(Rest, Line, rel_op, 
list_to_atom([T]), Scope, Tokens); +tokenize([T | Rest], Line, Column, Scope, Tokens) when ?rel_op(T) -> + handle_op(Rest, Line, Column, rel_op, 1, list_to_atom([T]), Scope, Tokens); -tokenize([T|Rest], Line, Scope, Tokens) when ?dual_op(T) -> - handle_unary_op(Rest, Line, dual_op, list_to_atom([T]), Scope, Tokens); +tokenize([T | Rest], Line, Column, Scope, Tokens) when ?dual_op(T) -> + handle_unary_op(Rest, Line, Column, dual_op, 1, list_to_atom([T]), Scope, Tokens); -tokenize([T|Rest], Line, Scope, Tokens) when ?mult_op(T) -> - handle_op(Rest, Line, mult_op, list_to_atom([T]), Scope, Tokens); +tokenize([T | Rest], Line, Column, Scope, Tokens) when ?mult_op(T) -> + handle_op(Rest, Line, Column, mult_op, 1, list_to_atom([T]), Scope, Tokens); -tokenize([T|Rest], Line, Scope, Tokens) when ?match_op(T) -> - handle_op(Rest, Line, match_op, list_to_atom([T]), Scope, Tokens); +tokenize([T | Rest], Line, Column, Scope, Tokens) when ?match_op(T) -> + handle_op(Rest, Line, Column, match_op, 1, list_to_atom([T]), Scope, Tokens); -tokenize([T|Rest], Line, Scope, Tokens) when ?pipe_op(T) -> - handle_op(Rest, Line, pipe_op, list_to_atom([T]), Scope, Tokens); +tokenize([T | Rest], Line, Column, Scope, Tokens) when ?pipe_op(T) -> + handle_op(Rest, Line, Column, pipe_op, 1, list_to_atom([T]), Scope, Tokens); -% Others +% Non-operator Atoms -tokenize([$%|T], Line, Scope, Tokens) -> - case strip_space(T, 0) of - {[${|_] = Rest, Counter} -> tokenize(Rest, Line + Counter, Scope, [{'%{}', Line}|Tokens]); - {Rest, Counter} -> tokenize(Rest, Line + Counter, Scope, [{'%', Line}|Tokens]) +tokenize([$:, H | T] = Original, Line, Column, Scope, Tokens) when ?is_quote(H) -> + case elixir_interpolation:extract(Line, Column + 2, Scope, true, T, H) of + {NewLine, NewColumn, Parts, Rest} -> + Unescaped = unescape_tokens(Parts), + Key = case Scope#elixir_tokenizer.existing_atoms_only of + true -> atom_safe; + false -> atom_unsafe + end, + tokenize(Rest, NewLine, NewColumn, Scope, [{Key, {Line, Column, NewColumn}, Unescaped} | Tokens]); + {error, Reason} -> + interpolation_error(Reason, Original, Tokens, " (for atom starting at line ~B)", [Line]) end; -tokenize([$.|T], Line, Scope, Tokens) -> - {Rest, Counter} = strip_space(T, 0), - handle_dot([$.|Rest], Line + Counter, Scope, Tokens); +tokenize([$: | String] = Original, Line, Column, Scope, Tokens) -> + case tokenize_identifier(String, Line, Scope) of + {_Kind, Atom, Rest, Length, _Ascii, _Special} -> + tokenize(Rest, Line, Column + 1 + Length, Scope, [{atom, {Line, Column, Column + 1 + Length}, Atom} | Tokens]); + empty -> + unexpected_token(Original, Line, Column, Tokens); + {error, Reason} -> + {error, Reason, Original, Tokens} + end; % Integers and floats -tokenize([H|_] = String, Line, Scope, Tokens) when ?is_digit(H) -> - {Rest, Number} = tokenize_number(String, [], false), - tokenize(Rest, Line, Scope, [{number, Line, Number}|Tokens]); +tokenize([H | T], Line, Column, Scope, Tokens) when ?is_digit(H) -> + case tokenize_number(T, [H], 1, false) of + {error, Reason, Number} -> + {error, {Line, Reason, Number}, T, Tokens}; + {Rest, Number, Length} -> + tokenize(Rest, Line, Column + Length, Scope, [{number, {Line, Column, Column + Length}, Number} | Tokens]) + end; -% Aliases +% Spaces -tokenize([H|_] = Original, Line, Scope, Tokens) when ?is_upcase(H) -> - {Rest, Alias} = tokenize_identifier(Original, []), - case unsafe_to_atom(Alias, Line, Scope) of - {ok, Atom} -> - case Rest of - [$:|T] when ?is_space(hd(T)) -> - tokenize(T, Line, Scope, [{kw_identifier, Line, 
Atom}|Tokens]); - _ -> - tokenize(Rest, Line, Scope, [{aliases, Line, [Atom]}|Tokens]) - end; - {error, Reason} -> - {error, Reason, Original, Tokens} - end; +tokenize([T | Rest], Line, Column, Scope, Tokens) when ?is_horizontal_space(T) -> + {Remaining, Stripped} = strip_horizontal_space(Rest), + handle_space_sensitive_tokens(Remaining, Line, Column + 1 + Stripped, Scope, Tokens); -% Identifier +% End of line -tokenize([H|_] = String, Line, Scope, Tokens) when ?is_downcase(H); H == $_ -> - case tokenize_any_identifier(String, Line, Scope, Tokens) of - {keyword, Rest, Check, T} -> - handle_terminator(Rest, Line, Scope, Check, T); - {identifier, Rest, Token} -> - tokenize(Rest, Line, Scope, [Token|Tokens]); - {error, _, _, _} = Error -> - Error - end; +tokenize(";" ++ Rest, Line, Column, Scope, []) -> + tokenize(Rest, Line, Column + 1, Scope, [{';', {Line, Column, Column + 1}}]); -% Ambiguous unary/binary operators tokens +tokenize(";" ++ Rest, Line, Column, Scope, [Top | _] = Tokens) when element(1, Top) /= ';' -> + tokenize(Rest, Line, Column + 1, Scope, [{';', {Line, Column, Column + 1}} | Tokens]); -tokenize([Space, Sign, NotMarker|T], Line, Scope, [{Identifier, _, _} = H|Tokens]) when - ?dual_op(Sign), - ?is_horizontal_space(Space), - not(?is_space(NotMarker)), - NotMarker /= $(, NotMarker /= $[, NotMarker /= $<, NotMarker /= ${, %% containers - NotMarker /= $%, NotMarker /= $+, NotMarker /= $-, NotMarker /= $/, NotMarker /= $>, %% operators - Identifier == identifier -> - Rest = [NotMarker|T], - tokenize(Rest, Line, Scope, [{dual_op, Line, list_to_atom([Sign])}, setelement(1, H, op_identifier)|Tokens]); +tokenize("\\" = Original, Line, _Column, _Scope, Tokens) -> + {error, {Line, "invalid escape \\ at end of file", []}, Original, Tokens}; -% Spaces +tokenize("\\\n" = Original, Line, _Column, _Scope, Tokens) -> + {error, {Line, "invalid escape \\ at end of file", []}, Original, Tokens}; + +tokenize("\\\r\n" = Original, Line, _Column, _Scope, Tokens) -> + {error, {Line, "invalid escape \\ at end of file", []}, Original, Tokens}; + +tokenize("\\\n" ++ Rest, Line, _Column, Scope, Tokens) -> + tokenize(Rest, Line + 1, 1, Scope, Tokens); + +tokenize("\\\r\n" ++ Rest, Line, _Column, Scope, Tokens) -> + tokenize(Rest, Line + 1, 1, Scope, Tokens); + +tokenize("\n" ++ Rest, Line, Column, Scope, Tokens) -> + tokenize(Rest, Line + 1, 1, Scope, eol(Line, Column, Tokens)); + +tokenize("\r\n" ++ Rest, Line, Column, Scope, Tokens) -> + tokenize(Rest, Line + 1, 1, Scope, eol(Line, Column, Tokens)); + +% Others + +tokenize([$%, ${ | T], Line, Column, Scope, Tokens) -> + tokenize([${ | T], Line, Column + 1, Scope, [{'%{}', {Line, Column, Column + 1}} | Tokens]); -tokenize([T|Rest], Line, Scope, Tokens) when ?is_horizontal_space(T) -> - tokenize(strip_horizontal_space(Rest), Line, Scope, Tokens); -tokenize(T, Line, _Scope, Tokens) -> - {error, {Line, "invalid token: ", until_eol(T)}, T, Tokens}. +tokenize([$% | T], Line, Column, Scope, Tokens) -> + tokenize(T, Line, Column + 1, Scope, [{'%', {Line, Column, Column + 1}} | Tokens]); + +tokenize([$. | T], Line, Column, Scope, Tokens) -> + {Rest, Counter, Offset} = strip_dot_space(T, 0, Column + 1), + handle_dot([$. 
| Rest], Line + Counter, Offset - 1, Column, Scope, Tokens); + +% Identifiers + +tokenize(String, Line, Column, Scope, Tokens) -> + case tokenize_identifier(String, Line, Scope) of + {Kind, Atom, Rest, Length, Ascii, Special} -> + HasAt = lists:member($@, Special), + + case Rest of + [$: | T] when ?is_space(hd(T)) -> + tokenize(T, Line, Column + Length + 1, Scope, [{kw_identifier, {Line, Column, Column + Length + 1}, Atom} | Tokens]); + [$: | T] when hd(T) /= $: -> + AtomName = atom_to_list(Atom) ++ [$:], + Reason = {Line, "keyword argument must be followed by space after: ", AtomName}, + {error, Reason, String, Tokens}; + _ when HasAt -> + Reason = {Line, invalid_character_error(Kind, $@), atom_to_list(Atom)}, + {error, Reason, String, Tokens}; + _ when Kind == alias -> + tokenize_alias(Rest, Line, Column, Atom, Length, Ascii, Special, Scope, Tokens); + _ when Kind == identifier -> + tokenize_other(Rest, Line, Column, Atom, Length, Scope, Tokens); + _ -> + unexpected_token(String, Line, Column, Tokens) + end; + empty -> + unexpected_token(String, Line, Column, Tokens); + {error, Reason} -> + {error, Reason, String, Tokens} + end. + +unexpected_token([T | Rest], Line, Column, Tokens) -> + Message = io_lib:format("\"~ts\" (column ~p, codepoint U+~4.16.0B)", [[T], Column, T]), + {error, {Line, "unexpected token: ", Message}, Rest, Tokens}. -strip_horizontal_space([H|T]) when ?is_horizontal_space(H) -> - strip_horizontal_space(T); strip_horizontal_space(T) -> - T. + strip_horizontal_space(T, 0). -strip_space(T, Counter) -> +strip_horizontal_space([H | T], Counter) when ?is_horizontal_space(H) -> + strip_horizontal_space(T, Counter + 1); +strip_horizontal_space(T, Counter) -> + {T, Counter}. + +strip_dot_space(T, Counter, Column) -> case strip_horizontal_space(T) of - "\r\n" ++ Rest -> strip_space(Rest, Counter + 1); - "\n" ++ Rest -> strip_space(Rest, Counter + 1); - Rest -> {Rest, Counter} + {"#" ++ Rest, _} -> strip_dot_space(tokenize_comment(Rest), Counter, 1); + {"\r\n" ++ Rest, _} -> strip_dot_space(Rest, Counter + 1, 1); + {"\n" ++ Rest, _} -> strip_dot_space(Rest, Counter + 1, 1); + {Rest, Length} -> {Rest, Counter, Column + Length} end. -until_eol("\r\n" ++ _) -> []; -until_eol("\n" ++ _) -> []; -until_eol([]) -> []; -until_eol([H|T]) -> [H|until_eol(T)]. - -escape_char(List) -> - << Char/utf8 >> = elixir_interpolation:unescape_chars(list_to_binary(List)), - Char. +handle_char(7) -> {"\\a", "alert"}; +handle_char($\b) -> {"\\b", "backspace"}; +handle_char($\d) -> {"\\d", "delete"}; +handle_char($\e) -> {"\\e", "escape"}; +handle_char($\f) -> {"\\f", "form feed"}; +handle_char($\n) -> {"\\n", "newline"}; +handle_char($\r) -> {"\\r", "carriage return"}; +handle_char($\s) -> {"\\s", "space"}; +handle_char($\t) -> {"\\t", "tab"}; +handle_char($\v) -> {"\\v", "vertical tab"}; +handle_char(_) -> false. %% Handlers -handle_heredocs(T, Line, H, Scope, Tokens) -> - case extract_heredoc_with_interpolation(Line, Scope, true, T, H) of - {ok, NewLine, Parts, Rest} -> - Token = {string_type(H), Line, unescape_tokens(Parts)}, - tokenize(Rest, NewLine, Scope, [Token|Tokens]); +handle_heredocs(T, Line, Column, H, Scope, Tokens) -> + case extract_heredoc_with_interpolation(Line, Column, Scope, true, T, H) of + {ok, NewLine, NewColumn, Parts, Rest} -> + Token = {string_type(H), {Line, Column, NewColumn}, unescape_tokens(Parts)}, + tokenize(Rest, NewLine, NewColumn, Scope, [Token | Tokens]); {error, Reason} -> {error, Reason, [H, H, H] ++ T, Tokens} end. 
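The clauses above thread a Column argument through every step of tokenization, so each token now carries a {Line, Column, EndColumn} triple (EndColumn being one past the last character) instead of a bare line number. As a rough sketch of the shapes implied by this hunk (illustrative only, not captured tokenizer output; the example source `foo = 1` and the positions are mine, and tokens are shown in source order rather than the reversed accumulator order):

    # Sketch only: token shapes implied by the clauses in this hunk.
    [{:identifier, {1, 1, 4}, :foo},
     {:match_op, {1, 5, 6}, :=},   # := is the atom for the match operator
     {:number, {1, 7, 8}, 1}]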
-handle_strings(T, Line, H, Scope, Tokens) -> - case elixir_interpolation:extract(Line, Scope, true, T, H) of +handle_strings(T, Line, Column, H, Scope, Tokens) -> + case elixir_interpolation:extract(Line, Column, Scope, true, T, H) of {error, Reason} -> - interpolation_error(Reason, [H|T], Tokens, " (for string starting at line ~B)", [Line]); - {NewLine, Parts, [$:|Rest]} when ?is_space(hd(Rest)) -> + interpolation_error(Reason, [H | T], Tokens, " (for string starting at line ~B)", [Line]); + {NewLine, NewColumn, Parts, [$: | Rest]} when ?is_space(hd(Rest)) -> Unescaped = unescape_tokens(Parts), Key = case Scope#elixir_tokenizer.existing_atoms_only of true -> kw_identifier_safe; false -> kw_identifier_unsafe end, - tokenize(Rest, NewLine, Scope, [{Key, Line, Unescaped}|Tokens]); - {NewLine, Parts, Rest} -> - Token = {string_type(H), Line, unescape_tokens(Parts)}, - tokenize(Rest, NewLine, Scope, [Token|Tokens]) + tokenize(Rest, NewLine, NewColumn, Scope, [{Key, {Line, Column - 1, NewColumn}, Unescaped} | Tokens]); + {NewLine, NewColumn, Parts, Rest} -> + Token = {string_type(H), {Line, Column - 1, NewColumn}, unescape_tokens(Parts)}, + tokenize(Rest, NewLine, NewColumn, Scope, [Token | Tokens]) end. -handle_unary_op([$:|Rest], Line, _Kind, Op, Scope, Tokens) when ?is_space(hd(Rest)) -> - tokenize(Rest, Line, Scope, [{kw_identifier, Line, Op}|Tokens]); +handle_unary_op([$: | Rest], Line, Column, _Kind, Length, Op, Scope, Tokens) when ?is_space(hd(Rest)) -> + tokenize(Rest, Line, Column + Length + 1, Scope, [{kw_identifier, {Line, Column, Column + Length}, Op} | Tokens]); -handle_unary_op(Rest, Line, Kind, Op, Scope, Tokens) -> +handle_unary_op(Rest, Line, Column, Kind, Length, Op, Scope, Tokens) -> case strip_horizontal_space(Rest) of - [$/|_] -> tokenize(Rest, Line, Scope, [{identifier, Line, Op}|Tokens]); - _ -> tokenize(Rest, Line, Scope, [{Kind, Line, Op}|Tokens]) + {[$/ | _] = Remaining, Extra} -> + tokenize(Remaining, Line, Column + Length + Extra, Scope, + [{identifier, {Line, Column, Column + Length}, Op} | Tokens]); + {Remaining, Extra} -> + tokenize(Remaining, Line, Column + Length + Extra, Scope, + [{Kind, {Line, Column, Column + Length}, Op} | Tokens]) end. -handle_op([$:|Rest], Line, _Kind, Op, Scope, Tokens) when ?is_space(hd(Rest)) -> - tokenize(Rest, Line, Scope, [{kw_identifier, Line, Op}|Tokens]); +handle_op([$: | Rest], Line, Column, _Kind, Length, Op, Scope, Tokens) when ?is_space(hd(Rest)) -> + tokenize(Rest, Line, Column + Length + 1, Scope, + [{kw_identifier, {Line, Column, Column + Length}, Op} | Tokens]); -handle_op(Rest, Line, Kind, Op, Scope, Tokens) -> +handle_op(Rest, Line, Column, Kind, Length, Op, Scope, Tokens) -> case strip_horizontal_space(Rest) of - [$/|_] -> tokenize(Rest, Line, Scope, [{identifier, Line, Op}|Tokens]); - _ -> tokenize(Rest, Line, Scope, add_token_with_nl({Kind, Line, Op}, Tokens)) + {[$/ | _] = Remaining, Extra} -> + tokenize(Remaining, Line, Column + Length + Extra, Scope, + [{identifier, {Line, Column, Column + Length}, Op} | Tokens]); + {Remaining, Extra} -> + tokenize(Remaining, Line, Column + Length + Extra, Scope, + add_token_with_nl({Kind, {Line, Column, Column + Length}, Op}, Tokens)) end. 
% ## Three Token Operators -handle_dot([$.,T1,T2,T3|Rest], Line, Scope, Tokens) when +handle_dot([$., T1, T2, T3 | Rest], Line, Column, DotColumn, Scope, Tokens) when ?unary_op3(T1, T2, T3); ?comp_op3(T1, T2, T3); ?and_op3(T1, T2, T3); ?or_op3(T1, T2, T3); - ?arrow_op3(T1, T2, T3); ?hat_op3(T1, T2, T3) -> - handle_call_identifier(Rest, Line, list_to_atom([T1, T2, T3]), Scope, Tokens); + ?arrow_op3(T1, T2, T3); ?three_op(T1, T2, T3) -> + handle_call_identifier(Rest, Line, Column + 1, DotColumn, 3, list_to_atom([T1, T2, T3]), Scope, Tokens); % ## Two Token Operators -handle_dot([$.,T1,T2|Rest], Line, Scope, Tokens) when +handle_dot([$., T1, T2 | Rest], Line, Column, DotColumn, Scope, Tokens) when ?comp_op2(T1, T2); ?rel_op2(T1, T2); ?and_op(T1, T2); ?or_op(T1, T2); ?arrow_op(T1, T2); ?in_match_op(T1, T2); ?two_op(T1, T2); ?stab_op(T1, T2); ?type_op(T1, T2) -> - handle_call_identifier(Rest, Line, list_to_atom([T1, T2]), Scope, Tokens); + handle_call_identifier(Rest, Line, Column + 1, DotColumn, 2, list_to_atom([T1, T2]), Scope, Tokens); % ## Single Token Operators -handle_dot([$.,T|Rest], Line, Scope, Tokens) when +handle_dot([$., T | Rest], Line, Column, DotColumn, Scope, Tokens) when ?at_op(T); ?unary_op(T); ?capture_op(T); ?dual_op(T); ?mult_op(T); - ?rel_op(T); ?match_op(T); ?pipe_op(T); T == $% -> - handle_call_identifier(Rest, Line, list_to_atom([T]), Scope, Tokens); + ?rel_op(T); ?match_op(T); ?pipe_op(T) -> + handle_call_identifier(Rest, Line, Column + 1, DotColumn, 1, list_to_atom([T]), Scope, Tokens); % ## Exception for .( as it needs to be treated specially in the parser -handle_dot([$.,$(|Rest], Line, Scope, Tokens) -> - tokenize([$(|Rest], Line, Scope, add_token_with_nl({dot_call_op, Line, '.'}, Tokens)); +handle_dot([$., $( | Rest], Line, Column, DotColumn, Scope, Tokens) -> + tokenize([$( | Rest], Line, Column + 2, Scope, add_token_with_nl({dot_call_op, {Line, DotColumn, DotColumn + 1}, '.'}, Tokens)); -handle_dot([$.,H|T] = Original, Line, Scope, Tokens) when ?is_quote(H) -> - case elixir_interpolation:extract(Line, Scope, true, T, H) of - {NewLine, [Part], Rest} when is_binary(Part) -> +handle_dot([$., H | T] = Original, Line, Column, DotColumn, Scope, Tokens) when ?is_quote(H) -> + case elixir_interpolation:extract(Line, Column + 2, Scope, true, T, H) of + {NewLine, NewColumn, [Part], Rest} when is_binary(Part) -> case unsafe_to_atom(Part, Line, Scope) of {ok, Atom} -> - Token = check_call_identifier(identifier, Line, Atom, Rest), - tokenize(Rest, NewLine, Scope, [Token|add_token_with_nl({'.', Line}, Tokens)]); + Token = check_call_identifier(Line, Column, max(NewColumn - Column, 0), Atom, Rest), + tokenize(Rest, NewLine, NewColumn, Scope, + [Token | add_token_with_nl({'.', {Line, DotColumn, DotColumn + 1}}, Tokens)]); {error, Reason} -> {error, Reason, Original, Tokens} end; @@ -583,18 +610,33 @@ handle_dot([$.,H|T] = Original, Line, Scope, Tokens) when ?is_quote(H) -> interpolation_error(Reason, Original, Tokens, " (for function name starting at line ~B)", [Line]) end; -handle_dot([$.|Rest], Line, Scope, Tokens) -> - tokenize(Rest, Line, Scope, add_token_with_nl({'.', Line}, Tokens)). +handle_dot([$. | Rest], Line, Column, DotColumn, Scope, Tokens) -> + tokenize(Rest, Line, Column + 1, Scope, add_token_with_nl({'.', {Line, DotColumn, DotColumn + 1}}, Tokens)). -handle_call_identifier(Rest, Line, Op, Scope, Tokens) -> - Token = check_call_identifier(identifier, Line, Op, Rest), - tokenize(Rest, Line, Scope, [Token|add_token_with_nl({'.', Line}, Tokens)]). 
+handle_call_identifier(Rest, Line, Column, DotColumn, Length, Op, Scope, Tokens) -> + {_, {_, _, NewColumn}, _} = Token = check_call_identifier(Line, Column, Length, Op, Rest), + tokenize(Rest, Line, NewColumn, Scope, + [Token | add_token_with_nl({'.', {Line, DotColumn, DotColumn + 1}}, Tokens)]). + +% ## Ambiguous unary/binary operators tokens +handle_space_sensitive_tokens([Sign, NotMarker | T], Line, Column, Scope, [{Identifier, _, _} = H | Tokens]) when + ?dual_op(Sign), + not(?is_space(NotMarker)), + NotMarker /= $(, NotMarker /= $[, NotMarker /= $<, NotMarker /= ${, %% containers + NotMarker /= $%, NotMarker /= $+, NotMarker /= $-, NotMarker /= $/, NotMarker /= $>, %% operators + Identifier == identifier -> + Rest = [NotMarker | T], + tokenize(Rest, Line, Column + 1, Scope, [{dual_op, {Line, Column, Column + 1}, list_to_atom([Sign])}, setelement(1, H, op_identifier) | Tokens]); + +handle_space_sensitive_tokens(String, Line, Column, Scope, Tokens) -> + tokenize(String, Line, Column, Scope, Tokens). %% Helpers -eol(_Line, _Mod, [{',',_}|_] = Tokens) -> Tokens; -eol(_Line, _Mod, [{eol,_,_}|_] = Tokens) -> Tokens; -eol(Line, Mod, Tokens) -> [{eol,Line,Mod}|Tokens]. +eol(_Line, _Column, [{';', _} | _] = Tokens) -> Tokens; +eol(_Line, _Column, [{',', _} | _] = Tokens) -> Tokens; +eol(_Line, _Column, [{eol, _} | _] = Tokens) -> Tokens; +eol(Line, Column, Tokens) -> [{eol, {Line, Column, Column + 1}} | Tokens]. unsafe_to_atom(Part, Line, #elixir_tokenizer{}) when is_binary(Part) andalso size(Part) > 255; @@ -609,58 +651,65 @@ unsafe_to_atom(List, _Line, #elixir_tokenizer{existing_atoms_only=true}) when is unsafe_to_atom(List, _Line, #elixir_tokenizer{}) when is_list(List) -> {ok, list_to_atom(List)}. -collect_modifiers([H|T], Buffer) when ?is_downcase(H) -> - collect_modifiers(T, [H|Buffer]); +collect_modifiers([H | T], Buffer) when ?is_downcase(H) or ?is_upcase(H) -> + collect_modifiers(T, [H | Buffer]); collect_modifiers(Rest, Buffer) -> {Rest, lists:reverse(Buffer)}. %% Heredocs -extract_heredoc_with_interpolation(Line, Scope, Interpol, T, H) -> - case extract_heredoc(Line, T, H) of - {ok, NewLine, Body, Rest} -> - case elixir_interpolation:extract(Line + 1, Scope, Interpol, Body, 0) of +extract_heredoc_with_interpolation(Line, Column, Scope, Interpol, T, H) -> + case extract_heredoc(Line, Column, T, H) of + {ok, NewLine, NewColumn, Body, Rest} -> + case elixir_interpolation:extract(Line + 1, 1, Scope, Interpol, Body, 0) of {error, Reason} -> {error, interpolation_format(Reason, " (for heredoc starting at line ~B)", [Line])}; - {_, Parts, []} -> - {ok, NewLine, Parts, Rest} + {_, _, Parts, []} -> + {ok, NewLine, NewColumn, Parts, Rest} end; {error, _} = Error -> Error end. -extract_heredoc(Line0, Rest0, Marker) -> +extract_heredoc(Line0, Column0, Rest0, Marker) -> case extract_heredoc_header(Rest0) of {ok, Rest1} -> %% We prepend a new line so we can transparently remove - %% spaces later. This new line is removed by calling `tl` + %% spaces later. This new line is removed by calling "tl" %% in the final heredoc body three lines below. 
- case extract_heredoc_body(Line0, Marker, [$\n|Rest1], []) of + case extract_heredoc_body(Line0, Column0, Marker, [$\n | Rest1], []) of {ok, Line1, Body, Rest2, Spaces} -> - {ok, Line1, tl(remove_heredoc_spaces(Body, Spaces)), Rest2}; - {error, ErrorLine} -> + {ok, Line1, 1, tl(remove_heredoc_spaces(Body, Spaces)), Rest2}; + {error, Reason, ErrorLine} -> Terminator = [Marker, Marker, Marker], - Message = "missing terminator: ~ts (for heredoc starting at line ~B)", - {error, {ErrorLine, io_lib:format(Message, [Terminator, Line0]), []}} + {Message, Token} = heredoc_error_message(Reason, Line0, Terminator), + {error, {ErrorLine, Message, Token}} end; error -> Message = "heredoc start must be followed by a new line after ", {error, {Line0, io_lib:format(Message, []), [Marker, Marker, Marker]}} end. +heredoc_error_message(eof, Line, Terminator) -> + {io_lib:format("missing terminator: ~ts (for heredoc starting at line ~B)", + [Terminator, Line]), + []}; +heredoc_error_message(misplacedterminator, _Line, Terminator) -> + {"invalid location for heredoc terminator, please escape token or move it to its own line: ", + Terminator}. %% Remove spaces from heredoc based on the position of the final quotes. remove_heredoc_spaces(Body, 0) -> - lists:reverse([0|Body]); + lists:reverse([0 | Body]); remove_heredoc_spaces(Body, Spaces) -> - remove_heredoc_spaces([0|Body], [], Spaces, Spaces). -remove_heredoc_spaces([H,$\n|T], [Backtrack|Buffer], Spaces, Original) when Spaces > 0, ?is_horizontal_space(H) -> - remove_heredoc_spaces([Backtrack,$\n|T], Buffer, Spaces - 1, Original); -remove_heredoc_spaces([$\n=H|T], Buffer, _Spaces, Original) -> - remove_heredoc_spaces(T, [H|Buffer], Original, Original); -remove_heredoc_spaces([H|T], Buffer, Spaces, Original) -> - remove_heredoc_spaces(T, [H|Buffer], Spaces, Original); + remove_heredoc_spaces([0 | Body], [], Spaces, Spaces). +remove_heredoc_spaces([H, $\n | T], [Backtrack | Buffer], Spaces, Original) when Spaces > 0, ?is_horizontal_space(H) -> + remove_heredoc_spaces([Backtrack, $\n | T], Buffer, Spaces - 1, Original); +remove_heredoc_spaces([$\n=H | T], Buffer, _Spaces, Original) -> + remove_heredoc_spaces(T, [H | Buffer], Original, Original); +remove_heredoc_spaces([H | T], Buffer, Spaces, Original) -> + remove_heredoc_spaces(T, [H | Buffer], Spaces, Original); remove_heredoc_spaces([], Buffer, _Spaces, _Original) -> Buffer. @@ -670,7 +719,7 @@ extract_heredoc_header("\r\n" ++ Rest) -> {ok, Rest}; extract_heredoc_header("\n" ++ Rest) -> {ok, Rest}; -extract_heredoc_header([H|T]) when ?is_horizontal_space(H) -> +extract_heredoc_header([H | T]) when ?is_horizontal_space(H) -> extract_heredoc_header(T); extract_heredoc_header(_) -> error. @@ -679,150 +728,207 @@ extract_heredoc_header(_) -> %% the remaining of the document and the number of spaces the heredoc %% is aligned. -extract_heredoc_body(Line, Marker, Rest, Buffer) -> +extract_heredoc_body(Line, _Column, Marker, Rest, Buffer) -> case extract_heredoc_line(Marker, Rest, Buffer, 0) of {ok, NewBuffer, NewRest} -> - extract_heredoc_body(Line + 1, Marker, NewRest, NewBuffer); + extract_heredoc_body(Line + 1, 1, Marker, NewRest, NewBuffer); {ok, NewBuffer, NewRest, Spaces} -> {ok, Line, NewBuffer, NewRest, Spaces}; - {error, eof} -> - {error, Line} + {error, Reason} -> + {error, Reason, Line} end. %% Extract a line from the heredoc prepending its contents to a buffer. 
- -extract_heredoc_line("\r\n" ++ Rest, Buffer) -> - {ok, [$\n|Buffer], Rest}; -extract_heredoc_line("\n" ++ Rest, Buffer) -> - {ok, [$\n|Buffer], Rest}; -extract_heredoc_line([H|T], Buffer) -> - extract_heredoc_line(T, [H|Buffer]); -extract_heredoc_line(_, _) -> +%% Allow lazy escaping (e.g. \""") + +extract_heredoc_line(Marker, [$\\, $\\ | T], Buffer) -> + extract_heredoc_line(Marker, T, [$\\, $\\ | Buffer]); +extract_heredoc_line(Marker, [$\\, Marker | T], Buffer) -> + extract_heredoc_line(Marker, T, [Marker, $\\ | Buffer]); +extract_heredoc_line(Marker, [Marker, Marker, Marker | _], _) -> + {error, misplacedterminator}; +extract_heredoc_line(_, "\r\n" ++ Rest, Buffer) -> + {ok, [$\n | Buffer], Rest}; +extract_heredoc_line(_, "\n" ++ Rest, Buffer) -> + {ok, [$\n | Buffer], Rest}; +extract_heredoc_line(Marker, [H | T], Buffer) -> + extract_heredoc_line(Marker, T, [H | Buffer]); +extract_heredoc_line(_, _, _) -> {error, eof}. %% Extract each heredoc line trying to find a match according to the marker. -extract_heredoc_line(Marker, [H|T], Buffer, Counter) when ?is_horizontal_space(H) -> - extract_heredoc_line(Marker, T, [H|Buffer], Counter + 1); -extract_heredoc_line(Marker, [Marker,Marker,Marker|T], Buffer, Counter) -> +extract_heredoc_line(Marker, [H | T], Buffer, Counter) when ?is_horizontal_space(H) -> + extract_heredoc_line(Marker, T, [H | Buffer], Counter + 1); +extract_heredoc_line(Marker, [Marker, Marker, Marker | T], Buffer, Counter) -> {ok, Buffer, T, Counter}; -extract_heredoc_line(_Marker, Rest, Buffer, _Counter) -> - extract_heredoc_line(Rest, Buffer). +extract_heredoc_line(Marker, Rest, Buffer, _Counter) -> + extract_heredoc_line(Marker, Rest, Buffer). %% Integers and floats %% At this point, we are at least sure the first digit is a number. %% Check if we have a point followed by a number; -tokenize_number([$.,H|T], Acc, false) when ?is_digit(H) -> - tokenize_number(T, [H,$.|Acc], true); +tokenize_number([$., H | T], Acc, Length, false) when ?is_digit(H) -> + tokenize_number(T, [H, $. | Acc], Length + 2, true); %% Check if we have an underscore followed by a number; -tokenize_number([$_,H|T], Acc, Bool) when ?is_digit(H) -> - tokenize_number(T, [H|Acc], Bool); +tokenize_number([$_, H | T], Acc, Length, Bool) when ?is_digit(H) -> + tokenize_number(T, [H | Acc], Length + 2, Bool); %% Check if we have e- followed by numbers (valid only for floats); -tokenize_number([E,S,H|T], Acc, true) +tokenize_number([E, S, H | T], Acc, Length, true) when (E == $E) or (E == $e), ?is_digit(H), S == $+ orelse S == $- -> - tokenize_number(T, [H,S,$e|Acc], true); + tokenize_number(T, [H, S, $e | Acc], Length + 3, true); %% Check if we have e followed by numbers (valid only for floats); -tokenize_number([E,H|T], Acc, true) +tokenize_number([E, H | T], Acc, Length, true) when (E == $E) or (E == $e), ?is_digit(H) -> - tokenize_number(T, [H,$e|Acc], true); + tokenize_number(T, [H, $e | Acc], Length + 2, true); %% Finally just numbers. -tokenize_number([H|T], Acc, Bool) when ?is_digit(H) -> - tokenize_number(T, [H|Acc], Bool); +tokenize_number([H | T], Acc, Length, Bool) when ?is_digit(H) -> + tokenize_number(T, [H | Acc], Length + 1, Bool); %% Cast to float... -tokenize_number(Rest, Acc, true) -> - {Rest, list_to_float(lists:reverse(Acc))}; +tokenize_number(Rest, Acc, Length, true) -> + try + {Rest, list_to_float(lists:reverse(Acc)), Length} + catch + error:badarg -> {error, "invalid float number ", lists:reverse(Acc)} + end; %% Or integer. 
-tokenize_number(Rest, Acc, false) -> - {Rest, list_to_integer(lists:reverse(Acc))}. - -tokenize_hex([H|T], Acc) when ?is_hex(H) -> tokenize_hex(T, [H|Acc]); -tokenize_hex(Rest, Acc) -> {Rest, list_to_integer(lists:reverse(Acc), 16)}. - -tokenize_octal([H|T], Acc) when ?is_octal(H) -> tokenize_octal(T, [H|Acc]); -tokenize_octal(Rest, Acc) -> {Rest, list_to_integer(lists:reverse(Acc), 8)}. - -tokenize_bin([H|T], Acc) when ?is_bin(H) -> tokenize_bin(T, [H|Acc]); -tokenize_bin(Rest, Acc) -> {Rest, list_to_integer(lists:reverse(Acc), 2)}. +tokenize_number(Rest, Acc, Length, false) -> + {Rest, list_to_integer(lists:reverse(Acc)), Length}. + +tokenize_hex([H | T], Acc, Length) when ?is_hex(H) -> + tokenize_hex(T, [H | Acc], Length + 1); +tokenize_hex([$_, H | T], Acc, Length) when ?is_hex(H) -> + tokenize_hex(T, [H | Acc], Length + 2); +tokenize_hex(Rest, Acc, Length) -> + {Rest, list_to_integer(lists:reverse(Acc), 16), Length}. + +tokenize_octal([H | T], Acc, Length) when ?is_octal(H) -> + tokenize_octal(T, [H | Acc], Length + 1); +tokenize_octal([$_, H | T], Acc, Length) when ?is_octal(H) -> + tokenize_octal(T, [H | Acc], Length + 2); +tokenize_octal(Rest, Acc, Length) -> + {Rest, list_to_integer(lists:reverse(Acc), 8), Length}. + +tokenize_bin([H | T], Acc, Length) when ?is_bin(H) -> + tokenize_bin(T, [H | Acc], Length + 1); +tokenize_bin([$_, H | T], Acc, Length) when ?is_bin(H) -> + tokenize_bin(T, [H | Acc], Length + 2); +tokenize_bin(Rest, Acc, Length) -> + {Rest, list_to_integer(lists:reverse(Acc), 2), Length}. %% Comments tokenize_comment("\r\n" ++ _ = Rest) -> Rest; tokenize_comment("\n" ++ _ = Rest) -> Rest; -tokenize_comment([_|Rest]) -> tokenize_comment(Rest); +tokenize_comment([_ | Rest]) -> tokenize_comment(Rest); tokenize_comment([]) -> []. -%% Atoms -%% Handle atoms specially since they support @ - -tokenize_atom([H|T], Acc) when ?is_atom(H) -> - tokenize_atom(T, [H|Acc]); - -tokenize_atom([H|T], Acc) when H == $?; H == $! -> - {T, lists:reverse([H|Acc])}; - -tokenize_atom(Rest, Acc) -> - {Rest, lists:reverse(Acc)}. - %% Identifiers -%% At this point, the validity of the first character was already verified. - -tokenize_identifier([H|T], Acc) when ?is_identifier(H) -> - tokenize_identifier(T, [H|Acc]); -tokenize_identifier(Rest, Acc) -> - {Rest, lists:reverse(Acc)}. - -%% Tokenize any identifier, handling kv, punctuated, paren, bracket and do identifiers. - -tokenize_any_identifier(Original, Line, Scope, Tokens) -> - {Rest, Identifier} = tokenize_identifier(Original, []), - - {AllIdentifier, AllRest} = - case Rest of - [H|T] when H == $?; H == $! -> {Identifier ++ [H], T}; - _ -> {Identifier, Rest} - end, - - case unsafe_to_atom(AllIdentifier, Line, Scope) of - {ok, Atom} -> - tokenize_kw_or_other(AllRest, identifier, Line, Atom, Tokens); - {error, Reason} -> - {error, Reason, Original, Tokens} +tokenize([H | T]) when ?is_upcase(H) -> + {Acc, Rest, Length, Special} = tokenize_continue(T, [H], 1, []), + {alias, lists:reverse(Acc), Rest, Length, true, Special}; +tokenize([H | T]) when ?is_downcase(H); H == $_ -> + {Acc, Rest, Length, Special} = tokenize_continue(T, [H], 1, []), + {identifier, lists:reverse(Acc), Rest, Length, true, Special}; +tokenize(_List) -> + {error, empty}. + +tokenize_continue([$@ | T], Acc, Length, Special) -> + tokenize_continue(T, [$@ | Acc], Length + 1, [$@ | lists:delete($@, Special)]); +tokenize_continue([$! | T], Acc, Length, Special) -> + {[$! | Acc], T, Length + 1, [$! | Special]}; +tokenize_continue([$? | T], Acc, Length, Special) -> + {[$? 
| Acc], T, Length + 1, [$? | Special]}; +tokenize_continue([H | T], Acc, Length, Special) when ?is_upcase(H); ?is_downcase(H); ?is_digit(H); H == $_ -> + tokenize_continue(T, [H | Acc], Length + 1, Special); +tokenize_continue(Rest, Acc, Length, Special) -> + {Acc, Rest, Length, Special}. + +tokenize_identifier(String, Line, Scope) -> + case (Scope#elixir_tokenizer.identifier_tokenizer):tokenize(String) of + {Kind, Acc, Rest, Length, Ascii, Special} -> + case unsafe_to_atom(Acc, Line, Scope) of + {ok, Atom} -> + {Kind, Atom, Rest, Length, Ascii, Special}; + {error, _Reason} = Error -> + Error + end; + {error, {not_nfc, Wrong}} -> + Right = unicode:characters_to_nfc_list(Wrong), + RightCodepoints = list_to_codepoint_hex(Right), + WrongCodepoints = list_to_codepoint_hex(Wrong), + Message = io_lib:format("Elixir expects unquoted Unicode atoms and variables to be in NFC form.\n\n" + "Got:\n\n \"~ts\" (codepoints~ts)\n\n" + "Expected:\n\n \"~ts\" (codepoints~ts)\n\n" + "Syntax error before: ", + [Wrong, WrongCodepoints, Right, RightCodepoints]), + {error, {Line, Message, Wrong}}; + {error, empty} -> + empty end. -tokenize_kw_or_other([$:,H|T], _Kind, Line, Atom, _Tokens) when ?is_space(H) -> - {identifier, [H|T], {kw_identifier, Line, Atom}}; +list_to_codepoint_hex(List) -> + [io_lib:format(" ~4.16.0B", [Codepoint]) || Codepoint <- List]. + +tokenize_alias(Rest, Line, Column, Atom, Length, Ascii, Special, Scope, Tokens) -> + if + not Ascii -> + AtomName = atom_to_list(Atom), + Invalid = hd([C || C <- AtomName, C > 127]), + Reason = {Line, invalid_character_error("alias (only ascii characters are allowed)", Invalid), AtomName}, + {error, Reason, AtomName ++ Rest, Tokens}; + Special /= [] -> + AtomName = atom_to_list(Atom), + Reason = {Line, invalid_character_error("alias", hd(Special)), AtomName}, + {error, Reason, AtomName ++ Rest, Tokens}; + true -> + tokenize(Rest, Line, Column + Length, Scope, [{aliases, {Line, Column, Column + Length}, [Atom]} | Tokens]) + end. -tokenize_kw_or_other([$:,H|T], _Kind, Line, Atom, Tokens) when ?is_atom_start(H); ?is_digit(H) -> - Original = atom_to_list(Atom) ++ [$:], - Reason = {Line, "keyword argument must be followed by space after: ", Original}, - {error, Reason, Original ++ [H|T], Tokens}; +tokenize_other(Rest, Line, Column, Atom, Length, Scope, Tokens) -> + case tokenize_keyword_or_identifier(Rest, Line, Column, Length, Atom, Tokens) of + {keyword, Rest, {_, {_, _, EndColumn}} = Check, T} -> + handle_terminator(Rest, Line, EndColumn, Scope, Check, T); + {keyword, Rest, {_, {_, _, EndColumn}, _} = Check, T} -> + handle_terminator(Rest, Line, EndColumn, Scope, Check, T); + {identifier, Rest, {_, {_, _, EndColumn}, _} = Token} -> + tokenize(Rest, Line, EndColumn, Scope, [Token | Tokens]); + {error, _, _, _} = Error -> + Error + end. 
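The tokenize/1 and tokenize_continue/4 clauses above return the identifier kind along with its length and any special characters: a trailing ? or ! ends the identifier and is recorded in Special, while @ is accepted but flagged so tokenize_alias and the keyword paths can reject it later. A rough sketch of the expected return shapes (illustrative only, not captured output; inputs and results written as Elixir charlists):

    # Sketch: shapes implied by tokenize/1 and tokenize_continue/4 above.
    # tokenize('valid?') #=> {:identifier, 'valid?', [], 6, true, '?'}
    # tokenize('Alias@') #=> {:alias, 'Alias@', [], 6, true, '@'}
    # tokenize('123')    #=> {:error, :empty}  # digits cannot start an identifier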
-tokenize_kw_or_other(Rest, Kind, Line, Atom, Tokens) -> - case check_keyword(Line, Atom, Tokens) of +tokenize_keyword_or_identifier(Rest, Line, Column, Length, Atom, Tokens) -> + case check_keyword(Line, Column, Length, Atom, Tokens, Rest) of nomatch -> - {identifier, Rest, check_call_identifier(Kind, Line, Atom, Rest)}; - {ok, [Check|T]} -> + {identifier, Rest, check_call_identifier(Line, Column, Length, Atom, Rest)}; + {ok, [{in_op, {_, _, InEndColumn}, in} | [{unary_op, {NotLine, NotColumn, _}, 'not'} | T]]} -> + {keyword, Rest, {in_op, {NotLine, NotColumn, InEndColumn}, 'not in'}, T}; + {ok, [Check | T]} -> {keyword, Rest, Check, T}; - {error, Token} -> - {error, {Line, "syntax error before: ", Token}, atom_to_list(Atom) ++ Rest, Tokens} + {error, Message, Token} -> + {error, {Line, Message, Token}, atom_to_list(Atom) ++ Rest, Tokens} end. %% Check if it is a call identifier (paren | bracket | do) -check_call_identifier(_Kind, Line, Atom, [$(|_]) -> {paren_identifier, Line, Atom}; -check_call_identifier(_Kind, Line, Atom, [$[|_]) -> {bracket_identifier, Line, Atom}; -check_call_identifier(Kind, Line, Atom, _Rest) -> {Kind, Line, Atom}. +check_call_identifier(Line, Column, Length, Atom, [$( | _]) -> + {paren_identifier, {Line, Column, Column + Length}, Atom}; +check_call_identifier(Line, Column, Length, Atom, [$[ | _]) -> + {bracket_identifier, {Line, Column, Column + Length}, Atom}; +check_call_identifier(Line, Column, Length, Atom, _Rest) -> + {identifier, {Line, Column, Column + Length}, Atom}. -add_token_with_nl(Left, [{eol,_,newline}|T]) -> [Left|T]; -add_token_with_nl(Left, T) -> [Left|T]. +add_token_with_nl({unary_op, _, _} = Left, T) -> [Left | T]; +add_token_with_nl(Left, [{eol, _} | T]) -> [Left | T]; +add_token_with_nl(Left, T) -> [Left | T]. %% Error handling @@ -830,18 +936,18 @@ interpolation_error(Reason, Rest, Tokens, Extension, Args) -> {error, interpolation_format(Reason, Extension, Args), Rest, Tokens}. interpolation_format({string, Line, Message, Token}, Extension, Args) -> - {Line, io_lib:format("~ts" ++ Extension, [Message|Args]), Token}; + {Line, io_lib:format("~ts" ++ Extension, [Message | Args]), Token}; interpolation_format({_, _, _} = Reason, _Extension, _Args) -> Reason. %% Terminators -handle_terminator(Rest, Line, Scope, Token, Tokens) -> +handle_terminator(Rest, Line, Column, Scope, Token, Tokens) -> case handle_terminator(Token, Scope) of {error, Reason} -> {error, Reason, atom_to_list(element(1, Token)) ++ Rest, Tokens}; New -> - tokenize(Rest, Line, New, [Token|Tokens]) + tokenize(Rest, Line, Column, New, [Token | Tokens]) end. handle_terminator(_, #elixir_tokenizer{check_terminators=false} = Scope) -> @@ -852,18 +958,16 @@ handle_terminator(Token, #elixir_tokenizer{terminators=Terminators} = Scope) -> New -> Scope#elixir_tokenizer{terminators=New} end. 
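check_call_identifier/5 above picks the identifier kind purely from the character that follows the name, which is what later lets the parser distinguish foo(...), foo[...] and a plain foo. A sketch of the resulting tokens (the inputs and positions are illustrative):

    # Sketch: kinds picked by check_call_identifier/5 above.
    # "foo("  -> {:paren_identifier,   {1, 1, 4}, :foo}
    # "foo["  -> {:bracket_identifier, {1, 1, 4}, :foo}
    # "foo 1" -> {:identifier,         {1, 1, 4}, :foo}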
-check_terminator({S, Line}, Terminators) when S == 'fn' -> - [{fn, Line}|Terminators]; - check_terminator({S, _} = New, Terminators) when + S == 'fn'; S == 'do'; S == '('; S == '['; S == '{'; S == '<<' -> - [New|Terminators]; + [New | Terminators]; -check_terminator({E, _}, [{S, _}|Terminators]) when +check_terminator({E, _}, [{S, _} | Terminators]) when S == 'do', E == 'end'; S == 'fn', E == 'end'; S == '(', E == ')'; @@ -872,12 +976,13 @@ check_terminator({E, _}, [{S, _}|Terminators]) when S == '<<', E == '>>' -> Terminators; -check_terminator({E, Line}, [{Start, StartLine}|_]) when +check_terminator({E, {Line, _, _}}, [{Start, {StartLine, _, _}} | _]) when E == 'end'; E == ')'; E == ']'; E == '}'; E == '>>' -> End = terminator(Start), - Message = io_lib:format("\"~ts\" starting at line ~B is missing terminator \"~ts\". " - "Unexpected token: ", [Start, StartLine, End]), - {error, {Line, Message, atom_to_list(E)}}; + MessagePrefix = io_lib:format("\"~ts\" is missing terminator \"~ts\". unexpected token: \"", + [Start, End]), + MessageSuffix = io_lib:format("\" at line ~B", [Line]), + {error, {StartLine, {MessagePrefix, MessageSuffix}, [atom_to_list(E)]}}; check_terminator({E, Line}, []) when E == 'end'; E == ')'; E == ']'; E == '}'; E == '>>' -> @@ -904,27 +1009,42 @@ terminator('<<') -> '>>'. %% Keywords checking -check_keyword(_Line, _Atom, [{'.', _}|_]) -> +check_keyword(_Line, _Column, _Length, _Atom, [{'.', _} | _], _Rest) -> nomatch; -check_keyword(DoLine, do, [{Identifier, Line, Atom}|T]) when Identifier == identifier -> - {ok, add_token_with_nl({do, DoLine}, [{do_identifier, Line, Atom}|T])}; -check_keyword(Line, do, Tokens) -> +check_keyword(DoLine, DoColumn, _Length, do, + [{Identifier, {Line, Column, EndColumn}, Atom} | T], _Rest) when Identifier == identifier -> + {ok, add_token_with_nl({do, {DoLine, DoColumn, DoColumn + 2}}, + [{do_identifier, {Line, Column, EndColumn}, Atom} | T])}; +check_keyword(_Line, _Column, _Length, do, [{'fn', _} | _], _Rest) -> + {error, do_with_fn_error("unexpected token \"do\""), "do"}; +check_keyword(Line, Column, _Length, do, Tokens, _Rest) -> case do_keyword_valid(Tokens) of - true -> {ok, add_token_with_nl({do, Line}, Tokens)}; - false -> {error, "do"} + true -> {ok, add_token_with_nl({do, {Line, Column, Column + 2}}, Tokens)}; + false -> {error, invalid_do_error("unexpected token \"do\""), "do"} end; -check_keyword(Line, Atom, Tokens) -> +check_keyword(Line, Column, Length, Atom, Tokens, Rest) -> case keyword(Atom) of - false -> nomatch; - token -> {ok, [{Atom, Line}|Tokens]}; - block -> {ok, [{block_identifier, Line, Atom}|Tokens]}; - unary_op -> {ok, [{unary_op, Line, Atom}|Tokens]}; - Kind -> {ok, add_token_with_nl({Kind, Line, Atom}, Tokens)} + false -> + nomatch; + token -> + {ok, [{Atom, {Line, Column, Column + Length}} | Tokens]}; + block -> + {ok, [{block_identifier, {Line, Column, Column + Length}, Atom} | Tokens]}; + Kind -> + case strip_horizontal_space(Rest) of + {[$/ | _], _} -> + {ok, [{identifier, {Line, Column, Column + Length}, Atom} | Tokens]}; + _ -> + {ok, add_token_with_nl({Kind, {Line, Column, Column + Length}, Atom}, Tokens)} + end end. -%% do is only valid after the end, true, false and nil keywords -do_keyword_valid([{Atom, _}|_]) -> +%% Fail early on invalid do syntax. For example, after +%% most keywords, after comma and so on. 
+do_keyword_valid([{Atom, _} | _]) -> case Atom of + ',' -> false; + ';' -> false; 'end' -> true; nil -> true; true -> true; @@ -945,7 +1065,6 @@ keyword('nil') -> token; keyword('not') -> unary_op; keyword('and') -> and_op; keyword('or') -> or_op; -keyword('xor') -> or_op; keyword('when') -> when_op; keyword('in') -> in_op; @@ -956,3 +1075,25 @@ keyword('rescue') -> block; keyword('catch') -> block; keyword(_) -> false. + +invalid_character_error(What, Char) -> + io_lib:format("invalid character \"~ts\" (codepoint U+~4.16.0B) in ~ts: ", [[Char], Char, What]). + +invalid_do_error(Prefix) -> + Prefix ++ ". In case you wanted to write a \"do\" expression, " + "you must either separate the keyword argument with comma or use do-blocks. " + "For example, the following construct:\n\n" + " if some_condition? do\n" + " :this\n" + " else\n" + " :that\n" + " end\n\n" + "is syntactic sugar for the Elixir construct:\n\n" + " if(some_condition?, do: :this, else: :that)\n\n" + "where \"some_condition?\" is the first argument and the second argument is a keyword list.\n\n" + "Syntax error before: ". + +do_with_fn_error(Prefix) -> + Prefix ++ ". Anonymous functions are written as:\n\n" + " fn pattern -> expression end\n\n" + "Syntax error before: ". diff --git a/lib/elixir/src/elixir_translator.erl b/lib/elixir/src/elixir_translator.erl deleted file mode 100644 index bc0b1389ee9..00000000000 --- a/lib/elixir/src/elixir_translator.erl +++ /dev/null @@ -1,425 +0,0 @@ -%% Translate Elixir quoted expressions to Erlang Abstract Format. -%% Expects the tree to be expanded. --module(elixir_translator). --export([translate/2, translate_arg/3, translate_args/2, translate_block/3]). --import(elixir_scope, [mergev/2, mergec/2]). --import(elixir_errors, [compile_error/3, compile_error/4]). --include("elixir.hrl"). 
- -%% = - -translate({'=', Meta, [Left, Right]}, S) -> - Return = case Left of - {'_', _, Atom} when is_atom(Atom) -> false; - _ -> true - end, - - {TRight, SR} = translate_block(Right, Return, S), - {TLeft, SL} = elixir_clauses:match(fun translate/2, Left, SR), - {{match, ?line(Meta), TLeft, TRight}, SL}; - -%% Containers - -translate({'{}', Meta, Args}, S) when is_list(Args) -> - {TArgs, SE} = translate_args(Args, S), - {{tuple, ?line(Meta), TArgs}, SE}; - -translate({'%{}', Meta, Args}, S) when is_list(Args) -> - elixir_map:translate_map(Meta, Args, S); - -translate({'%', Meta, [Left, Right]}, S) -> - elixir_map:translate_struct(Meta, Left, Right, S); - -translate({'<<>>', Meta, Args}, S) when is_list(Args) -> - elixir_bitstring:translate(Meta, Args, S); - -%% Blocks - -translate({'__block__', Meta, Args}, #elixir_scope{return=Return} = S) when is_list(Args) -> - {TArgs, SA} = translate_block(Args, [], Return, S#elixir_scope{return=true}), - {{block, ?line(Meta), TArgs}, SA}; - -%% Erlang op - -translate({'__op__', Meta, [Op, Expr]}, S) when is_atom(Op) -> - {TExpr, NS} = translate(Expr, S), - {{op, ?line(Meta), Op, TExpr}, NS}; - -translate({'__op__', Meta, [Op, Left, Right]}, S) when is_atom(Op) -> - {[TLeft, TRight], NS} = translate_args([Left, Right], S), - {{op, ?line(Meta), Op, TLeft, TRight}, NS}; - -%% Lexical - -translate({Lexical, _, [_, _]}, S) when Lexical == import; Lexical == alias; Lexical == require -> - {{atom, 0, nil}, S}; - -%% Pseudo variables - -translate({'__CALLER__', Meta, Atom}, S) when is_atom(Atom) -> - {{var, ?line(Meta), '__CALLER__'}, S#elixir_scope{caller=true}}; - -%% Functions - -translate({'&', Meta, [{'/', [], [{Fun, [], Atom}, Arity]}]}, S) - when is_atom(Fun), is_atom(Atom), is_integer(Arity) -> - {{'fun', ?line(Meta), {function, Fun, Arity}}, S}; -translate({'&', Meta, [Arg]}, S) when is_integer(Arg) -> - compile_error(Meta, S#elixir_scope.file, "unhandled &~B outside of a capture", [Arg]); - -translate({fn, Meta, Clauses}, S) -> - elixir_fn:translate(Meta, Clauses, S); - -%% Cond - -translate({'cond', _Meta, [[{do, Pairs}]]}, S) -> - [{'->', Meta, [[Condition], Body]}|T] = lists:reverse(Pairs), - Case = - case Condition of - {'_', _, Atom} when is_atom(Atom) -> - compile_error(Meta, S#elixir_scope.file, "unbound variable _ inside cond. 
" - "If you want the last clause to always match, you probably meant to use: true ->"); - X when is_atom(X) and (X /= false) and (X /= nil) -> - build_cond_clauses(T, Body, Meta); - _ -> - {Truthy, Other} = build_truthy_clause(Meta, Condition, Body), - Error = {{'.', Meta, [erlang, error]}, [], [cond_clause]}, - Falsy = {'->', Meta, [[Other], Error]}, - Acc = {'case', Meta, [Condition, [{do, [Truthy, Falsy]}]]}, - build_cond_clauses(T, Acc, Meta) - end, - translate(Case, S); - -%% Case - -translate({'case', Meta, [Expr, KV]}, #elixir_scope{return=Return} = RS) -> - S = RS#elixir_scope{return=true}, - Clauses = elixir_clauses:get_pairs(do, KV, match), - {TExpr, NS} = translate(Expr, S), - {TClauses, TS} = elixir_clauses:clauses(Meta, Clauses, Return, NS), - {{'case', ?line(Meta), TExpr, TClauses}, TS}; - -%% Try - -translate({'try', Meta, [Clauses]}, #elixir_scope{return=Return} = RS) -> - S = RS#elixir_scope{noname=true, return=true}, - Do = proplists:get_value('do', Clauses, nil), - {TDo, SB} = elixir_translator:translate(Do, S), - - Catch = [Tuple || {X, _} = Tuple <- Clauses, X == 'rescue' orelse X == 'catch'], - {TCatch, SC} = elixir_try:clauses(Meta, Catch, Return, mergec(S, SB)), - - case lists:keyfind('after', 1, Clauses) of - {'after', After} -> - {TBlock, SA} = translate(After, mergec(S, SC)), - TAfter = unblock(TBlock); - false -> - {TAfter, SA} = {[], mergec(S, SC)} - end, - - Else = elixir_clauses:get_pairs(else, Clauses, match), - {TElse, SE} = elixir_clauses:clauses(Meta, Else, Return, mergec(S, SA)), - - SF = (mergec(S, SE))#elixir_scope{noname=RS#elixir_scope.noname}, - {{'try', ?line(Meta), unblock(TDo), TElse, TCatch, TAfter}, SF}; - -%% Receive - -translate({'receive', Meta, [KV]}, #elixir_scope{return=Return} = RS) -> - S = RS#elixir_scope{return=true}, - Do = elixir_clauses:get_pairs(do, KV, match, true), - - case lists:keyfind('after', 1, KV) of - false -> - {TClauses, SC} = elixir_clauses:clauses(Meta, Do, Return, S), - {{'receive', ?line(Meta), TClauses}, SC}; - _ -> - After = elixir_clauses:get_pairs('after', KV, expr), - {TClauses, SC} = elixir_clauses:clauses(Meta, Do ++ After, Return, S), - {FClauses, TAfter} = elixir_utils:split_last(TClauses), - {_, _, [FExpr], _, FAfter} = TAfter, - {{'receive', ?line(Meta), FClauses, FExpr, FAfter}, SC} - end; - -%% Comprehensions - -translate({for, Meta, [_|_] = Args}, S) -> - elixir_for:translate(Meta, Args, S); - -%% Super - -translate({super, Meta, Args}, S) when is_list(Args) -> - Module = assert_module_scope(Meta, super, S), - Function = assert_function_scope(Meta, super, S), - elixir_def_overridable:ensure_defined(Meta, Module, Function, S), - - {_, Arity} = Function, - - {TArgs, TS} = if - length(Args) == Arity -> - translate_args(Args, S); - true -> - compile_error(Meta, S#elixir_scope.file, "super must be called with the same number of " - "arguments as the current function") - end, - - Super = elixir_def_overridable:name(Module, Function), - {{call, ?line(Meta), {atom, ?line(Meta), Super}, TArgs}, TS#elixir_scope{super=true}}; - -%% Variables - -translate({'^', Meta, [{Name, VarMeta, Kind}]}, #elixir_scope{context=match} = S) when is_atom(Name), is_atom(Kind) -> - Tuple = {Name, var_kind(VarMeta, Kind)}, - case orddict:find(Tuple, S#elixir_scope.backup_vars) of - {ok, {Value, _Counter}} -> - {{var, ?line(Meta), Value}, S}; - error -> - compile_error(Meta, S#elixir_scope.file, "unbound variable ^~ts", [Name]) - end; - -translate({'_', Meta, Kind}, #elixir_scope{context=match} = S) when is_atom(Kind) -> - {{var, 
?line(Meta), '_'}, S}; - -translate({'_', Meta, Kind}, S) when is_atom(Kind) -> - compile_error(Meta, S#elixir_scope.file, "unbound variable _"); - -translate({Name, Meta, Kind}, #elixir_scope{extra=map_key} = S) when is_atom(Name), is_atom(Kind) -> - compile_error(Meta, S#elixir_scope.file, "illegal use of variable ~ts in map key", [Name]); - -translate({Name, Meta, Kind}, S) when is_atom(Name), is_atom(Kind) -> - elixir_scope:translate_var(Meta, Name, var_kind(Meta, Kind), S); - -%% Local calls - -translate({Name, Meta, Args}, S) when is_atom(Name), is_list(Meta), is_list(Args) -> - if - S#elixir_scope.context == match -> - compile_error(Meta, S#elixir_scope.file, - "cannot invoke function ~ts/~B inside match", [Name, length(Args)]); - S#elixir_scope.context == guard -> - Arity = length(Args), - File = S#elixir_scope.file, - case Arity of - 0 -> compile_error(Meta, File, "unknown variable ~ts or cannot invoke " - "function ~ts/~B inside guard", [Name, Name, Arity]); - _ -> compile_error(Meta, File, "cannot invoke local ~ts/~B inside guard", - [Name, Arity]) - end; - S#elixir_scope.function == nil -> - compile_error(Meta, S#elixir_scope.file, "undefined function ~ts/~B", [Name, length(Args)]); - true -> - Line = ?line(Meta), - {TArgs, NS} = translate_args(Args, S), - {{call, Line, {atom, Line, Name}, TArgs}, NS} - end; - -%% Remote calls - -translate({{'.', _, [Left, Right]}, Meta, Args}, S) - when (is_tuple(Left) orelse is_atom(Left)), is_atom(Right), is_list(Meta), is_list(Args) -> - {TLeft, SL} = translate(Left, S), - {TArgs, SA} = translate_args(Args, mergec(S, SL)), - - Line = ?line(Meta), - Arity = length(Args), - TRight = {atom, Line, Right}, - - %% We need to rewrite erlang function calls as operators - %% because erl_eval chokes on them. We can remove this - %% once a fix is merged into Erlang, keeping only the - %% list operators one (since it is required for inlining - %% [1,2,3] ++ Right in matches). - case (Left == erlang) andalso erl_op(Right, Arity) of - true -> - {list_to_tuple([op, Line, Right] ++ TArgs), mergev(SL, SA)}; - false -> - assert_allowed_in_context(Meta, Left, Right, Arity, S), - SC = mergev(SL, SA), - - case not is_atom(Left) andalso (Arity == 0) of - true -> - {Var, _, SV} = elixir_scope:build_var('_', SC), - TVar = {var, Line, Var}, - TMap = {map, Line, [ - {map_field_assoc, Line, - {atom, Line, '__struct__'}, - {atom, Line, 'Elixir.KeyError'}}, - {map_field_assoc, Line, - {atom, Line, '__exception__'}, - {atom, Line, 'true'}}, - {map_field_assoc, Line, - {atom, Line, key}, - {atom, Line, TRight}}, - {map_field_assoc, Line, - {atom, Line, term}, - TVar}]}, - - %% TODO There is a bug in dialyzer that makes it fail on - %% empty maps. We work around the bug below by using - %% the is_map/1 guard instead of matching on map. Hopefully - %% we can use a match on 17.1. 
- %% - %% http://erlang.org/pipermail/erlang-bugs/2014-April/004338.html - {{'case', -1, TLeft, [ - {clause, -1, - [{map, Line, [{map_field_exact, Line, TRight, TVar}]}], - [], - [TVar]}, - {clause, -1, - [TVar], - [[elixir_utils:erl_call(Line, erlang, is_map, [TVar])]], - [elixir_utils:erl_call(Line, erlang, error, [TMap])]}, - {clause, -1, - [TVar], - [], - [{call, Line, {remote, Line, TVar, TRight}, []}]} - ]}, SV}; - false -> - {{call, Line, {remote, Line, TLeft, TRight}, TArgs}, SC} - end - end; - -%% Anonymous function calls - -translate({{'.', _, [Expr]}, Meta, Args}, S) when is_list(Args) -> - {TExpr, SE} = translate(Expr, S), - {TArgs, SA} = translate_args(Args, mergec(S, SE)), - {{call, ?line(Meta), TExpr, TArgs}, mergev(SE, SA)}; - -%% Literals - -translate(List, S) when is_list(List) -> - Fun = case S#elixir_scope.context of - match -> fun translate/2; - _ -> fun(X, Acc) -> translate_arg(X, Acc, S) end - end, - translate_list(List, Fun, S, []); - -translate({Left, Right}, S) -> - {TArgs, SE} = translate_args([Left, Right], S), - {{tuple, 0, TArgs}, SE}; - -translate(Other, S) -> - {elixir_utils:elixir_to_erl(Other), S}. - -%% Helpers - -erl_op(Op, Arity) -> - erl_internal:list_op(Op, Arity) orelse - erl_internal:comp_op(Op, Arity) orelse - erl_internal:bool_op(Op, Arity) orelse - erl_internal:arith_op(Op, Arity). - -translate_list([{'|', _, [_, _]=Args}], Fun, Acc, List) -> - {[TLeft,TRight], TAcc} = lists:mapfoldl(Fun, Acc, Args), - {build_list([TLeft|List], TRight), TAcc}; -translate_list([H|T], Fun, Acc, List) -> - {TH, TAcc} = Fun(H, Acc), - translate_list(T, Fun, TAcc, [TH|List]); -translate_list([], _Fun, Acc, List) -> - {build_list(List, {nil, 0}), Acc}. - -build_list([H|T], Acc) -> - build_list(T, {cons, 0, H, Acc}); -build_list([], Acc) -> - Acc. - -var_kind(Meta, Kind) -> - case lists:keyfind(counter, 1, Meta) of - {counter, Counter} -> Counter; - false -> Kind - end. - -%% Pack a list of expressions from a block. -unblock({'block', _, Exprs}) -> Exprs; -unblock(Expr) -> [Expr]. - -%% Translate args - -translate_arg(Arg, Acc, S) when is_number(Arg); is_atom(Arg); is_binary(Arg); is_pid(Arg); is_function(Arg) -> - {TArg, _} = translate(Arg, S), - {TArg, Acc}; -translate_arg(Arg, Acc, S) -> - {TArg, TAcc} = translate(Arg, mergec(S, Acc)), - {TArg, mergev(Acc, TAcc)}. - -translate_args(Args, #elixir_scope{context=match} = S) -> - lists:mapfoldl(fun translate/2, S, Args); - -translate_args(Args, S) -> - lists:mapfoldl(fun(X, Acc) -> translate_arg(X, Acc, S) end, S, Args). - -%% Translate blocks - -translate_block([], Acc, _Return, S) -> - {lists:reverse(Acc), S}; -translate_block([H], Acc, Return, S) -> - {TH, TS} = translate_block(H, Return, S), - translate_block([], [TH|Acc], Return, TS); -translate_block([H|T], Acc, Return, S) -> - {TH, TS} = translate_block(H, false, S), - translate_block(T, [TH|Acc], Return, TS). - -translate_block(Expr, Return, S) -> - case (Return == false) andalso handles_no_return(Expr) of - true -> translate(Expr, S#elixir_scope{return=Return}); - false -> translate(Expr, S) - end. - -%% return is typically true, except when we find one -%% of the expressions below, which may handle return=false -%% but must always return return=true. 
-handles_no_return({'try', _, [_]}) -> true; -handles_no_return({'cond', _, [_]}) -> true; -handles_no_return({'for', _, [_|_]}) -> true; -handles_no_return({'case', _, [_, _]}) -> true; -handles_no_return({'receive', _, [_]}) -> true; -handles_no_return({'__block__', _, [_|_]}) -> true; -handles_no_return(_) -> false. - -%% Cond - -build_cond_clauses([{'->', NewMeta, [[Condition], Body]}|T], Acc, OldMeta) -> - {Truthy, Other} = build_truthy_clause(NewMeta, Condition, Body), - Falsy = {'->', OldMeta, [[Other], Acc]}, - Case = {'case', NewMeta, [Condition, [{do, [Truthy, Falsy]}]]}, - build_cond_clauses(T, Case, NewMeta); -build_cond_clauses([], Acc, _) -> - Acc. - -build_truthy_clause(Meta, Condition, Body) -> - case elixir_utils:returns_boolean(Condition) of - true -> - {{'->', Meta, [[true], Body]}, false}; - false -> - Var = {'cond', [], 'Elixir'}, - Head = {'when', [], [Var, - {'__op__', [], [ - 'andalso', - {{'.', [], [erlang, '/=']}, [], [Var, nil]}, - {{'.', [], [erlang, '/=']}, [], [Var, false]} - ]} - ]}, - {{'->', Meta, [[Head], Body]}, {'_', [], nil}} - end. - -%% Assertions - -assert_module_scope(Meta, Kind, #elixir_scope{module=nil,file=File}) -> - compile_error(Meta, File, "cannot invoke ~ts outside module", [Kind]); -assert_module_scope(_Meta, _Kind, #elixir_scope{module=Module}) -> Module. - -assert_function_scope(Meta, Kind, #elixir_scope{function=nil,file=File}) -> - compile_error(Meta, File, "cannot invoke ~ts outside function", [Kind]); -assert_function_scope(_Meta, _Kind, #elixir_scope{function=Function}) -> Function. - -assert_allowed_in_context(Meta, Left, Right, Arity, #elixir_scope{context=Context} = S) - when (Context == match) orelse (Context == guard) -> - case (Left == erlang) andalso erl_internal:guard_bif(Right, Arity) of - true -> ok; - false -> - compile_error(Meta, S#elixir_scope.file, "cannot invoke remote function ~ts.~ts/~B inside ~ts", - ['Elixir.Macro':to_string(Left), Right, Arity, Context]) - end; -assert_allowed_in_context(_, _, _, _, _) -> - ok. diff --git a/lib/elixir/src/elixir_utils.erl b/lib/elixir/src/elixir_utils.erl index 1268bd6b185..23e633eb450 100644 --- a/lib/elixir/src/elixir_utils.erl +++ b/lib/elixir/src/elixir_utils.erl @@ -1,35 +1,66 @@ %% Convenience functions used throughout elixir source code %% for ast manipulation and querying. -module(elixir_utils). --export([elixir_to_erl/1, get_line/1, split_last/1, - characters_to_list/1, characters_to_binary/1, macro_name/1, - convert_to_boolean/4, returns_boolean/1, atom_concat/1, - read_file_type/1, read_link_type/1, relative_to_cwd/1, erl_call/4]). +-export([get_line/1, split_last/1, noop/0, + characters_to_list/1, characters_to_binary/1, relative_to_cwd/1, + macro_name/1, returns_boolean/1, caller/4, meta_location/1, + read_file_type/1, read_link_type/1, read_mtime_and_size/1, change_universal_time/2, + guard_op/2, match_op/2, extract_splat_guards/1, extract_guards/1]). -include("elixir.hrl"). -include_lib("kernel/include/file.hrl"). +% Builds the macro name + macro_name(Macro) -> - list_to_atom(lists:concat(['MACRO-',Macro])). + list_to_atom("MACRO-" ++ atom_to_list(Macro)). + +% Operators + +match_op('++', 2) -> true; +match_op('+', 1) -> true; +match_op('-', 1) -> true; +match_op(_, _) -> false. + +guard_op('andalso', 2) -> + true; +guard_op('orelse', 2) -> + true; +guard_op(Op, Arity) -> + try erl_internal:op_type(Op, Arity) of + arith -> true; + list -> true; + comp -> true; + bool -> true; + send -> false + catch + _:_ -> false + end. 
-atom_concat(Atoms) -> - list_to_atom(lists:concat(Atoms)). +% Extract guards -erl_call(Line, Module, Function, Args) -> - {call, Line, - {remote, Line, {atom, Line, Module}, {atom, Line, Function}}, - Args - }. +extract_guards({'when', _, [Left, Right]}) -> {Left, extract_or_guards(Right)}; +extract_guards(Else) -> {Else, []}. -get_line(Opts) when is_list(Opts) -> - case lists:keyfind(line, 1, Opts) of - {line, Line} when is_integer(Line) -> Line; - false -> 0 - end. +extract_or_guards({'when', _, [Left, Right]}) -> [Left | extract_or_guards(Right)]; +extract_or_guards(Term) -> [Term]. + +% Extract guards when multiple left side args are allowed. + +extract_splat_guards([{'when', _, [_ | _] = Args}]) -> + {Left, Right} = split_last(Args), + {Left, extract_or_guards(Right)}; +extract_splat_guards(Else) -> + {Else, []}. + +%% No-op function that can be used for stuff like preventing tail-call +%% optimization to kick in. +noop() -> + ok. -split_last([]) -> {[], []}; -split_last(List) -> split_last(List, []). -split_last([H], Acc) -> {lists:reverse(Acc), H}; -split_last([H|T], Acc) -> split_last(T, [H|Acc]). +split_last([]) -> {[], []}; +split_last(List) -> split_last(List, []). +split_last([H], Acc) -> {lists:reverse(Acc), H}; +split_last([H | T], Acc) -> split_last(T, [H | Acc]). read_file_type(File) -> case file:read_file_info(File) of @@ -43,10 +74,21 @@ read_link_type(File) -> {error, _} = Error -> Error end. +read_mtime_and_size(File) -> + case file:read_file_info(File, [{time, universal}]) of + {ok, #file_info{mtime=Mtime, size=Size}} -> {ok, Mtime, Size}; + {error, _} = Error -> Error + end. + +change_universal_time(Name, {{Y, M, D}, {H, Min, Sec}}=Time) + when is_integer(Y), is_integer(M), is_integer(D), + is_integer(H), is_integer(Min), is_integer(Sec)-> + file:write_file_info(Name, #file_info{mtime=Time}, [{time, universal}]). + relative_to_cwd(Path) -> - case elixir_compiler:get_opt(internal) of - true -> Path; - false -> 'Elixir.String':to_char_list('Elixir.Path':relative_to_cwd(Path)) + case elixir_compiler:get_opt(relative_paths) of + true -> 'Elixir.Path':relative_to_cwd(Path); + false -> Path end. characters_to_list(Data) when is_list(Data) -> @@ -54,7 +96,7 @@ characters_to_list(Data) when is_list(Data) -> characters_to_list(Data) -> case elixir_compiler:get_opt(internal) of true -> unicode:characters_to_list(Data); - false -> 'Elixir.String':to_char_list(Data) + false -> 'Elixir.String':to_charlist(Data) end. characters_to_binary(Data) when is_binary(Data) -> @@ -65,67 +107,45 @@ characters_to_binary(Data) -> false -> 'Elixir.List':to_string(Data) end. -%% elixir to erl. Handles only valid quoted expressions, -%% that's why things like maps and references are not in the list. - -elixir_to_erl(Tree) when is_tuple(Tree) -> - {tuple, 0, [elixir_to_erl(X) || X <- tuple_to_list(Tree)]}; - -elixir_to_erl([]) -> - {nil, 0}; - -elixir_to_erl(<<>>) -> - {bin, 0, []}; +%% Returns the caller as a stacktrace entry. +caller(Line, File, nil, _) -> + {elixir_compiler_0, '__FILE__', 1, stack_location(Line, File)}; +caller(Line, File, Module, nil) -> + {Module, '__MODULE__', 0, stack_location(Line, File)}; +caller(Line, File, Module, {Name, Arity}) -> + {Module, Name, Arity, stack_location(Line, File)}. -elixir_to_erl(Tree) when is_list(Tree) -> - elixir_to_erl_cons_1(Tree, []); +stack_location(Line, File) -> + [{file, elixir_utils:characters_to_list(elixir_utils:relative_to_cwd(File))}, + {line, Line}]. 
-elixir_to_erl(Tree) when is_atom(Tree) -> - {atom, 0, Tree}; - -elixir_to_erl(Tree) when is_integer(Tree) -> - {integer, 0, Tree}; - -elixir_to_erl(Tree) when is_float(Tree) -> - {float, 0, Tree}; - -elixir_to_erl(Tree) when is_binary(Tree) -> - %% Note that our binaries are utf-8 encoded and we are converting - %% to a list using binary_to_list. The reason for this is that Erlang - %% considers a string in a binary to be encoded in latin1, so the bytes - %% are not changed in any fashion. - {bin, 0, [{bin_element, 0, {string, 0, binary_to_list(Tree)}, default, default}]}; - -elixir_to_erl(Function) when is_function(Function) -> - case (erlang:fun_info(Function, type) == {type, external}) andalso - (erlang:fun_info(Function, env) == {env, []}) of - true -> - {module, Module} = erlang:fun_info(Function, module), - {name, Name} = erlang:fun_info(Function, name), - {arity, Arity} = erlang:fun_info(Function, arity), - - {'fun', 0, {function, - {atom, 0, Module}, - {atom, 0, Name}, - {integer, 0, Arity}}}; - false -> - error(badarg) - end; - -elixir_to_erl(Pid) when is_pid(Pid) -> - elixir_utils:erl_call(0, erlang, binary_to_term, - [elixir_utils:elixir_to_erl(term_to_binary(Pid))]); - -elixir_to_erl(_Other) -> - error(badarg). - -elixir_to_erl_cons_1([H|T], Acc) -> elixir_to_erl_cons_1(T, [H|Acc]); -elixir_to_erl_cons_1(Other, Acc) -> elixir_to_erl_cons_2(Acc, elixir_to_erl(Other)). +get_line(Opts) when is_list(Opts) -> + case lists:keyfind(line, 1, Opts) of + {line, Line} when is_integer(Line) -> Line; + _ -> 0 + end. -elixir_to_erl_cons_2([H|T], Acc) -> - elixir_to_erl_cons_2(T, {cons, 0, elixir_to_erl(H), Acc}); -elixir_to_erl_cons_2([], Acc) -> - Acc. +%% Meta location. +%% +%% Macros add a file+keep pair on location keep +%% which we should take into account for error +%% reporting. +%% +%% Returns {binary, integer} on location keep or +%% nil. + +meta_location(Meta) -> + case lists:keyfind(file, 1, Meta) of + {file, MetaFile} when is_binary(MetaFile) -> + MetaLine = + case lists:keyfind(keep, 1, Meta) of + {keep, Keep} when is_integer(Keep) -> Keep; + _ -> 0 + end, + {MetaFile, MetaLine}; + _ -> + nil + end. 
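The guard helpers moved into elixir_utils above split a quoted `when` into the pattern and a list of alternative ("or") guards. A minimal sketch of the expected shapes, using placeholder quoted forms (pattern, g1 and g2 are illustrative AST nodes, and the internal :elixir_utils module is assumed to be loaded as usual):

    # Illustrative placeholders standing in for real quoted expressions.
    pattern = {:x, [], nil}
    g1 = {:>, [], [pattern, 0]}
    g2 = {:<, [], [pattern, 10]}

    # A nested when splits into {pattern, [g1, g2]}; no when means no guards.
    {^pattern, [^g1, ^g2]} =
      :elixir_utils.extract_guards({:when, [], [pattern, {:when, [], [g1, g2]}]})
    {^pattern, []} = :elixir_utils.extract_guards(pattern)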
%% Boolean checks @@ -138,61 +158,33 @@ returns_boolean({{'.', _, [erlang, Op]}, _, [_, _]}) when Op == '=='; Op == '/='; Op == '=<'; Op == '>='; Op == '<'; Op == '>'; Op == '=:='; Op == '=/=' -> true; -returns_boolean({'__op__', _, [Op, _, Right]}) when Op == 'andalso'; Op == 'orelse' -> +returns_boolean({{'.', _, [erlang, Op]}, _, [_, Right]}) when + Op == 'andalso'; Op == 'orelse' -> returns_boolean(Right); returns_boolean({{'.', _, [erlang, Fun]}, _, [_]}) when Fun == is_atom; Fun == is_binary; Fun == is_bitstring; Fun == is_boolean; Fun == is_float; Fun == is_function; Fun == is_integer; Fun == is_list; Fun == is_number; Fun == is_pid; Fun == is_port; Fun == is_reference; - Fun == is_tuple -> true; + Fun == is_tuple; Fun == is_map; Fun == is_process_alive -> true; returns_boolean({{'.', _, [erlang, Fun]}, _, [_, _]}) when - Fun == is_function -> true; + Fun == is_function; Fun == is_record -> true; returns_boolean({{'.', _, [erlang, Fun]}, _, [_, _, _]}) when - Fun == function_exported -> true; + Fun == function_exported; Fun == is_record -> true; returns_boolean({'case', _, [_, [{do, Clauses}]]}) -> lists:all(fun - ({'->',_,[_, Expr]}) -> returns_boolean(Expr) + ({'->', _, [_, Expr]}) -> returns_boolean(Expr) end, Clauses); returns_boolean({'cond', _, [[{do, Clauses}]]}) -> lists:all(fun - ({'->',_,[_, Expr]}) -> returns_boolean(Expr) + ({'->', _, [_, Expr]}) -> returns_boolean(Expr) end, Clauses); returns_boolean({'__block__', [], Exprs}) -> returns_boolean(lists:last(Exprs)); returns_boolean(_) -> false. - -convert_to_boolean(Line, Expr, Bool, S) when is_integer(Line) -> - case {returns_boolean(Expr), Bool} of - {true, true} -> {Expr, S}; - {true, false} -> {{op, Line, 'not', Expr}, S}; - _ -> do_convert_to_boolean(Line, Expr, Bool, S) - end. - -%% Notice we use a temporary var and bundle nil -%% and false checks in the same clause since -%% it makes dialyzer happy. -do_convert_to_boolean(Line, Expr, Bool, S) -> - {Name, _, TS} = elixir_scope:build_var('_', S), - Var = {var, Line, Name}, - Any = {var, Line, '_'}, - OrElse = do_guarded_convert_to_boolean(Line, Var, 'orelse', '=='), - - FalseResult = {atom,Line,not Bool}, - TrueResult = {atom,Line,Bool}, - - {{'case', Line, Expr, [ - {clause, Line, [Var], [[OrElse]], [FalseResult]}, - {clause, Line, [Any], [], [TrueResult]} - ]}, TS}. - -do_guarded_convert_to_boolean(Line, Expr, Op, Comp) -> - Left = {op, Line, Comp, Expr, {atom, Line, false}}, - Right = {op, Line, Comp, Expr, {atom, Line, nil}}, - {op, Line, Op, Left, Right}. diff --git a/lib/elixir/src/elixir_with.erl b/lib/elixir/src/elixir_with.erl new file mode 100644 index 00000000000..836f6e847b4 --- /dev/null +++ b/lib/elixir/src/elixir_with.erl @@ -0,0 +1,105 @@ +-module(elixir_with). +-export([expand/3]). +-include("elixir.hrl"). 
+ +expand(Meta, Args, Env) -> + {Exprs, Opts} = + case elixir_utils:split_last(Args) of + {_, LastArg} = SplitResult when is_list(LastArg) -> + SplitResult; + _ -> + {Args, []} + end, + + {DoExpr, OtherOpts1} = + case lists:keytake(do, 1, Opts) of + {value, {do, DoValue}, RestOpts1} -> + {DoValue, RestOpts1}; + false -> + elixir_errors:form_error(Meta, ?key(Env, file), elixir_expand, {missing_option, 'with', [do]}) + end, + + {ElseExpr, OtherOpts2} = + case lists:keytake(else, 1, OtherOpts1) of + {value, {else, ElseValue}, RestOpts2} -> + assert_clauses(Meta, ElseValue, Env), + {ElseValue, RestOpts2}; + false -> + {nil, OtherOpts1} + end, + + case OtherOpts2 of + [{Key, _} | _] -> + elixir_errors:form_error(Meta, ?key(Env, file), elixir_clauses, {unexpected_option, with, Key}); + [] -> + ok + end, + + ResultCase = + case ElseExpr of + nil -> + {MainCase, _} = build_main_case(Exprs, DoExpr, fun(Ret) -> Ret end, false), + MainCase; + _ -> + Wrapper = fun(Ret) -> {error, Ret} end, + case build_main_case(Exprs, {ok, DoExpr}, Wrapper, false) of + {MainCase, false} -> + Message = + "\"else\" clauses will never match" + " because all patterns in \"with\" will always match", + elixir_errors:warn(?line(Meta), ?key(Env, file), Message), + {{'.', Meta, [erlang, element]}, Meta, [MainCase, 2]}; + {MainCase, true} -> + build_else_case(Meta, MainCase, ElseExpr, Wrapper) + end + end, + elixir_expand:expand(ResultCase, Env). + +%% Helpers + +assert_clauses(_Meta, [], _Env) -> + ok; +assert_clauses(Meta, [{'->', _, [_, _]} | Rest], Env) -> + assert_clauses(Meta, Rest, Env); +assert_clauses(Meta, _Other, Env) -> + elixir_errors:form_error(Meta, ?key(Env, file), elixir_clauses, {bad_or_missing_clauses, {with, else}}). + +build_main_case([{'<-', Meta, [{Name, _, Ctx}, _] = Args} | Rest], DoExpr, Wrapper, HasMatch) + when is_atom(Name) andalso is_atom(Ctx) -> + build_main_case([{'=', Meta, Args} | Rest], DoExpr, Wrapper, HasMatch); +build_main_case([{'<-', Meta, [Left, Right]} | Rest], DoExpr, Wrapper, _HasMatch) -> + {InnerCase, true} = build_main_case(Rest, DoExpr, Wrapper, true), + Generated = ?generated(Meta), + Other = {other, Generated, ?var_context}, + Clauses = [ + {'->', Generated, [[Left], InnerCase]}, + {'->', Generated, [[Other], Wrapper(Other)]} + ], + {{'case', [{export_vars, false} | Meta], [Right, [{do, Clauses}]]}, true}; +build_main_case([Expr | Rest], DoExpr, Wrapper, HasMatch) -> + {InnerCase, InnerHasMatch} = build_main_case(Rest, DoExpr, Wrapper, HasMatch), + {{'__block__', [], [Expr, InnerCase]}, InnerHasMatch}; +build_main_case([], DoExpr, _Wrapper, HasMatch) -> + {DoExpr, HasMatch}. + +build_else_case(Meta, MainCase, Clauses, Wrapper) -> + Generated = ?generated(Meta), + + Return = {return, Generated, ?var_context}, + ReturnClause = {'->', Generated, [[{ok, Return}], Return]}, + + Other = {other, Generated, ?var_context}, + RaiseError = {{'.', Generated, [erlang, error]}, Meta, [{with_clause, Other}]}, + RaiseErrorClause = {'->', Generated, [[Wrapper(Other)], RaiseError]}, + + ClauseWrapper = fun(Clause) -> wrap_clause_pattern(Clause, Wrapper) end, + ResultClauses = [ReturnClause] ++ lists:map(ClauseWrapper, Clauses) ++ [RaiseErrorClause], + {'case', [{export_vars, false} | Meta], [MainCase, [{do, ResultClauses}]]}. + +wrap_clause_pattern({'->', Meta, [[Left], Right]}, Wrapper) -> + {'->', Meta, [[wrap_pattern(Left, Wrapper)], Right]}. + +wrap_pattern({'when', Meta, [Left, Right]}, Wrapper) -> + {'when', Meta, [Wrapper(Left), Right]}; +wrap_pattern(Expr, Wrapper) -> + Wrapper(Expr). 
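For readers following the new elixir_with.erl above: the expansion rewrites `with` into nested `case` expressions where the fall-through branch of each `<-` clause tags the non-matching value, and the `else` clauses (whose patterns are wrapped by the same tag) are checked in a final `case` that raises when nothing matches. Below is a rough, hand-written Elixir illustration of that shape, not the compiler's literal output; `WithSketch` and `fetch/1` are invented for the example.

defmodule WithSketch do
  # A `with` expression that has an `else` block...
  def original(x, y) do
    with {:ok, a} <- fetch(x),
         {:ok, b} <- fetch(y) do
      a + b
    else
      :error -> :not_found
    end
  end

  # ...behaves roughly like this expansion: successes are tagged {:ok, _},
  # failures {:error, _}, and a final case dispatches to the wrapped else
  # clauses or raises a WithClauseError-style error for unmatched values.
  def expanded(x, y) do
    main =
      case fetch(x) do
        {:ok, a} ->
          case fetch(y) do
            {:ok, b} -> {:ok, a + b}
            other -> {:error, other}
          end

        other ->
          {:error, other}
      end

    case main do
      {:ok, return} -> return
      {:error, :error} -> :not_found
      {:error, other} -> :erlang.error({:with_clause, other})
    end
  end

  defp fetch(:a), do: {:ok, 1}
  defp fetch(:b), do: {:ok, 2}
  defp fetch(_), do: :error
end

# WithSketch.original(:a, :b) == WithSketch.expanded(:a, :b)       #=> both return 3
# WithSketch.original(:a, :oops) == WithSketch.expanded(:a, :oops) #=> both return :not_found

When there is no `else` block, the expansion above skips the tagging wrapper entirely, and when none of the `<-` patterns can fail it warns that the `else` clauses will never match.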
diff --git a/lib/elixir/test/doc_test.exs b/lib/elixir/test/doc_test.exs deleted file mode 100644 index 3713782a0b1..00000000000 --- a/lib/elixir/test/doc_test.exs +++ /dev/null @@ -1,42 +0,0 @@ -ExUnit.start [] - -defmodule KernelTest do - use ExUnit.Case, async: true - - doctest Access - doctest Atom - doctest Base - doctest Bitwise - doctest Code - doctest Collectable - doctest Enum - doctest Exception - doctest Float - doctest Inspect - doctest Inspect.Algebra - doctest Integer - doctest IO - doctest IO.ANSI - doctest Kernel - doctest Kernel.SpecialForms - doctest Keyword - doctest List - doctest Macro - doctest Map - doctest Module - doctest Node - doctest OptionParser - doctest Path - doctest Process - doctest Protocol - doctest Range - doctest Record - doctest Regex - doctest Stream - doctest String - doctest String.Chars - doctest StringIO - doctest Tuple - doctest URI - doctest Version -end diff --git a/lib/elixir/test/elixir/access_test.exs b/lib/elixir/test/elixir/access_test.exs index b28993addf3..081f533da65 100644 --- a/lib/elixir/test/elixir/access_test.exs +++ b/lib/elixir/test/elixir/access_test.exs @@ -3,6 +3,8 @@ Code.require_file "test_helper.exs", __DIR__ defmodule AccessTest do use ExUnit.Case, async: true + doctest Access + # Test nil at compilation time does not fail # and that @config[:foo] has proper precedence. @config nil @@ -19,8 +21,11 @@ defmodule AccessTest do test "for nil" do assert nil[:foo] == nil + assert Access.fetch(nil, :foo) == :error assert Access.get(nil, :foo) == nil - assert Access.get_and_update(nil, :foo, fn nil -> {:ok, :bar} end) == {:ok, :bar} + assert_raise ArgumentError, "could not put/update key :foo on a nil value", fn -> + Access.get_and_update(nil, :foo, fn nil -> {:ok, :bar} end) + end end test "for keywords" do @@ -28,9 +33,20 @@ defmodule AccessTest do assert [foo: [bar: :baz]][:foo][:bar] == :baz assert [foo: [bar: :baz]][:fuu][:bar] == nil + assert Access.fetch([foo: :bar], :foo) == {:ok, :bar} + assert Access.fetch([foo: :bar], :bar) == :error + + msg = ~r/the Access calls for keywords expect the key to be an atom/ + assert_raise ArgumentError, msg, fn -> + Access.fetch([], "foo") + end + assert Access.get([foo: :bar], :foo) == :bar assert Access.get_and_update([], :foo, fn nil -> {:ok, :baz} end) == {:ok, [foo: :baz]} assert Access.get_and_update([foo: :bar], :foo, fn :bar -> {:ok, :baz} end) == {:ok, [foo: :baz]} + + assert Access.pop([foo: :bar], :foo) == {:bar, []} + assert Access.pop([], :foo) == {nil, []} end test "for maps" do @@ -39,18 +55,35 @@ defmodule AccessTest do assert %{1.0 => 1.0}[1.0] == 1.0 assert %{1 => 1}[1.0] == nil + assert Access.fetch(%{foo: :bar}, :foo) == {:ok, :bar} + assert Access.fetch(%{foo: :bar}, :bar) == :error + assert Access.get(%{foo: :bar}, :foo) == :bar assert Access.get_and_update(%{}, :foo, fn nil -> {:ok, :baz} end) == {:ok, %{foo: :baz}} assert Access.get_and_update(%{foo: :bar}, :foo, fn :bar -> {:ok, :baz} end) == {:ok, %{foo: :baz}} + + assert Access.pop(%{foo: :bar}, :foo) == {:bar, %{}} + assert Access.pop(%{}, :foo) == {nil, %{}} end - test "for atoms" do - assert_raise Protocol.UndefinedError, ~r"protocol Access not implemented for :foo", fn -> - Access.get(:foo, :bar) + test "for struct" do + defmodule Sample do + defstruct [:name] + end + + assert_raise UndefinedFunctionError, + "function AccessTest.Sample.fetch/2 is undefined (AccessTest.Sample does not implement the Access behaviour)", fn -> + Access.fetch(struct(Sample, []), :name) + end + + assert_raise 
UndefinedFunctionError, + "function AccessTest.Sample.get_and_update/3 is undefined (AccessTest.Sample does not implement the Access behaviour)", fn -> + Access.get_and_update(struct(Sample, []), :name, fn nil -> {:ok, :baz} end) end - assert_raise Protocol.UndefinedError, ~r"protocol Access not implemented for :foo", fn -> - Access.get_and_update(:foo, :bar, fn _ -> {:ok, :baz} end) + assert_raise UndefinedFunctionError, + "function AccessTest.Sample.pop/2 is undefined (AccessTest.Sample does not implement the Access behaviour)", fn -> + Access.pop(struct(Sample, []), :name) end end end diff --git a/lib/elixir/test/elixir/agent_test.exs b/lib/elixir/test/elixir/agent_test.exs index d4d58548806..0cb7e288107 100644 --- a/lib/elixir/test/elixir/agent_test.exs +++ b/lib/elixir/test/elixir/agent_test.exs @@ -3,12 +3,20 @@ Code.require_file "test_helper.exs", __DIR__ defmodule AgentTest do use ExUnit.Case, async: true - test "start_link/2 workflow with unregistered name" do - {:ok, pid} = Agent.start_link(fn -> %{} end) + doctest Agent - {:links, links} = Process.info(self, :links) + def identity(state) do + state + end + + test "start_link/2 workflow with unregistered name and anonymous functions" do + {:ok, pid} = Agent.start_link(&Map.new/0) + + {:links, links} = Process.info(self(), :links) assert pid in links + assert :proc_lib.translate_initial_call(pid) == {Map, :new, 0} + assert Agent.update(pid, &Map.put(&1, :hello, :world)) == :ok assert Agent.get(pid, &Map.get(&1, :hello), 3000) == :world assert Agent.get_and_update(pid, &Map.pop(&1, :hello), 3000) == :world @@ -17,13 +25,14 @@ defmodule AgentTest do wait_until_dead(pid) end - test "start/2 workflow with registered name" do - {:ok, pid} = Agent.start(fn -> %{} end, name: :agent) + test "start/2 workflow with registered name and module functions" do + {:ok, pid} = Agent.start(Map, :new, [], name: :agent) assert Process.info(pid, :registered_name) == {:registered_name, :agent} - assert Agent.cast(:agent, &Map.put(&1, :hello, :world)) == :ok - assert Agent.get(:agent, &Map.get(&1, :hello)) == :world - assert Agent.get_and_update(:agent, &Map.pop(&1, :hello)) == :world - assert Agent.get(:agent, &(&1)) == %{} + assert :proc_lib.translate_initial_call(pid) == {Map, :new, 0} + assert Agent.cast(:agent, Map, :put, [:hello, :world]) == :ok + assert Agent.get(:agent, Map, :get, [:hello]) == :world + assert Agent.get_and_update(:agent, Map, :pop, [:hello]) == :world + assert Agent.get(:agent, AgentTest, :identity, []) == %{} assert Agent.stop(:agent) == :ok assert Process.info(pid, :registered_name) == nil end @@ -31,7 +40,7 @@ defmodule AgentTest do test ":sys.change_code/4 with mfa" do { :ok, pid } = Agent.start_link(fn -> %{} end) :ok = :sys.suspend(pid) - mfa = { Map, :put, [:hello, :world] } + mfa = {Map, :put, [:hello, :world]} assert :sys.change_code(pid, __MODULE__, "vsn", mfa) == :ok :ok = :sys.resume(pid) assert Agent.get(pid, &Map.get(&1, :hello)) == :world diff --git a/lib/elixir/test/elixir/application_test.exs b/lib/elixir/test/elixir/application_test.exs index 20258320fb2..6ba5bd89a4f 100644 --- a/lib/elixir/test/elixir/application_test.exs +++ b/lib/elixir/test/elixir/application_test.exs @@ -3,13 +3,19 @@ Code.require_file "test_helper.exs", __DIR__ defmodule ApplicationTest do use ExUnit.Case, async: true + import PathHelpers + test "application environment" do assert Application.get_env(:elixir, :unknown) == nil assert Application.get_env(:elixir, :unknown, :default) == :default assert Application.fetch_env(:elixir, 
:unknown) == :error + assert_raise ArgumentError, fn -> + Application.fetch_env!(:elixir, :unknown) + end assert Application.put_env(:elixir, :unknown, :known) == :ok assert Application.fetch_env(:elixir, :unknown) == {:ok, :known} + assert Application.fetch_env!(:elixir, :unknown) == :known assert Application.get_env(:elixir, :unknown, :default) == :known assert {:unknown, :known} in Application.get_all_env(:elixir) @@ -17,15 +23,56 @@ defmodule ApplicationTest do assert Application.get_env(:elixir, :unknown, :default) == :default end + test "loaded and started applications" do + started = Application.started_applications + assert is_list(started) + assert {:elixir, 'elixir', _} = List.keyfind(started, :elixir, 0) + + started_timeout = Application.started_applications(7000) + assert is_list(started_timeout) + assert {:elixir, 'elixir', _} = List.keyfind(started_timeout, :elixir, 0) + + loaded = Application.loaded_applications + assert is_list(loaded) + assert {:elixir, 'elixir', _} = List.keyfind(loaded, :elixir, 0) + end + + test "application specification" do + assert is_list Application.spec(:elixir) + assert Application.spec(:unknown) == nil + assert Application.spec(:unknown, :description) == nil + + assert Application.spec(:elixir, :description) == 'elixir' + assert_raise FunctionClauseError, fn -> Application.spec(:elixir, :unknown) end + end + + test "application module" do + assert Application.get_application(String) == :elixir + assert Application.get_application(__MODULE__) == nil + assert Application.get_application(__MODULE__.Unknown) == nil + end + test "application directory" do root = Path.expand("../../../..", __DIR__) - assert Application.app_dir(:elixir) == - Path.join(root, "bin/../lib/elixir") - assert Application.app_dir(:elixir, "priv") == - Path.join(root, "bin/../lib/elixir/priv") + assert normalize_app_dir(Application.app_dir(:elixir)) == + normalize_app_dir(Path.join(root, "bin/../lib/elixir")) + assert normalize_app_dir(Application.app_dir(:elixir, "priv")) == + normalize_app_dir(Path.join(root, "bin/../lib/elixir/priv")) + assert normalize_app_dir(Application.app_dir(:elixir, ["priv", "foo"])) == + normalize_app_dir(Path.join(root, "bin/../lib/elixir/priv/foo")) assert_raise ArgumentError, fn -> Application.app_dir(:unknown) end end + + if windows?() do + defp normalize_app_dir(path) do + path |> String.downcase |> Path.expand + end + else + defp normalize_app_dir(path) do + path |> String.downcase + end + end end diff --git a/lib/elixir/test/elixir/atom_test.exs b/lib/elixir/test/elixir/atom_test.exs index 7d6d5bcb7ff..0f8ea7ff677 100644 --- a/lib/elixir/test/elixir/atom_test.exs +++ b/lib/elixir/test/elixir/atom_test.exs @@ -3,11 +3,13 @@ Code.require_file "test_helper.exs", __DIR__ defmodule AtomTest do use ExUnit.Case, async: true + doctest Atom + test "to_string/1" do assert Atom.to_string(:"héllo") == "héllo" end - test "to_char_list/1" do - assert Atom.to_char_list(:"héllo") == 'héllo' + test "to_charlist/1" do + assert Atom.to_charlist(:"héllo") == 'héllo' end end diff --git a/lib/elixir/test/elixir/base_test.exs b/lib/elixir/test/elixir/base_test.exs index d976bc14173..1fb910c08d8 100644 --- a/lib/elixir/test/elixir/base_test.exs +++ b/lib/elixir/test/elixir/base_test.exs @@ -2,9 +2,12 @@ Code.require_file "test_helper.exs", __DIR__ defmodule BaseTest do use ExUnit.Case, async: true + + doctest Base + import Base - test "encode16" do + test "encode16/1" do assert "" == encode16("") assert "66" == encode16("f") assert "666F" == encode16("fo") @@ -17,7 
+20,7 @@ defmodule BaseTest do assert "a1b2c3d4e5f67891" == encode16(<<161, 178, 195, 212, 229, 246, 120, 145>>, case: :lower) end - test "decode16" do + test "decode16/1" do assert {:ok, ""} == decode16("") assert {:ok, "f"} == decode16("66") assert {:ok, "fo"} == decode16("666F") @@ -25,13 +28,13 @@ defmodule BaseTest do assert {:ok, "foob"} == decode16("666F6F62") assert {:ok, "fooba"} == decode16("666F6F6261") assert {:ok, "foobar"} == decode16("666F6F626172") - assert {:ok, <<161, 178, 195, 212, 229, 246, 120, 145>>} == decode16("A1B2C3D4E5F67891") + assert {:ok, <<161, 178, 195, 212, 229, 246, 120, 145>>} == decode16("A1B2C3D4E5F67891") - assert {:ok, <<161, 178, 195, 212, 229, 246, 120, 145>>} == decode16("a1b2c3d4e5f67891", case: :lower) - assert {:ok, <<161, 178, 195, 212, 229, 246, 120, 145>>} == decode16("a1B2c3D4e5F67891", case: :mixed) + assert {:ok, <<161, 178, 195, 212, 229, 246, 120, 145>>} == decode16("a1b2c3d4e5f67891", case: :lower) + assert {:ok, <<161, 178, 195, 212, 229, 246, 120, 145>>} == decode16("a1B2c3D4e5F67891", case: :mixed) end - test "decode16!" do + test "decode16!/1" do assert "" == decode16!("") assert "f" == decode16!("66") assert "fo" == decode16!("666F") @@ -41,424 +44,710 @@ defmodule BaseTest do assert "foobar" == decode16!("666F6F626172") assert <<161, 178, 195, 212, 229, 246, 120, 145>> == decode16!("A1B2C3D4E5F67891") - assert <<161, 178, 195, 212, 229, 246, 120, 145>> == decode16!("a1b2c3d4e5f67891", case: :lower) - assert <<161, 178, 195, 212, 229, 246, 120, 145>> == decode16!("a1B2c3D4e5F67891", case: :mixed) + assert <<161, 178, 195, 212, 229, 246, 120, 145>> == decode16!("a1b2c3d4e5f67891", case: :lower) + assert <<161, 178, 195, 212, 229, 246, 120, 145>> == decode16!("a1B2c3D4e5F67891", case: :mixed) end - test "decode16 non-alphabet digit" do + test "decode16/1 errors on non-alphabet digit" do assert :error == decode16("66KF") assert :error == decode16("66ff") assert :error == decode16("66FF", case: :lower) end - test "decode16! non-alphabet digit" do - assert_raise ArgumentError, "non-alphabet digit found: K", fn -> + test "decode16!/1 errors on non-alphabet digit" do + assert_raise ArgumentError, "non-alphabet digit found: \"K\" (byte 75)", fn -> decode16!("66KF") end - assert_raise ArgumentError, "non-alphabet digit found: f", fn -> + assert_raise ArgumentError, "non-alphabet digit found: \"f\" (byte 102)", fn -> decode16!("66ff") end - assert_raise ArgumentError, "non-alphabet digit found: F", fn -> + assert_raise ArgumentError, "non-alphabet digit found: \"F\" (byte 70)", fn -> decode16!("66FF", case: :lower) end end - test "decode16 odd-length string" do + test "decode16/1 errors on odd-length string" do assert :error == decode16("666") end - test "decode16! odd-length string" do + test "decode16!/1 errors odd-length string" do assert_raise ArgumentError, "odd-length string", fn -> decode16!("666") end end - test "encode64 empty" do + test "encode64/1 can deal with empty strings" do assert "" == encode64("") end - test "encode64 two pads" do + test "encode64/1 with two pads" do assert "QWxhZGRpbjpvcGVuIHNlc2FtZQ==" == encode64("Aladdin:open sesame") end - test "encode64 one pad" do + test "encode64/1 with one pad" do assert "SGVsbG8gV29ybGQ=" == encode64("Hello World") end - test "encode64 no pad" do + test "encode64/1 with no pad" do assert "QWxhZGRpbjpvcGVuIHNlc2Ft" == encode64("Aladdin:open sesam") assert "MDEyMzQ1Njc4OSFAIzBeJiooKTs6PD4sLiBbXXt9" == encode64(<<"0123456789!@#0^&*();:<>,. 
[]{}">>) end - test "decode64 empty" do + test "encode64/1 with one pad and ignoring padding" do + assert "SGVsbG8gV29ybGQ" == encode64("Hello World", padding: false) + end + + test "encode64/1 with two pads and ignoring padding" do + assert "QWxhZGRpbjpvcGVuIHNlc2FtZQ" == encode64("Aladdin:open sesame", padding: false) + end + + test "encode64/1 with no pads and ignoring padding" do + assert "QWxhZGRpbjpvcGVuIHNlc2Ft" == encode64("Aladdin:open sesam", padding: false) + end + + test "decode64/1 can deal with empty strings" do assert {:ok, ""} == decode64("") end - test "decode64! empty" do + test "decode64!/1 can deal with empty strings" do assert "" == decode64!("") end - test "decode64 two pads" do + test "decode64/1 with two pads" do assert {:ok, "Aladdin:open sesame"} == decode64("QWxhZGRpbjpvcGVuIHNlc2FtZQ==") end - test "decode64! two pads" do + test "decode64!/1 with two pads" do assert "Aladdin:open sesame" == decode64!("QWxhZGRpbjpvcGVuIHNlc2FtZQ==") end - test "decode64 one pad" do + test "decode64/1 with one pad" do assert {:ok, "Hello World"} == decode64("SGVsbG8gV29ybGQ=") end - test "decode64! one pad" do + test "decode64!/1 with one pad" do assert "Hello World" == decode64!("SGVsbG8gV29ybGQ=") end - test "decode64 no pad" do + test "decode64/1 with no pad" do assert {:ok, "Aladdin:open sesam"} == decode64("QWxhZGRpbjpvcGVuIHNlc2Ft") end - test "decode64! no pad" do + test "decode64!/1 with no pad" do assert "Aladdin:open sesam" == decode64!("QWxhZGRpbjpvcGVuIHNlc2Ft") end - test "decode64 non-alphabet digit" do + test "decode64/1 errors on non-alphabet digit" do assert :error == decode64("Zm9)") end - test "decode64! non-alphabet digit" do - assert_raise ArgumentError, "non-alphabet digit found: )", fn -> + test "decode64!/1 errors on non-alphabet digit" do + assert_raise ArgumentError, "non-alphabet digit found: \")\" (byte 41)", fn -> decode64!("Zm9)") end end - test "decode64 incorrect padding" do + test "decode64/1 errors on whitespace unless there's ignore: :whitespace" do + assert :error == decode64("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t") + assert {:ok, "Aladdin:open sesam"} == decode64("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t", ignore: :whitespace) + end + + test "decode64!/1 errors on whitespace unless there's ignore: :whitespace" do + assert_raise ArgumentError, "non-alphabet digit found: \"\\n\" (byte 10)", fn -> + decode64!("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t") + end + assert "Aladdin:open sesam" == decode64!("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t", ignore: :whitespace) + end + + test "decode64/1 errors on incorrect padding" do assert :error == decode64("SGVsbG8gV29ybGQ") end - test "decode64! 
incorrect padding" do + test "decode64!/1 errors on incorrect padding" do assert_raise ArgumentError, "incorrect padding", fn -> decode64!("SGVsbG8gV29ybGQ") end end - test "url_encode64 empty" do + test "decode64/2 with two pads and ignoring padding" do + assert {:ok, "Aladdin:open sesame"} == decode64("QWxhZGRpbjpvcGVuIHNlc2FtZQ", padding: false) + end + + test "decode64!/2 with two pads and ignoring padding" do + assert "Aladdin:open sesame" == decode64!("QWxhZGRpbjpvcGVuIHNlc2FtZQ", padding: false) + end + + test "decode64/2 with one pad and ignoring padding" do + assert {:ok, "Hello World"} == decode64("SGVsbG8gV29ybGQ", padding: false) + end + + test "decode64!/2 with one pad and ignoring padding" do + assert "Hello World" == decode64!("SGVsbG8gV29ybGQ", padding: false) + end + + test "decode64/2 with no pad and ignoring padding" do + assert {:ok, "Aladdin:open sesam"} == decode64("QWxhZGRpbjpvcGVuIHNlc2Ft", padding: false) + end + + test "decode64!/2 with no pad and ignoring padding" do + assert "Aladdin:open sesam" == decode64!("QWxhZGRpbjpvcGVuIHNlc2Ft", padding: false) + end + + test "decode64/2 with incorrect padding and ignoring padding" do + assert {:ok, "Hello World"} == decode64("SGVsbG8gV29ybGQ", padding: false) + end + + test "decode64!/2 with incorrect padding and ignoring padding" do + assert "Hello World" == decode64!("SGVsbG8gV29ybGQ", padding: false) + end + + test "url_encode64/1 can deal with empty strings" do assert "" == url_encode64("") end - test "url_encode64 two pads" do + test "url_encode64/1 with two pads" do assert "QWxhZGRpbjpvcGVuIHNlc2FtZQ==" == url_encode64("Aladdin:open sesame") end - test "url_encode64 one pad" do + test "url_encode64/1 with one pad" do assert "SGVsbG8gV29ybGQ=" == url_encode64("Hello World") end - test "url_encode64 no pad" do + test "url_encode64/1 with no pad" do assert "QWxhZGRpbjpvcGVuIHNlc2Ft" == url_encode64("Aladdin:open sesam") assert "MDEyMzQ1Njc4OSFAIzBeJiooKTs6PD4sLiBbXXt9" == url_encode64(<<"0123456789!@#0^&*();:<>,. []{}">>) end - test "url_encode64 no URL unsafe characters" do - refute "/3/+/A==" == url_encode64(<<255,127,254,252>>) - assert "_3_-_A==" == url_encode64(<<255,127,254,252>>) + test "url_encode64/2 with two pads and ignoring padding" do + assert "QWxhZGRpbjpvcGVuIHNlc2FtZQ" == url_encode64("Aladdin:open sesame", padding: false) end - test "url_decode64 empty" do + test "url_encode64/2 with one pad and ignoring padding" do + assert "SGVsbG8gV29ybGQ" == url_encode64("Hello World", padding: false) + end + + test "url_encode64/2 with no pad and ignoring padding" do + assert "QWxhZGRpbjpvcGVuIHNlc2Ft" == url_encode64("Aladdin:open sesam", padding: false) + end + + test "url_encode64/1 doesn't produce URL-unsafe characters" do + refute "/3/+/A==" == url_encode64(<<255, 127, 254, 252>>) + assert "_3_-_A==" == url_encode64(<<255, 127, 254, 252>>) + end + + test "url_decode64/1 can deal with empty strings" do assert {:ok, ""} == url_decode64("") end - test "url_decode64! empty" do + test "url_decode64!/1 can deal with empty strings" do assert "" == url_decode64!("") end - test "url_decode64 two pads" do + test "url_decode64/1 with two pads" do assert {:ok, "Aladdin:open sesame"} == url_decode64("QWxhZGRpbjpvcGVuIHNlc2FtZQ==") end - test "url_decode64! 
two pads" do + test "url_decode64!/1 with two pads" do assert "Aladdin:open sesame" == url_decode64!("QWxhZGRpbjpvcGVuIHNlc2FtZQ==") end - test "url_decode64 one pad" do + test "url_decode64/1 with one pad" do assert {:ok, "Hello World"} == url_decode64("SGVsbG8gV29ybGQ=") end - test "url_decode64! one pad" do + test "url_decode64!/1 with one pad" do assert "Hello World" == url_decode64!("SGVsbG8gV29ybGQ=") end - test "url_decode64 no pad" do + test "url_decode64/1 with no pad" do assert {:ok, "Aladdin:open sesam"} == url_decode64("QWxhZGRpbjpvcGVuIHNlc2Ft") end - test "url_decode64! no pad" do + test "url_decode64!/1 with no pad" do assert "Aladdin:open sesam" == url_decode64!("QWxhZGRpbjpvcGVuIHNlc2Ft") end - test "url_decode64 non-alphabet digit" do + test "url_decode64/1,2 error on whitespace unless there's ignore: :whitespace" do + assert :error == url_decode64("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t") + assert {:ok, "Aladdin:open sesam"} == url_decode64("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t", ignore: :whitespace) + end + + test "url_decode64!/1,2 error on whitespace unless there's ignore: :whitespace" do + assert_raise ArgumentError, "non-alphabet digit found: \"\\n\" (byte 10)", fn -> + url_decode64!("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t") + end + assert "Aladdin:open sesam" == url_decode64!("\nQWxhZGRp bjpvcGVu\sIHNlc2Ft\t", ignore: :whitespace) + end + + test "url_decode64/1 errors on non-alphabet digit" do assert :error == url_decode64("Zm9)") end - test "url_decode64! non-alphabet digit" do - assert_raise ArgumentError, "non-alphabet digit found: )", fn -> + test "url_decode64!/1 errors on non-alphabet digit" do + assert_raise ArgumentError, "non-alphabet digit found: \")\" (byte 41)", fn -> url_decode64!("Zm9)") end end - test "url_decode64 incorrect padding" do + test "url_decode64/1 errors on incorrect padding" do assert :error == url_decode64("SGVsbG8gV29ybGQ") end - test "url_decode64! 
incorrect padding" do + test "url_decode64!/1 errors on incorrect padding" do assert_raise ArgumentError, "incorrect padding", fn -> url_decode64!("SGVsbG8gV29ybGQ") end end - test "encode32 empty" do + test "url_decode64/2 with two pads and ignoring padding" do + assert {:ok, "Aladdin:open sesame"} == url_decode64("QWxhZGRpbjpvcGVuIHNlc2FtZQ", padding: false) + end + + test "url_decode64!/2 with two pads and ignoring padding" do + assert "Aladdin:open sesame" == url_decode64!("QWxhZGRpbjpvcGVuIHNlc2FtZQ", padding: false) + end + + test "url_decode64/2 with one pad and ignoring padding" do + assert {:ok, "Hello World"} == url_decode64("SGVsbG8gV29ybGQ", padding: false) + end + + test "url_decode64!/2 with one pad and ignoring padding" do + assert "Hello World" == url_decode64!("SGVsbG8gV29ybGQ", padding: false) + end + + test "url_decode64/2 with no pad and ignoring padding" do + assert {:ok, "Aladdin:open sesam"} == url_decode64("QWxhZGRpbjpvcGVuIHNlc2Ft", padding: false) + end + + test "url_decode64!/2 with no pad and ignoring padding" do + assert "Aladdin:open sesam" == url_decode64!("QWxhZGRpbjpvcGVuIHNlc2Ft", padding: false) + end + + test "url_decode64/2 ignores incorrect padding when :padding is false" do + assert {:ok, "Hello World"} == url_decode64("SGVsbG8gV29ybGQ", padding: false) + end + + test "url_decode64!/2 ignores incorrect padding when :padding is false" do + assert "Hello World" == url_decode64!("SGVsbG8gV29ybGQ", padding: false) + end + + test "encode32/1 can deal with empty strings" do assert "" == encode32("") end - test "encode32 one pad" do + test "encode32/1 with one pad" do assert "MZXW6YQ=" == encode32("foob") end - test "encode32 three pads" do + test "encode32/1 with three pads" do assert "MZXW6===" == encode32("foo") end - test "encode32 four pads" do + test "encode32/1 with four pads" do assert "MZXQ====" == encode32("fo") end - test "encode32 six pads" do + test "encode32/1 with six pads" do assert "MZXW6YTBOI======" == encode32("foobar") assert "MY======" == encode32("f") end - test "encode32 no pads" do + test "encode32/1 with no pads" do assert "MZXW6YTB" == encode32("fooba") end - test "encode32 lowercase" do + test "encode32/2 with one pad and ignoring padding" do + assert "MZXW6YQ" == encode32("foob", padding: false) + end + + test "encode32/2 with three pads and ignoring padding" do + assert "MZXW6" == encode32("foo", padding: false) + end + + test "encode32/2 with four pads and ignoring padding" do + assert "MZXQ" == encode32("fo", padding: false) + end + + test "encode32/2 with six pads and ignoring padding" do + assert "MZXW6YTBOI" == encode32("foobar", padding: false) + end + + test "encode32/2 with no pads and ignoring padding" do + assert "MZXW6YTB" == encode32("fooba", padding: false) + end + + test "encode32/2 with lowercase" do assert "mzxw6ytb" == encode32("fooba", case: :lower) end - test "decode32 empty" do + test "decode32/1 can deal with empty strings" do assert {:ok, ""} == decode32("") end - test "decode32! empty" do + test "decode32!/2 can deal with empty strings" do assert "" == decode32!("") end - test "decode32 one pad" do + test "decode32/1 with one pad" do assert {:ok, "foob"} == decode32("MZXW6YQ=") end - test "decode32! one pad" do + test "decode32!/1 with one pad" do assert "foob" == decode32!("MZXW6YQ=") end - test "decode32 three pads" do + test "decode32/1 with three pads" do assert {:ok, "foo"} == decode32("MZXW6===") end - test "decode32! 
three pads" do + test "decode32!/1 with three pads" do assert "foo" == decode32!("MZXW6===") end - test "decode32 four pads" do + test "decode32/1 with four pads" do assert {:ok, "fo"} == decode32("MZXQ====") end - test "decode32! four pads" do + test "decode32!/1 with four pads" do assert "fo" == decode32!("MZXQ====") end - test "decode32 lowercase" do + test "decode32/2 with lowercase" do assert {:ok, "fo"} == decode32("mzxq====", case: :lower) end - test "decode32! lowercase" do + test "decode32!/2 with lowercase" do assert "fo" == decode32!("mzxq====", case: :lower) end - test "decode32 mixed case" do + test "decode32/2 with mixed case" do assert {:ok, "fo"} == decode32("mZXq====", case: :mixed) end - test "decode32! mixed case" do + test "decode32!/2 with mixed case" do assert "fo" == decode32!("mZXq====", case: :mixed) end - test "decode32 six pads" do + test "decode32/1 with six pads" do assert {:ok, "foobar"} == decode32("MZXW6YTBOI======") assert {:ok, "f"} == decode32("MY======") end - test "decode32! six pads" do + test "decode32!/1 with six pads" do assert "foobar" == decode32!("MZXW6YTBOI======") assert "f" == decode32!("MY======") end - test "decode32 no pads" do + test "decode32/1 with no pads" do assert {:ok, "fooba"} == decode32("MZXW6YTB") end - test "decode32! no pads" do + test "decode32!/1 with no pads" do assert "fooba" == decode32!("MZXW6YTB") end - test "decode32 non-alphabet digit" do + test "decode32/1,2 error on non-alphabet digit" do assert :error == decode32("MZX)6YTB") assert :error == decode32("66ff") assert :error == decode32("66FF", case: :lower) end - test "decode32! non-alphabet digit" do - assert_raise ArgumentError, "non-alphabet digit found: )", fn -> + test "decode32!/1,2 error on non-alphabet digit" do + assert_raise ArgumentError, "non-alphabet digit found: \")\" (byte 41)", fn -> decode32!("MZX)6YTB") end - assert_raise ArgumentError, "non-alphabet digit found: m", fn -> + assert_raise ArgumentError, "non-alphabet digit found: \"m\" (byte 109)", fn -> decode32!("mzxw6ytboi======") end - assert_raise ArgumentError, "non-alphabet digit found: M", fn -> + assert_raise ArgumentError, "non-alphabet digit found: \"M\" (byte 77)", fn -> decode32!("MZXW6YTBOI======", case: :lower) end end - test "decode32 incorrect padding" do + test "decode32/1 errors on incorrect padding" do assert :error == decode32("MZXW6YQ") end - test "decode32! 
incorrect padding" do + test "decode32!/1 errors on incorrect padding" do assert_raise ArgumentError, "incorrect padding", fn -> decode32!("MZXW6YQ") end end - test "hex_encode32 empty" do + test "decode32/2 with one pad and :pading to false" do + assert {:ok, "foob"} == decode32("MZXW6YQ", padding: false) + end + + test "decode32!/2 with one pad and :pading to false" do + assert "foob" == decode32!("MZXW6YQ", padding: false) + end + + test "decode32/2 with three pads and ignoring padding" do + assert {:ok, "foo"} == decode32("MZXW6", padding: false) + end + + test "decode32!/2 with three pads and ignoring padding" do + assert "foo" == decode32!("MZXW6", padding: false) + end + + test "decode32/2 with four pads and ignoring padding" do + assert {:ok, "fo"} == decode32("MZXQ", padding: false) + end + + test "decode32!/2 with four pads and ignoring padding" do + assert "fo" == decode32!("MZXQ", padding: false) + end + + test "decode32/2 with :lower case and ignoring padding" do + assert {:ok, "fo"} == decode32("mzxq", case: :lower, padding: false) + end + + test "decode32!/2 with :lower case and ignoring padding" do + assert "fo" == decode32!("mzxq", case: :lower, padding: false) + end + + test "decode32/2 with :mixed case and ignoring padding" do + assert {:ok, "fo"} == decode32("mZXq", case: :mixed, padding: false) + end + + test "decode32!/2 with :mixed case and ignoring padding" do + assert "fo" == decode32!("mZXq", case: :mixed, padding: false) + end + + test "decode32/2 with six pads and ignoring padding" do + assert {:ok, "foobar"} == decode32("MZXW6YTBOI", padding: false) + end + + test "decode32!/2 with six pads and ignoring padding" do + assert "foobar" == decode32!("MZXW6YTBOI", padding: false) + end + + test "decode32/2 with no pads and ignoring padding" do + assert {:ok, "fooba"} == decode32("MZXW6YTB", padding: false) + end + + test "decode32!/2 with no pads and ignoring padding" do + assert "fooba" == decode32!("MZXW6YTB", padding: false) + end + + test "decode32/2 ignores incorrect padding when :padding is false" do + assert {:ok, "foob"} == decode32("MZXW6YQ", padding: false) + end + + test "decode32!/2 ignores incorrect padding when :padding is false" do + "foob" = decode32!("MZXW6YQ", padding: false) + end + + test "hex_encode32/1 can deal with empty strings" do assert "" == hex_encode32("") end - test "hex_encode32 one pad" do + test "hex_encode32/1 with one pad" do assert "CPNMUOG=" == hex_encode32("foob") end - test "hex_encode32 three pads" do + test "hex_encode32/1 with three pads" do assert "CPNMU===" == hex_encode32("foo") end - test "hex_encode32 four pads" do + test "hex_encode32/1 with four pads" do assert "CPNG====" == hex_encode32("fo") end - test "hex_encode32 six pads" do + test "hex_encode32/1 with six pads" do assert "CPNMUOJ1E8======" == hex_encode32("foobar") assert "CO======" == hex_encode32("f") end - test "hex_encode32 no pads" do + test "hex_encode32/1 with no pads" do assert "CPNMUOJ1" == hex_encode32("fooba") end - test "hex_encode32 lowercase" do + test "hex_encode32/2 with one pad and ignoring padding" do + assert "CPNMUOG" == hex_encode32("foob", padding: false) + end + + test "hex_encode32/2 with three pads and ignoring padding" do + assert "CPNMU" == hex_encode32("foo", padding: false) + end + + test "hex_encode32/2 with four pads and ignoring padding" do + assert "CPNG" == hex_encode32("fo", padding: false) + end + + test "hex_encode32/2 with six pads and ignoring padding" do + assert "CPNMUOJ1E8" == hex_encode32("foobar", padding: false) + 
end + + test "hex_encode32/2 with no pads and ignoring padding" do + assert "CPNMUOJ1" == hex_encode32("fooba", padding: false) + end + + test "hex_encode32/2 with lowercase" do assert "cpnmuoj1" == hex_encode32("fooba", case: :lower) end - test "hex_decode32 empty" do + test "hex_decode32/1 can deal with empty strings" do assert {:ok, ""} == hex_decode32("") end - test "hex_decode32! empty" do + test "hex_decode32!/1 can deal with empty strings" do assert "" == hex_decode32!("") end - test "hex_decode32 one pad" do + test "hex_decode32/1 with one pad" do assert {:ok, "foob"} == hex_decode32("CPNMUOG=") end - test "hex_decode32! one pad" do + test "hex_decode32!/1 with one pad" do assert "foob" == hex_decode32!("CPNMUOG=") end - test "hex_decode32 three pads" do + test "hex_decode32/1 with three pads" do assert {:ok, "foo"} == hex_decode32("CPNMU===") end - test "hex_decode32! three pads" do + test "hex_decode32!/1 with three pads" do assert "foo" == hex_decode32!("CPNMU===") end - test "hex_decode32 four pads" do + test "hex_decode32/1 with four pads" do assert {:ok, "fo"} == hex_decode32("CPNG====") end - test "hex_decode32! four pads" do + test "hex_decode32!/1 with four pads" do assert "fo" == hex_decode32!("CPNG====") end - test "hex_decode32 six pads" do + test "hex_decode32/1 with six pads" do assert {:ok, "foobar"} == hex_decode32("CPNMUOJ1E8======") assert {:ok, "f"} == hex_decode32("CO======") end - test "hex_decode32! six pads" do + test "hex_decode32!/1 with six pads" do assert "foobar" == hex_decode32!("CPNMUOJ1E8======") assert "f" == hex_decode32!("CO======") end - test "hex_decode32 no pads" do + test "hex_decode32/1 with no pads" do assert {:ok, "fooba"} == hex_decode32("CPNMUOJ1") end - test "hex_decode32! no pads" do + test "hex_decode32!/1 with no pads" do assert "fooba" == hex_decode32!("CPNMUOJ1") end - test "hex_decode32 non-alphabet digit" do + test "hex_decode32/1,2 error on non-alphabet digit" do assert :error == hex_decode32("CPN)UOJ1") assert :error == hex_decode32("66f") assert :error == hex_decode32("66F", case: :lower) end - test "hex_decode32! non-alphabet digit" do - assert_raise ArgumentError, "non-alphabet digit found: )", fn -> + test "hex_decode32!/1,2 error non-alphabet digit" do + assert_raise ArgumentError, "non-alphabet digit found: \")\" (byte 41)", fn -> hex_decode32!("CPN)UOJ1") end - assert_raise ArgumentError, "non-alphabet digit found: c", fn -> + assert_raise ArgumentError, "non-alphabet digit found: \"c\" (byte 99)", fn -> hex_decode32!("cpnmuoj1e8======") end - assert_raise ArgumentError, "non-alphabet digit found: C", fn -> + assert_raise ArgumentError, "non-alphabet digit found: \"C\" (byte 67)", fn -> hex_decode32!("CPNMUOJ1E8======", case: :lower) end end - test "hex_decode32 incorrect padding" do + test "hex_decode32/1 errors on incorrect padding" do assert :error == hex_decode32("CPNMUOG") end - test "hex_decode32! incorrect padding" do + test "hex_decode32!/1 errors on incorrect padding" do assert_raise ArgumentError, "incorrect padding", fn -> hex_decode32!("CPNMUOG") end end - test "hex_decode32 lowercase" do + test "hex_decode32/2 with lowercase" do assert {:ok, "fo"} == hex_decode32("cpng====", case: :lower) end - test "hex_decode32! lowercase" do + test "hex_decode32!/2 with lowercase" do assert "fo" == hex_decode32!("cpng====", case: :lower) end - test "hex_decode32 mixed case" do + test "hex_decode32/2 with mixed case" do assert {:ok, "fo"} == hex_decode32("cPNg====", case: :mixed) end - test "hex_decode32! 
mixed case" do + test "hex_decode32!/2 with mixed case" do assert "fo" == hex_decode32!("cPNg====", case: :mixed) end + + test "decode16!/1 errors on non-UTF-8 char" do + assert_raise ArgumentError, "non-alphabet digit found: \"\\0\" (byte 0)", fn -> + decode16!("012" <> <<0>>) + end + end + + test "hex_decode32/2 with one pad and ignoring padding" do + assert {:ok, "foob"} == hex_decode32("CPNMUOG", padding: false) + end + + test "hex_decode32!/2 with one pad and ignoring padding" do + assert "foob" == hex_decode32!("CPNMUOG", padding: false) + end + + test "hex_decode32/2 with three pads and ignoring padding" do + assert {:ok, "foo"} == hex_decode32("CPNMU", padding: false) + end + + test "hex_decode32!/2 with three pads and ignoring padding" do + assert "foo" == hex_decode32!("CPNMU", padding: false) + end + + test "hex_decode32/2 with four pads and ignoring padding" do + assert {:ok, "fo"} == hex_decode32("CPNG", padding: false) + end + + test "hex_decode32!/2 with four pads and ignoring padding" do + assert "fo" == hex_decode32!("CPNG", padding: false) + end + + test "hex_decode32/2 with six pads and ignoring padding" do + assert {:ok, "foobar"} == hex_decode32("CPNMUOJ1E8", padding: false) + end + + test "hex_decode32!/2 with six pads and ignoring padding" do + assert "foobar" == hex_decode32!("CPNMUOJ1E8", padding: false) + end + + test "hex_decode32/2 with no pads and ignoring padding" do + assert {:ok, "fooba"} == hex_decode32("CPNMUOJ1", padding: false) + end + + test "hex_decode32!/2 with no pads and ignoring padding" do + assert "fooba" == hex_decode32!("CPNMUOJ1", padding: false) + end + + test "hex_decode32/2 ignores incorrect padding when :padding is false" do + assert {:ok, "foob"} == hex_decode32("CPNMUOG", padding: false) + end + + test "hex_decode32!/2 ignores incorrect padding when :padding is false" do + "foob" = hex_decode32!("CPNMUOG", padding: false) + end + + test "hex_decode32/2 with :lower case and ignoring padding" do + assert {:ok, "fo"} == hex_decode32("cpng", case: :lower, padding: false) + end + + test "hex_decode32!/2 with :lower case and ignoring padding" do + assert "fo" == hex_decode32!("cpng", case: :lower, padding: false) + end + + test "hex_decode32/2 with :mixed case and ignoring padding" do + assert {:ok, "fo"} == hex_decode32("cPNg====", case: :mixed, padding: false) + end + + test "hex_decode32!/2 with :mixed case and ignoring padding" do + assert "fo" == hex_decode32!("cPNg", case: :mixed, padding: false) + end end diff --git a/lib/elixir/test/elixir/behaviour_test.exs b/lib/elixir/test/elixir/behaviour_test.exs deleted file mode 100644 index 3ff575302d5..00000000000 --- a/lib/elixir/test/elixir/behaviour_test.exs +++ /dev/null @@ -1,64 +0,0 @@ -Code.require_file "test_helper.exs", __DIR__ - -defmodule BehaviourTest do - use ExUnit.Case, async: true - - defmodule Sample do - use Behaviour - - @doc "I should be first." - defcallback first(integer) :: integer - - @doc "Foo" - defcallback foo(atom(), binary) :: binary - - @doc "Bar" - defcallback bar(External.hello, my_var :: binary) :: binary - - defcallback guarded(my_var) :: my_var when my_var: binary - - defcallback orr(atom | integer) :: atom - - defcallback literal(123, {atom}, :atom, [integer], true) :: atom - - @doc "I should be last." 
- defmacrocallback last(integer) :: Macro.t - end - - test :docs do - docs = Sample.__behaviour__(:docs) - assert [ - {{:first, 1}, 10, :def, "I should be first."}, - {{:foo, 2}, 13, :def, "Foo"}, - {{:bar, 2}, 16, :def, "Bar"}, - {{:guarded, 1}, 18, :def, nil}, - {{:orr, 1}, 20, :def, nil}, - {{:literal, 5}, 22, :def, nil}, - {{:last, 1}, 25, :defmacro, "I should be last."} - ] = docs - end - - test :callbacks do - assert Sample.__behaviour__(:callbacks) == [first: 1, guarded: 1, "MACRO-last": 2, literal: 5, orr: 1, foo: 2, bar: 2] - end - - test :specs do - assert length(Keyword.get_values(Sample.module_info[:attributes], :callback)) == 7 - end - - test :default_is_not_supported do - assert_raise ArgumentError, fn -> - defmodule WithDefault do - use Behaviour - defcallback hello(num \\ 0 :: integer) :: integer - end - end - - assert_raise ArgumentError, fn -> - defmodule WithDefault do - use Behaviour - defcallback hello(num :: integer \\ 0) :: integer - end - end - end -end diff --git a/lib/elixir/test/elixir/bitwise_test.exs b/lib/elixir/test/elixir/bitwise_test.exs index 17872979d26..7b0b38edf05 100644 --- a/lib/elixir/test/elixir/bitwise_test.exs +++ b/lib/elixir/test/elixir/bitwise_test.exs @@ -2,29 +2,32 @@ Code.require_file "test_helper.exs", __DIR__ defmodule Bitwise.FunctionsTest do use ExUnit.Case, async: true + + doctest Bitwise, import: true + use Bitwise, skip_operators: true - test :bnot do + test "bnot/1" do assert bnot(1) == -2 end - test :band do + test "band/2" do assert band(1, 1) == 1 end - test :bor do + test "bor/2" do assert bor(0, 1) == 1 end - test :bxor do + test "bxor/2" do assert bxor(1, 1) == 0 end - test :bsl do + test "bsl/2" do assert bsl(1, 1) == 2 end - test :bsr do + test "bsr/2" do assert bsr(1, 1) == 0 end end @@ -33,27 +36,27 @@ defmodule Bitwise.OperatorsTest do use ExUnit.Case, async: true use Bitwise, only_operators: true - test :bnot do + test "bnot (~~~)" do assert ~~~1 == -2 end - test :band do + test "band (&&&)" do assert (1 &&& 1) == 1 end - test :bor do + test "bor (|||)" do assert (0 ||| 1) == 1 end - test :bxor do + test "bxor (^^^)" do assert 1 ^^^ 1 == 0 end - test :bsl do + test "bsl (<<<)" do assert (1 <<< 1) == 2 end - test :bsr do + test "bsr (>>>)" do assert (1 >>> 1) == 0 end end diff --git a/lib/elixir/test/elixir/calendar/iso_test.exs b/lib/elixir/test/elixir/calendar/iso_test.exs new file mode 100644 index 00000000000..e2b6c0b9fd8 --- /dev/null +++ b/lib/elixir/test/elixir/calendar/iso_test.exs @@ -0,0 +1,6 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule Calendar.ISOTest do + use ExUnit.Case, async: true + doctest Calendar.ISO +end diff --git a/lib/elixir/test/elixir/calendar_test.exs b/lib/elixir/test/elixir/calendar_test.exs new file mode 100644 index 00000000000..e773d987f7c --- /dev/null +++ b/lib/elixir/test/elixir/calendar_test.exs @@ -0,0 +1,255 @@ +Code.require_file "test_helper.exs", __DIR__ +Code.require_file "fixtures/calendar/julian.exs", __DIR__ + +defmodule FakeCalendar do + def date_to_string(_, _, _), do: "boom" + def time_to_string(_, _, _, _), do: "boom" + def naive_datetime_to_string(_, _, _, _, _, _, _), do: "boom" + def datetime_to_string(_, _, _, _, _, _, _, _, _, _), do: "boom" + def day_rollover_relative_to_midnight_utc, do: {123456, 123457} +end + +defmodule DateTest do + use ExUnit.Case, async: true + doctest Date + + test "to_string/1" do + assert to_string(~D[2000-01-01]) == "2000-01-01" + + date = %{~D[2000-01-01] | calendar: FakeCalendar} + assert to_string(date) == "boom" + end + 
+ test "inspect/1" do + assert inspect(~D[2000-01-01]) == "~D[2000-01-01]" + + date = %{~D[2000-01-01] | calendar: FakeCalendar} + assert inspect(date) == "%Date{calendar: FakeCalendar, day: 1, month: 1, year: 2000}" + end + + test "compare/2" do + date1 = ~D[2000-01-01] + date2 = ~D[2000-01-02] + assert Date.compare(date1, date1) == :eq + assert Date.compare(date1, date2) == :lt + assert Date.compare(date2, date1) == :gt + end + + test "compare/2 across calendars" do + date1 = ~D[2000-01-01] + date2 = Calendar.Julian.date(2000, 01, 01) + assert Date.compare(date1, date2) == :lt + assert Date.compare(date2, date1) == :gt + end + + test "day_of_week/1" do + assert Date.day_of_week(~D[2016-10-31]) == 1 + assert Date.day_of_week(~D[2016-11-01]) == 2 + assert Date.day_of_week(~D[2016-11-02]) == 3 + assert Date.day_of_week(~D[2016-11-03]) == 4 + assert Date.day_of_week(~D[2016-11-04]) == 5 + assert Date.day_of_week(~D[2016-11-05]) == 6 + assert Date.day_of_week(~D[2016-11-06]) == 7 + end + + test "convert/2" do + assert Date.convert(~D[2000-01-01], Calendar.Julian) == + {:ok, Calendar.Julian.date(1999, 12, 19)} + assert (~D[2000-01-01] |> Date.convert!(Calendar.Julian) |> Date.convert!(Calendar.ISO)) == + ~D[2000-01-01] + assert Date.convert(~D[2016-02-03], FakeCalendar) == + {:error, :incompatible_calendars} + end + + test "diff/2" do + assert Date.diff(~D[2000-01-31], ~D[2000-01-01]) == 30 + assert Date.diff(~D[2000-01-01], ~D[2000-01-31]) == -30 + + date1 = ~D[2000-01-01] + date2 = Calendar.Julian.date(2000, 01, 01) + assert Date.diff(date1, date2) == -13 + assert Date.diff(date2, date1) == 13 + end +end + +defmodule TimeTest do + use ExUnit.Case, async: true + doctest Time + + test "to_string/1" do + assert to_string(~T[23:00:07.005]) == "23:00:07.005" + end + + test "inspect/1" do + assert inspect(~T[23:00:07.005]) == "~T[23:00:07.005]" + end + + test "compare/2" do + time0 = ~T[01:01:01.0] + time1 = ~T[01:01:01.005] + time2 = ~T[01:01:01.0050] + time3 = ~T[23:01:01.0050] + assert Time.compare(time0, time1) == :lt + assert Time.compare(time1, time1) == :eq + assert Time.compare(time1, time2) == :eq + assert Time.compare(time1, time3) == :lt + assert Time.compare(time3, time2) == :gt + end +end + +defmodule NaiveDateTimeTest do + use ExUnit.Case, async: true + doctest NaiveDateTime + + test "to_string/1" do + assert to_string(~N[2000-01-01 23:00:07.005]) == "2000-01-01 23:00:07.005" + + ndt = %{~N[2000-01-01 23:00:07.005] | calendar: FakeCalendar} + assert to_string(ndt) == "boom" + end + + test "inspect/1" do + assert inspect(~N[2000-01-01 23:00:07.005]) == "~N[2000-01-01 23:00:07.005]" + + ndt = %{~N[2000-01-01 23:00:07.005] | calendar: FakeCalendar} + assert inspect(ndt) == "%NaiveDateTime{calendar: FakeCalendar, day: 1, hour: 23, " <> + "microsecond: {5000, 3}, minute: 0, month: 1, second: 7, year: 2000}" + end + + test "compare/2" do + ndt1 = ~N[2000-04-16 13:30:15.0049] + ndt2 = ~N[2000-04-16 13:30:15.0050] + ndt3 = ~N[2001-04-16 13:30:15.0050] + assert NaiveDateTime.compare(ndt1, ndt1) == :eq + assert NaiveDateTime.compare(ndt1, ndt2) == :lt + assert NaiveDateTime.compare(ndt2, ndt1) == :gt + assert NaiveDateTime.compare(ndt3, ndt1) == :gt + assert NaiveDateTime.compare(ndt3, ndt2) == :gt + end + + test "to_iso8601/1" do + ndt = ~N[2000-04-16 12:34:15.1234] + ndt = put_in ndt.calendar, FakeCalendar + + message = + "cannot convert #{inspect(ndt)} to target calendar Calendar.ISO, " <> + "reason: #{inspect(ndt.calendar)} and Calendar.ISO have different day rollover moments, " <> + 
"making this conversion ambiguous" + + assert_raise ArgumentError, message, fn -> + NaiveDateTime.to_iso8601(ndt) + end + end + + test "add/2 with other calendars" do + assert ~N[2000-01-01 12:34:15.123456] + |> NaiveDateTime.convert!(Calendar.Julian) + |> NaiveDateTime.add(10, :second) == + %NaiveDateTime{calendar: Calendar.Julian, year: 1999, month: 12, day: 19, + hour: 12, minute: 34, second: 25, microsecond: {123456, 6}} + end + + test "diff/2 with other calendars" do + assert ~N[2000-01-01 12:34:15.123456] + |> NaiveDateTime.convert!(Calendar.Julian) + |> NaiveDateTime.add(10, :second) + |> NaiveDateTime.diff(~N[2000-01-01 12:34:15.123456]) == 10 + end + + test "convert/2" do + assert NaiveDateTime.convert(~N[2000-01-01 12:34:15.1234], Calendar.Julian) == + {:ok, Calendar.Julian.naive_datetime(1999, 12, 19, 12, 34, 15, 123400)} + assert ~N[2000-01-01 12:34:15.123456] + |> NaiveDateTime.convert!(Calendar.Julian) + |> NaiveDateTime.convert!(Calendar.ISO) == + ~N[2000-01-01 12:34:15.123456] + assert NaiveDateTime.convert(~N[2016-02-03 00:00:01], FakeCalendar) == + {:error, :incompatible_calendars} + end +end + +defmodule DateTimeTest do + use ExUnit.Case, async: true + doctest DateTime + + test "to_string/1" do + datetime = %DateTime{ + year: 2000, month: 2, day: 29, zone_abbr: "BRM", + hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + utc_offset: -12600, std_offset: 3600, time_zone: "Brazil/Manaus" + } + assert to_string(datetime) == "2000-02-29 23:00:07-02:30 BRM Brazil/Manaus" + end + + test "from_iso8601/1 handles positive and negative offsets" do + assert DateTime.from_iso8601("2015-01-24T09:50:07-10:00") |> elem(1) == + %DateTime{microsecond: {0, 0}, month: 1, std_offset: 0, time_zone: "Etc/UTC", + utc_offset: 0, year: 2015, zone_abbr: "UTC", day: 24, hour: 19, + minute: 50, second: 7} + + assert DateTime.from_iso8601("2015-01-24T09:50:07+10:00") |> elem(1) == + %DateTime{microsecond: {0, 0}, month: 1, std_offset: 0, time_zone: "Etc/UTC", + utc_offset: 0, year: 2015, zone_abbr: "UTC", day: 23, hour: 23, + minute: 50, second: 7} + end + + test "from_unix/2" do + # with Unix times back to 0 Gregorian seconds + min_datetime = %DateTime{ + calendar: Calendar.ISO, day: 1, hour: 0, microsecond: {0, 0}, + minute: 0, month: 1, second: 0, std_offset: 0, time_zone: "Etc/UTC", + utc_offset: 0, year: 0, zone_abbr: "UTC" + } + assert DateTime.from_unix(-62167219200) == {:ok, min_datetime} + assert DateTime.from_unix(-62167219201) == {:error, :invalid_unix_time} + + max_datetime = %DateTime{ + calendar: Calendar.ISO, day: 31, hour: 23, microsecond: {0, 0}, + minute: 59, month: 12, second: 59, std_offset: 0, time_zone: "Etc/UTC", + utc_offset: 0, year: 9999, zone_abbr: "UTC" + } + + assert DateTime.from_unix(253402300799) == {:ok, max_datetime} + assert DateTime.from_unix(253402300800) == {:error, :invalid_unix_time} + end + + test "from_unix!/2" do + # with Unix times back to 0 Gregorian seconds + datetime = %DateTime{ + calendar: Calendar.ISO, day: 1, hour: 0, microsecond: {0, 0}, + minute: 0, month: 1, second: 0, std_offset: 0, time_zone: "Etc/UTC", + utc_offset: 0, year: 0, zone_abbr: "UTC" + } + assert DateTime.from_unix!(-62167219200) == datetime + + assert_raise ArgumentError, fn -> + DateTime.from_unix!(-62167219201) + end + end + + test "to_unix/2 works with Unix times back to 0 Gregorian seconds" do + # with Unix times back to 0 Gregorian seconds + gregorian_0 = %DateTime{calendar: Calendar.ISO, day: 1, hour: 0, microsecond: {0, 0}, + minute: 0, month: 1, second: 0, std_offset: 0, 
time_zone: "Etc/UTC", + utc_offset: 0, year: 0, zone_abbr: "UTC"} + assert DateTime.to_unix(gregorian_0) == -62167219200 + + before_gregorian_0 = %DateTime{gregorian_0 | year: -1} + assert_raise FunctionClauseError, fn -> + DateTime.to_unix(before_gregorian_0) + end + end + + test "compare/2" do + datetime1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET", + hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"} + datetime2 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT", + hour: 23, minute: 0, second: 7, microsecond: {0, 0}, + utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"} + + assert DateTime.compare(datetime1, datetime1) == :eq + assert DateTime.compare(datetime1, datetime2) == :lt + assert DateTime.compare(datetime2, datetime1) == :gt + end +end diff --git a/lib/elixir/test/elixir/code_test.exs b/lib/elixir/test/elixir/code_test.exs index 645f784548c..09273769b48 100644 --- a/lib/elixir/test/elixir/code_test.exs +++ b/lib/elixir/test/elixir/code_test.exs @@ -2,12 +2,14 @@ Code.require_file "test_helper.exs", __DIR__ defmodule CodeTest do use ExUnit.Case, async: true + + doctest Code + import PathHelpers - def one, do: 1 def genmodule(name) do defmodule name do - Kernel.LexicalTracker.remotes(__MODULE__) + Kernel.LexicalTracker.remote_references(__MODULE__) end end @@ -19,56 +21,63 @@ defmodule CodeTest do Code.eval_quoted contents, [], file: "sample.ex", line: 13 - test :eval_string do - assert Code.eval_string("1 + 2") == {3, []} - assert {3, _} = Code.eval_string("a + b", [a: 1, b: 2], Macro.Env.location(__ENV__)) - end + describe "eval_string/1-3" do + test "correctly evaluates a string of code" do + assert Code.eval_string("1 + 2") == {3, []} + assert Code.eval_string("two = 1 + 1") == {2, [two: 2]} + end - test :eval_string_with_other_context do - assert Code.eval_string("var!(a, Sample) = 1") == {1, [{{:a,Sample},1}]} - end + test "supports a %Macro.Env{} struct as the third argument" do + assert {3, _} = Code.eval_string("a + b", [a: 1, b: 2], __ENV__) + end - test :eval_with_unnamed_scopes do - assert {%RuntimeError{}, [a: %RuntimeError{}]} = - Code.eval_string("a = (try do (raise \"hello\") rescue e -> e end)") - end + test "can return bindings from a different context" do + assert Code.eval_string("var!(a, Sample) = 1") == {1, [{{:a, Sample}, 1}]} + end - test :eval_with_scope do - assert Code.eval_string("one", [], delegate_locals_to: __MODULE__) == {1, []} - end + test "supports unnamed scopes" do + assert {%RuntimeError{}, [a: %RuntimeError{}]} = + Code.eval_string("a = (try do (raise \"hello\") rescue e -> e end)") + end - test :eval_options do - assert Code.eval_string("is_atom(:foo) and K.is_list([])", [], - functions: [{Kernel, [is_atom: 1]}], - macros: [{Kernel, [..: 2, and: 2]}], - aliases: [{K, Kernel}], - requires: [Kernel]) == {true, []} - end + test "supports the :requires option" do + assert Code.eval_string("Kernel.if true, do: :ok", [], requires: [Z, Kernel]) == {:ok, []} + end + + test "with many options" do + options = [ + functions: [{Kernel, [is_atom: 1]}], + macros: [{Kernel, [and: 2]}], + aliases: [{K, Kernel}], + requires: [Kernel], + ] + + code = "is_atom(:foo) and K.is_list([])" - test :eval_stacktrace do - try do - Code.eval_string("<>", a: :a, b: :b) - rescue - _ -> - assert System.stacktrace |> Enum.any?(&(elem(&1, 0) == __MODULE__)) + assert Code.eval_string(code, [], options) == {true, []} end - end - test :eval_with_requires do - assert 
Code.eval_string("Kernel.if true, do: :ok", [], requires: [Z, Kernel]) == {:ok, []} + test "yields the correct stacktrace" do + try do + Code.eval_string("<>", a: :a, b: :b) + rescue + _ -> + assert System.stacktrace |> Enum.any?(&(elem(&1, 0) == __MODULE__)) + end + end end - test :eval_quoted do + test "eval_quoted/1" do assert Code.eval_quoted(quote(do: 1 + 2)) == {3, []} assert CodeTest.Sample.eval_quoted_info() == {CodeTest.Sample, "sample.ex", 13} end - test :eval_quoted_with_env do + test "eval_quoted/2 with a %Macro.Env{} struct as the second argument" do alias :lists, as: MyList - assert Code.eval_quoted(quote(do: MyList.flatten [[1, 2, 3]]), [], __ENV__) == {[1, 2, 3],[]} + assert Code.eval_quoted(quote(do: MyList.flatten [[1, 2, 3]]), [], __ENV__) == {[1, 2, 3], []} end - test :eval_file do + test "eval_file/1" do assert Code.eval_file(fixture_path("code_sample.exs")) == {3, [var: 3]} assert_raise Code.LoadError, fn -> @@ -76,7 +85,7 @@ defmodule CodeTest do end end - test :require do + test "require_file/1" do Code.require_file fixture_path("code_sample.exs") assert fixture_path("code_sample.exs") in Code.loaded_files assert Code.require_file(fixture_path("code_sample.exs")) == nil @@ -86,23 +95,12 @@ defmodule CodeTest do assert Code.require_file(fixture_path("code_sample.exs")) != nil end - test :string_to_quoted do - assert Code.string_to_quoted("1 + 2") == {:ok, {:+, [line: 1], [1, 2]}} - assert Code.string_to_quoted!("1 + 2") == {:+, [line: 1], [1, 2]} - - assert Code.string_to_quoted("a.1") == - {:error, {1, "syntax error before: ", "1"}} - - assert_raise SyntaxError, fn -> - Code.string_to_quoted!("a.1") - end - end - - test :string_to_quoted_existing_atoms_only do - assert :badarg = catch_error(Code.string_to_quoted!(":thereisnosuchatom", existing_atoms_only: true)) + test "string_to_quoted/1" do + assert Code.string_to_quoted("1 + 2") == {:ok, {:+, [line: 1], [1, 2]}} + assert Code.string_to_quoted("a.1") == {:error, {1, "syntax error before: ", "1"}} end - test :string_to_quoted! 
do + test "string_to_quoted!/1 works as string_to_quoted/1 but raises on errors" do assert Code.string_to_quoted!("1 + 2") == {:+, [line: 1], [1, 2]} assert_raise SyntaxError, fn -> @@ -114,56 +112,67 @@ defmodule CodeTest do end end - test :compile_source do - assert __MODULE__.__info__(:compile)[:source] == String.to_char_list(__ENV__.file) + test "string_to_quoted!/2 raises with the :existing_atoms_only option" do + assert catch_error(Code.string_to_quoted!(":there_is_no_such_atom", existing_atoms_only: true)) == :badarg end - test :compile_info_returned_with_source_accessible_through_keyword_module do - compile = __MODULE__.__info__(:compile) - assert Keyword.get(compile, :source) != nil + test "compile source" do + assert __MODULE__.__info__(:compile)[:source] == String.to_charlist(__ENV__.file) end - test :compile_string_works_accross_lexical_scopes do - assert [{CompileCrossSample, _}] = Code.compile_string("CodeTest.genmodule CompileCrossSample") - after - :code.purge CompileCrossSample - :code.delete CompileCrossSample + test "compile info returned with source accessible through keyword module" do + compile = __MODULE__.__info__(:compile) + assert Keyword.get(compile, :source) != nil end - test :compile_string do - assert [{CompileStringSample, _}] = Code.compile_string("defmodule CompileStringSample, do: :ok") - after - :code.purge CompileSimpleSample - :code.delete CompileSimpleSample - end + describe "compile_string/1" do + test "compiles the given string" do + assert [{CompileStringSample, _}] = Code.compile_string("defmodule CompileStringSample, do: :ok") + after + :code.purge CompileSimpleSample + :code.delete CompileSimpleSample + end - test :compile_quoted do - assert [{CompileQuotedSample, _}] = Code.compile_string("defmodule CompileQuotedSample, do: :ok") - after - :code.purge CompileQuotedSample - :code.delete CompileQuotedSample + test "works across lexical scopes" do + assert [{CompileCrossSample, _}] = Code.compile_string("CodeTest.genmodule CompileCrossSample") + after + :code.purge CompileCrossSample + :code.delete CompileCrossSample + end end - test :ensure_loaded? do + test "ensure_loaded?/1" do assert Code.ensure_loaded?(__MODULE__) - refute Code.ensure_loaded?(Unknown.Module) + refute Code.ensure_loaded?(Code.NoFile) end - test :ensure_compiled? 
do + test "ensure_compiled?/1" do assert Code.ensure_compiled?(__MODULE__) - refute Code.ensure_compiled?(Unknown.Module) + refute Code.ensure_compiled?(Code.NoFile) + end + + test "compiler_options/1 validates options" do + message = "unknown compiler option: :not_a_valid_option" + assert_raise RuntimeError, message, fn -> + Code.compiler_options(not_a_valid_option: :foo) + end + + message = "compiler option :debug_info should be a boolean, got: :not_a_boolean" + assert_raise RuntimeError, message, fn -> + Code.compiler_options(debug_info: :not_a_boolean) + end end end defmodule Code.SyncTest do use ExUnit.Case - test :path_manipulation do + test "path manipulation" do path = Path.join(__DIR__, "fixtures") Code.prepend_path path - assert to_char_list(path) in :code.get_path + assert to_charlist(path) in :code.get_path Code.delete_path path - refute to_char_list(path) in :code.get_path + refute to_charlist(path) in :code.get_path end end diff --git a/lib/elixir/test/elixir/collectable_test.exs b/lib/elixir/test/elixir/collectable_test.exs new file mode 100644 index 00000000000..4d2908fb4bf --- /dev/null +++ b/lib/elixir/test/elixir/collectable_test.exs @@ -0,0 +1,7 @@ +Code.require_file "test_helper.exs", __DIR__ + +defmodule CollectableTest do + use ExUnit.Case, async: true + + doctest Collectable +end diff --git a/lib/elixir/test/elixir/dict_test.exs b/lib/elixir/test/elixir/dict_test.exs deleted file mode 100644 index 31e15b447c0..00000000000 --- a/lib/elixir/test/elixir/dict_test.exs +++ /dev/null @@ -1,415 +0,0 @@ -Code.require_file "test_helper.exs", __DIR__ - -# A TestDict implementation used only for testing. -defmodule TestDict do - defstruct list: [] - - def new(list \\ []) when is_list(list) do - %TestDict{list: list} - end - - def size(%TestDict{list: list}) do - length(list) - end - - def update(%TestDict{list: list} = map, key, initial, fun) do - %{map | list: update(list, key, initial, fun)} - end - - def update([{key, value}|list], key, _initial, fun) do - [{key, fun.(value)}|list] - end - - def update([{_, _} = e|list], key, initial, fun) do - [e|update(list, key, initial, fun)] - end - - def update([], key, initial, _fun) do - [{key, initial}] - end - - defimpl Enumerable do - def reduce(%{list: list}, acc, fun), do: Enumerable.List.reduce(list, acc, fun) - def member?(%{list: list}, other), do: Enumerable.List.member(list, other) - def count(%{list: list}), do: Enumerable.List.count(list) - end -end - -defmodule DictTest.Common do - defmacro __using__(_) do - quote location: :keep do - import Enum, only: [sort: 1] - - defp new_dict(list \\ [{"first_key", 1}, {"second_key", 2}]) do - Enum.into list, dict_impl.new - end - - defp new_dict(list, transform) do - Enum.into list, dict_impl.new, transform - end - - defp int_dict do - Enum.into [{1,1}], dict_impl.new - end - - test "access" do - dict = new_dict() - assert dict["first_key"] == 1 - assert dict["other_key"] == nil - end - - test "access uses match operation" do - dict = int_dict() - assert dict[1] == 1 - assert dict[1.0] == nil - end - - test "get/2 and get/3" do - dict = new_dict() - assert Dict.get(dict, "first_key") == 1 - assert Dict.get(dict, "second_key") == 2 - assert Dict.get(dict, "other_key") == nil - assert Dict.get(dict, "other_key", 3) == 3 - end - - test "get/2 with match" do - assert Dict.get(int_dict, 1) == 1 - assert Dict.get(int_dict, 1.0) == nil - end - - test "fetch/2" do - dict = new_dict() - assert Dict.fetch(dict, "first_key") == {:ok, 1} - assert Dict.fetch(dict, "second_key") == {:ok, 2} - 
assert Dict.fetch(dict, "other_key") == :error - end - - test "fetch/2 with match" do - assert Dict.fetch(int_dict, 1) == {:ok, 1} - assert Dict.fetch(int_dict, 1.0) == :error - end - - test "fetch!/2" do - dict = new_dict() - assert Dict.fetch!(dict, "first_key") == 1 - assert Dict.fetch!(dict, "second_key") == 2 - assert_raise KeyError, fn -> - Dict.fetch!(dict, "other_key") - end - end - - test "put/3" do - dict = new_dict() |> Dict.put("first_key", {1}) - assert Dict.get(dict, "first_key") == {1} - assert Dict.get(dict, "second_key") == 2 - end - - test "put/3 with_match" do - dict = int_dict() - assert Dict.get(Dict.put(dict, 1, :other), 1) == :other - assert Dict.get(Dict.put(dict, 1.0, :other), 1) == 1 - assert Dict.get(Dict.put(dict, 1, :other), 1.0) == nil - assert Dict.get(Dict.put(dict, 1.0, :other), 1.0) == :other - end - - test "put_new/3" do - dict = Dict.put_new(new_dict(), "first_key", {1}) - assert Dict.get(dict, "first_key") == 1 - end - - test "put_new/3 with_match" do - assert Dict.get(Dict.put_new(int_dict, 1, :other), 1) == 1 - assert Dict.get(Dict.put_new(int_dict, 1.0, :other), 1) == 1 - assert Dict.get(Dict.put_new(int_dict, 1, :other), 1.0) == nil - assert Dict.get(Dict.put_new(int_dict, 1.0, :other), 1.0) == :other - end - - test "keys/1" do - assert Enum.sort(Dict.keys(new_dict())) == ["first_key", "second_key"] - assert Dict.keys(new_dict([])) == [] - end - - test "values/1" do - assert Enum.sort(Dict.values(new_dict())) == [1, 2] - assert Dict.values(new_dict([])) == [] - end - - test "delete/2" do - dict = Dict.delete(new_dict(), "second_key") - assert Dict.size(dict) == 1 - assert Dict.has_key?(dict, "first_key") - refute Dict.has_key?(dict, "second_key") - - dict = Dict.delete(new_dict(), "other_key") - assert dict == new_dict() - assert Dict.size(dict) == 2 - end - - test "delete/2 with match" do - assert Dict.get(Dict.delete(int_dict, 1), 1) == nil - assert Dict.get(Dict.delete(int_dict, 1.0), 1) == 1 - end - - test "merge/2" do - dict = new_dict() - assert Dict.merge(new_dict([]), dict) == dict - assert Dict.merge(dict, new_dict([])) == dict - assert Dict.merge(dict, dict) == dict - assert Dict.merge(new_dict([]), new_dict([])) == new_dict([]) - - dict1 = new_dict [{"a", 1}, {"b", 2}, {"c", 3}] - dict2 = new_dict [{"a", 3}, {"c", :a}, {"d", 0}] - assert Dict.merge(dict1, dict2) |> Enum.sort == - [{"a", 3}, {"b", 2}, {"c", :a}, {"d", 0}] - end - - test "merge/2 with other dict" do - dict1 = new_dict [{"a", 1}, {"b", 2}, {"c", 3}] - dict2 = TestDict.new [{"a",3}, {"c",:a}, {"d",0}] - actual = Dict.merge(dict1, dict2) - assert Dict.merge(dict1, dict2) |> Enum.sort == - [{"a", 3}, {"b", 2}, {"c", :a}, {"d", 0}] - assert Dict.merge(dict2, dict1) |> Enum.sort == - [{"a", 1}, {"b", 2}, {"c", 3}, {"d", 0}] - end - - test "merge/3" do - dict1 = new_dict [{"a", 1}, {"b", 2}] - dict2 = new_dict [{"a", 3}, {"d", 4}] - actual = Dict.merge dict1, dict2, fn _k, v1, v2 -> v1 + v2 end - assert Enum.sort(actual) == [{"a", 4}, {"b", 2}, {"d", 4}] - end - - test "has_key?/2" do - dict = new_dict() - assert Dict.has_key?(dict, "first_key") - refute Dict.has_key?(dict, "other_key") - end - - test "has_key?/2 with match" do - assert Dict.has_key?(int_dict, 1) - refute Dict.has_key?(int_dict, 1.0) - end - - test "size/1" do - assert Dict.size(new_dict()) == 2 - assert Dict.size(new_dict([])) == 0 - end - - test "update!/3" do - dict = Dict.update!(new_dict(), "first_key", fn val -> -val end) - assert Dict.get(dict, "first_key") == -1 - - assert_raise KeyError, fn -> - 
Dict.update!(new_dict(), "non-existent", fn val -> -val end) - end - end - - test "update!/3 with match" do - assert Dict.get(Dict.update!(int_dict(), 1, &(&1 + 1)), 1) == 2 - end - - test "update/4" do - dict = Dict.update(new_dict(), "first_key", 0, fn val -> -val end) - assert Dict.get(dict, "first_key") == -1 - - dict = Dict.update(new_dict(), "non-existent", "...", fn val -> -val end) - assert Dict.get(dict, "non-existent") == "..." - end - - test "update/4 with match" do - dict = int_dict() - assert Dict.get(Dict.update(dict, 1.0, 2, &(&1 + 1)), 1) == 1 - assert Dict.get(Dict.update(dict, 1.0, 2, &(&1 + 1)), 1.0) == 2 - end - - test "pop/2 and pop/3" do - dict = new_dict() - - {v, actual} = Dict.pop(dict, "first_key") - assert v == 1 - assert actual == new_dict([{"second_key", 2}]) - - {v, actual} = Dict.pop(dict, "other_key") - assert v == nil - assert dict == actual - - {v, actual} = Dict.pop(dict, "other_key", "default") - assert v == "default" - assert dict == actual - end - - test "pop/2 and pop/3 with match" do - dict = int_dict() - - {v, actual} = Dict.pop(dict, 1) - assert v == 1 - assert Enum.sort(actual) == [] - - {v, actual} = Dict.pop(dict, 1.0) - assert v == nil - assert actual == dict - end - - test "split/2" do - dict = new_dict() - - {take, drop} = Dict.split(dict, []) - assert take == new_dict([]) - assert drop == dict - - {take, drop} = Dict.split(dict, ["unknown_key"]) - assert take == new_dict([]) - assert drop == dict - - split_keys = ["first_key", "second_key", "unknown_key"] - {take, drop} = Dict.split(dict, split_keys) - - take_expected = new_dict([]) - |> Dict.put("first_key", 1) - |> Dict.put("second_key", 2) - - drop_expected = new_dict([]) - |> Dict.delete("first_key") - |> Dict.delete("second_key") - - assert Enum.sort(take) == Enum.sort(take_expected) - assert Enum.sort(drop) == Enum.sort(drop_expected) - end - - test "split/2 with match" do - dict = int_dict() - {take, drop} = Dict.split(dict, [1]) - assert take == dict - assert drop == new_dict([]) - - {take, drop} = Dict.split(dict, [1.0]) - assert take == new_dict([]) - assert drop == dict - end - - test "split/2 with enum" do - dict = int_dict() - {take, drop} = Dict.split(dict, 1..3) - assert take == dict - assert drop == new_dict([]) - end - - test "take/2" do - dict = new_dict() - take = Dict.take(dict, ["unknown_key"]) - assert take == new_dict([]) - - take = Dict.take(dict, ["first_key"]) - assert take == new_dict([{"first_key", 1}]) - end - - test "take/2 with match" do - dict = int_dict() - assert Dict.take(dict, [1]) == dict - assert Dict.take(dict, [1.0]) == new_dict([]) - end - - test "take/2 with enum" do - dict = int_dict() - assert Dict.take(dict, 1..3) == dict - end - - test "drop/2" do - dict = new_dict() - drop = Dict.drop(dict, ["unknown_key"]) - assert drop == dict - - drop = Dict.drop(dict, ["first_key"]) - assert drop == new_dict([{"second_key", 2}]) - end - - test "drop/2 with match" do - dict = int_dict() - assert Dict.drop(dict, [1]) == new_dict([]) - assert Dict.drop(dict, [1.0]) == dict - end - - test "drop/2 with enum" do - dict = int_dict() - assert Dict.drop(dict, 1..3) == new_dict([]) - end - - test "equal?/2" do - dict1 = new_dict(a: 2, b: 3, f: 5, c: 123) - dict2 = new_dict(a: 2, b: 3, f: 5, c: 123) - assert dict_impl.equal?(dict1, dict2) - assert Dict.equal?(dict1, dict2) - - dict2 = Dict.put(dict2, :a, 3) - refute dict_impl.equal?(dict1, dict2) - refute Dict.equal?(dict1, dict2) - - dict3 = [a: 2, b: 3, f: 5, c: 123, z: 666] - refute Dict.equal?(dict1, dict3) - 
refute Dict.equal?(dict3, dict1) - end - - test "equal?/2 with match" do - dict1 = new_dict([{1,1}]) - dict2 = new_dict([{1.0,1}]) - assert Dict.equal?(dict1, dict1) - refute Dict.equal?(dict1, dict2) - end - - test "equal?/2 with other dict" do - dict = new_dict([{1,1}]) - assert Dict.equal?(dict, TestDict.new([{1,1}])) - refute Dict.equal?(dict, TestDict.new([{1.0,1}])) - end - - test "is enumerable" do - dict = new_dict() - assert Enum.empty?(new_dict([])) - refute Enum.empty?(dict) - assert Enum.member?(dict, {"first_key", 1}) - refute Enum.member?(dict, {"first_key", 2}) - assert Enum.count(dict) == 2 - assert Enum.reduce(dict, 0, fn({k, v}, acc) -> v + acc end) == 3 - end - - test "is collectable" do - dict = new_dict() - assert Dict.size(dict) == 2 - assert Enum.sort(dict) == [{"first_key", 1}, {"second_key", 2}] - - dict = new_dict([{1}, {2}, {3}], fn {x} -> {<>, x} end) - assert Dict.size(dict) == 3 - assert Enum.sort(dict) == [{"A", 1}, {"B", 2}, {"C", 3}] - - assert Collectable.empty(new_dict) == new_dict([]) - end - - test "is zippable" do - dict = new_dict() - list = Dict.to_list(dict) - assert Enum.zip(list, list) == Enum.zip(dict, dict) - - dict = new_dict(1..120, fn i -> {i, i} end) - list = Dict.to_list(dict) - assert Enum.zip(list, list) == Enum.zip(dict, dict) - end - end - end -end - -defmodule Dict.HashDictTest do - use ExUnit.Case, async: true - use DictTest.Common - - doctest Dict - defp dict_impl, do: HashDict -end - -defmodule Dict.MapDictTest do - use ExUnit.Case, async: true - use DictTest.Common - - doctest Dict - defp dict_impl, do: Map -end diff --git a/lib/elixir/test/elixir/enum_test.exs b/lib/elixir/test/elixir/enum_test.exs index 779d73605fa..c47b8a0492e 100644 --- a/lib/elixir/test/elixir/enum_test.exs +++ b/lib/elixir/test/elixir/enum_test.exs @@ -1,53 +1,43 @@ Code.require_file "test_helper.exs", __DIR__ -defmodule EnumTest.List do +defmodule EnumTest do use ExUnit.Case, async: true + doctest Enum - test :empty? do - assert Enum.empty?([]) - refute Enum.empty?([1, 2, 3]) - refute Enum.empty?(1..3) - end - - test :member? do - assert Enum.member?([1, 2, 3], 2) - refute Enum.member?([], 0) - refute Enum.member?([1, 2, 3], 0) - assert Enum.member?(1..3, 2) - refute Enum.member?(1..3, 0) - end - - test :count do - assert Enum.count([1, 2, 3]) == 3 - assert Enum.count([]) == 0 - end + defp assert_runs_enumeration_only_once(enum_fun) do + enumerator = Stream.map([:element], fn element -> + send(self(), element) + element + end) - test :count_fun do - assert Enum.count([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == 1 - assert Enum.count([], fn(x) -> rem(x, 2) == 0 end) == 0 + enum_fun.(enumerator) + assert_received :element + refute_received :element end - test :all? do - assert Enum.all?([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) - refute Enum.all?([2, 3, 4], fn(x) -> rem(x, 2) == 0 end) - + test "all?/2" do assert Enum.all?([2, 4, 6]) refute Enum.all?([2, nil, 4]) - assert Enum.all?([]) + + assert Enum.all?([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) + refute Enum.all?([2, 3, 4], fn(x) -> rem(x, 2) == 0 end) end - test :any? 
do + test "any?/2" do refute Enum.any?([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) assert Enum.any?([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) refute Enum.any?([false, false, false]) assert Enum.any?([false, true, false]) + assert Enum.any?([:foo, false, false]) + refute Enum.any?([false, nil, false]) + refute Enum.any?([]) end - test :at do + test "at/3" do assert Enum.at([2, 4, 6], 0) == 2 assert Enum.at([2, 4, 6], 2) == 6 assert Enum.at([2, 4, 6], 4) == nil @@ -56,42 +46,98 @@ defmodule EnumTest.List do assert Enum.at([2, 4, 6], -4) == nil end - test :concat_1 do + test "chunk/2" do + assert Enum.chunk([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4]] + end + + test "chunk/4" do + assert Enum.chunk([1, 2, 3, 4, 5], 2, 2, [6]) == [[1, 2], [3, 4], [5, 6]] + assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2) == [[1, 2, 3], [3, 4, 5]] + assert Enum.chunk([1, 2, 3, 4, 5, 6], 2, 3) == [[1, 2], [4, 5]] + assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2, []) == [[1, 2, 3], [3, 4, 5], [5, 6]] + assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 3, []) == [[1, 2, 3], [4, 5, 6]] + assert Enum.chunk([1, 2, 3, 4, 5], 4, 4, 6..10) == [[1, 2, 3, 4], [5, 6, 7, 8]] + end + + test "chunk_by/2" do + assert Enum.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1)) == [[1], [2, 2], [3], [4, 4, 6], [7, 7]] + assert Enum.chunk_by([1, 2, 3, 4], fn _ -> true end) == [[1, 2, 3, 4]] + assert Enum.chunk_by([], fn _ -> true end) == [] + assert Enum.chunk_by([1], fn _ -> true end) == [[1]] + end + + test "chunk_by/4" do + chunk_fun = fn i, acc -> + if rem(i, 2) == 0 do + {:cont, Enum.reverse([i | acc]), []} + else + {:cont, [i | acc]} + end + end + + after_fun = fn + [] -> {:cont, []} + acc -> {:cont, Enum.reverse(acc), []} + end + + assert Enum.chunk_by([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], [], chunk_fun, after_fun) == + [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]] + assert Enum.chunk_by(0..10, [], chunk_fun, after_fun) == + [[0], [1, 2], [3, 4], [5, 6], [7, 8], [9, 10]] + assert Enum.chunk_by(0..11, [], chunk_fun, after_fun) == + [[0], [1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11]] + end + + test "concat/1" do assert Enum.concat([[1, [2], 3], [4], [5, 6]]) == [1, [2], 3, 4, 5, 6] - assert Enum.concat(1..3, []) == [1,2,3] assert Enum.concat([[], []]) == [] assert Enum.concat([[]]) == [] assert Enum.concat([]) == [] - - assert Enum.concat([1..5, fn acc, _ -> acc end, [1]]) == [1,2,3,4,5,1] end - test :concat_2 do + test "concat/2" do assert Enum.concat([], [1]) == [1] assert Enum.concat([1, [2], 3], [4, 5]) == [1, [2], 3, 4, 5] - assert Enum.concat(1..3, []) == [1,2,3] + + assert Enum.concat([1, 2], 3..5) == [1, 2, 3, 4, 5] assert Enum.concat([], []) == [] + assert Enum.concat([], 1..3) == [1, 2, 3] assert Enum.concat(fn acc, _ -> acc end, [1]) == [1] end - test :fetch! 
do - assert Enum.fetch!([2, 4, 6], 0) == 2 - assert Enum.fetch!([2, 4, 6], 2) == 6 - assert Enum.fetch!([2, 4, 6], -2) == 4 + test "count/1" do + assert Enum.count([1, 2, 3]) == 3 + assert Enum.count([]) == 0 + assert Enum.count([1, true, false, nil]) == 4 + end - assert_raise Enum.OutOfBoundsError, fn -> - Enum.fetch!([2, 4, 6], 4) - end + test "count/2" do + assert Enum.count([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == 1 + assert Enum.count([], fn(x) -> rem(x, 2) == 0 end) == 0 + assert Enum.count([1, true, false, nil], & &1) == 2 + end - assert_raise Enum.OutOfBoundsError, fn -> - Enum.fetch!([2, 4, 6], -4) - end + test "dedup/1" do + assert Enum.dedup([1, 1, 2, 1, 1, 2, 1]) == [1, 2, 1, 2, 1] + assert Enum.dedup([2, 1, 1, 2, 1]) == [2, 1, 2, 1] + assert Enum.dedup([1, 2, 3, 4]) == [1, 2, 3, 4] + assert Enum.dedup([1, 1.0, 2.0, 2]) == [1, 1.0, 2.0, 2] + assert Enum.dedup([]) == [] + assert Enum.dedup([nil, nil, true, {:value, true}]) == [nil, true, {:value, true}] + assert Enum.dedup([nil]) == [nil] end - test :drop do + test "dedup_by/2" do + assert Enum.dedup_by([{1, :x}, {2, :y}, {2, :z}, {1, :x}], fn {x, _} -> x end) + == [{1, :x}, {2, :y}, {1, :x}] + + assert Enum.dedup_by([5, 1, 2, 3, 2, 1], fn x -> x > 2 end) == [5, 1, 3, 2] + end + + test "drop/2" do assert Enum.drop([1, 2, 3], 0) == [1, 2, 3] assert Enum.drop([1, 2, 3], 1) == [2, 3] assert Enum.drop([1, 2, 3], 2) == [3] @@ -103,184 +149,482 @@ defmodule EnumTest.List do assert Enum.drop([], 3) == [] end - test :drop_while do + test "drop_every/2" do + assert Enum.drop_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 2) == [2, 4, 6, 8, 10] + assert Enum.drop_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3) == [2, 3, 5, 6, 8, 9] + assert Enum.drop_every([], 2) == [] + assert Enum.drop_every([1, 2], 2) == [2] + assert Enum.drop_every([1, 2, 3], 0) == [1, 2, 3] + assert_raise FunctionClauseError, fn -> + Enum.drop_every([1, 2, 3], -1) + end + end + + test "drop_while/2" do assert Enum.drop_while([1, 2, 3, 4, 3, 2, 1], fn(x) -> x <= 3 end) == [4, 3, 2, 1] assert Enum.drop_while([1, 2, 3], fn(_) -> false end) == [1, 2, 3] assert Enum.drop_while([1, 2, 3], fn(x) -> x <= 3 end) == [] assert Enum.drop_while([], fn(_) -> false end) == [] end - test :find do - assert Enum.find([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil - assert Enum.find([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end) == 0 - assert Enum.find([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) == 3 - end - - test :find_value do - assert Enum.find_value([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil - assert Enum.find_value([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end) == 0 - assert Enum.find_value([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) + test "each/2" do + try do + assert Enum.each([], fn(x) -> x end) == :ok + assert Enum.each([1, 2, 3], fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok + assert Process.get(:enum_test_each) == 6 + after + Process.delete(:enum_test_each) + end end - test :find_index do - assert Enum.find_index([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil - assert Enum.find_index([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) == 1 + test "empty?/1" do + assert Enum.empty?([]) + refute Enum.empty?([1, 2, 3]) + refute Enum.empty?(1..3) end - test :each do - assert Enum.each([], fn(x) -> x end) == :ok - assert Enum.each([1, 2, 3], fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok - assert Process.get(:enum_test_each) == 6 - after - Process.delete(:enum_test_each) - end + test "fetch/2" do + assert Enum.fetch([66], 0) == {:ok, 66} + assert Enum.fetch([66], -1) == {:ok, 66} + assert 
Enum.fetch([66], 1) == :error + assert Enum.fetch([66], -2) == :error - test :fetch do assert Enum.fetch([2, 4, 6], 0) == {:ok, 2} + assert Enum.fetch([2, 4, 6], -1) == {:ok, 6} assert Enum.fetch([2, 4, 6], 2) == {:ok, 6} assert Enum.fetch([2, 4, 6], 4) == :error assert Enum.fetch([2, 4, 6], -2) == {:ok, 4} assert Enum.fetch([2, 4, 6], -4) == :error + + assert Enum.fetch([], 0) == :error + assert Enum.fetch([], 1) == :error end - test :filter do + test "fetch!/2" do + assert Enum.fetch!([2, 4, 6], 0) == 2 + assert Enum.fetch!([2, 4, 6], 2) == 6 + assert Enum.fetch!([2, 4, 6], -2) == 4 + + assert_raise Enum.OutOfBoundsError, fn -> + Enum.fetch!([2, 4, 6], 4) + end + + assert_raise Enum.OutOfBoundsError, fn -> + Enum.fetch!([2, 4, 6], -4) + end + end + + test "filter/2" do assert Enum.filter([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == [2] assert Enum.filter([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == [2, 4, 6] - end - test :filter_with_match do + assert Enum.filter([1, 2, false, 3, nil], & &1) == [1, 2, 3] assert Enum.filter([1, 2, 3], &match?(1, &1)) == [1] assert Enum.filter([1, 2, 3], &match?(x when x < 3, &1)) == [1, 2] - assert Enum.filter([1, 2, 3], &match?(_, &1)) == [1, 2, 3] + assert Enum.filter([1, 2, 3], fn _ -> true end) == [1, 2, 3] end - test :filter_map do - assert Enum.filter_map([1, 2, 3], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4] - assert Enum.filter_map([2, 4, 6], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4, 8, 12] + test "find/3" do + assert Enum.find([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil + assert Enum.find([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end) == 0 + assert Enum.find([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) == 3 end - test :flat_map do + test "find_index/2" do + assert Enum.find_index([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil + assert Enum.find_index([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) == 1 + assert Stream.take(1..3, 3) |> Enum.find_index(fn _ -> false end) == nil + assert Stream.take(1..6, 6) |> Enum.find_index(fn x -> x == 5 end) == 4 + end + + test "find_value/2" do + assert Enum.find_value([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil + assert Enum.find_value([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end) == 0 + assert Enum.find_value([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) + end + + test "flat_map/2" do assert Enum.flat_map([], fn(x) -> [x, x] end) == [] assert Enum.flat_map([1, 2, 3], fn(x) -> [x, x] end) == [1, 1, 2, 2, 3, 3] assert Enum.flat_map([1, 2, 3], fn(x) -> x..x+1 end) == [1, 2, 2, 3, 3, 4] end - test :flat_map_reduce do + test "flat_map_reduce/3" do assert Enum.flat_map_reduce([1, 2, 3], 0, &{[&1, &2], &1 + &2}) == {[1, 0, 2, 1, 3, 3], 6} - - assert Enum.flat_map_reduce(1..100, 0, fn i, acc -> - if acc < 3, do: {[i], acc + 1}, else: {:halt, acc} - end) == {[1,2,3], 3} end - test :group_by do - assert Enum.group_by([], fn -> nil end) == %{} - assert Enum.group_by(1..6, &rem(&1, 3)) == - %{0 => [6, 3], 1 => [4, 1], 2 => [5, 2]} + test "group_by/3" do + assert Enum.group_by([], fn _ -> raise "oops" end) == %{} + assert Enum.group_by([1, 2, 3], &rem(&1, 2)) == %{0 => [2], 1 => [1, 3]} + end - result = Enum.group_by(1..6, %{3 => :default}, &rem(&1, 3)) - assert result[0] == [6, 3] - assert result[3] == :default + test "intersperse/2" do + assert Enum.intersperse([], true) == [] + assert Enum.intersperse([1], true) == [1] + assert Enum.intersperse([1, 2, 3], true) == [1, true, 2, true, 3] end - test :into do + test "into/2" do assert Enum.into([a: 1, b: 2], %{}) == %{a: 1, b: 2} assert Enum.into([a: 1, b: 2], %{c: 3}) == %{a: 1, b: 2, 
c: 3} assert Enum.into(%{a: 1, b: 2}, []) == [a: 1, b: 2] - assert Enum.into([1, 2, 3], "numbers: ", &to_string/1) == "numbers: 123" - assert Enum.into([1, 2, 3], fn - func, {:cont, x} when is_function(func) -> [x] - list, {:cont, x} -> [x|list] - list, _ -> list - end) == [3, 2, 1] + assert Enum.into(1..3, []) == [1, 2, 3] + assert Enum.into(["H", "i"], "") == "Hi" end - test :intersperse do - assert Enum.intersperse([], true) == [] - assert Enum.intersperse([1], true) == [1] - assert Enum.intersperse([1,2,3], true) == [1, true, 2, true, 3] + test "into/3" do + assert Enum.into([1, 2, 3], [], fn x -> x * 2 end) == [2, 4, 6] + assert Enum.into([1, 2, 3], "numbers: ", &to_string/1) == "numbers: 123" + assert_raise FunctionClauseError, fn -> + Enum.into([2, 3], %{a: 1}, &(&1)) + end end - test :join do + test "join/2" do assert Enum.join([], " = ") == "" assert Enum.join([1, 2, 3], " = ") == "1 = 2 = 3" assert Enum.join([1, "2", 3], " = ") == "1 = 2 = 3" assert Enum.join([1, 2, 3]) == "123" assert Enum.join(["", "", 1, 2, "", 3, "", "\n"], ";") == ";;1;2;;3;;\n" assert Enum.join([""]) == "" + + assert Enum.join(fn(acc, _) -> acc end, ".") == "" + end + + test "map/2" do + assert Enum.map([], fn x -> x * 2 end) == [] + assert Enum.map([1, 2, 3], fn x -> x * 2 end) == [2, 4, 6] end - test :map_join do + test "map_every/3" do + assert Enum.map_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 2, fn x -> x * 2 end) == [2, 2, 6, 4, 10, 6, 14, 8, 18, 10] + assert Enum.map_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3, fn x -> x * 2 end) == [2, 2, 3, 8, 5, 6, 14, 8, 9, 20] + assert Enum.map_every([], 2, fn x -> x * 2 end) == [] + assert Enum.map_every([1, 2], 2, fn x -> x * 2 end) == [2, 2] + assert Enum.map_every([1, 2, 3], 0, fn _x -> raise :i_should_have_never_been_invoked end) == [1, 2, 3] + assert Enum.map_every(1..3, 1, fn x -> x * 2 end) == [2, 4, 6] + assert_raise FunctionClauseError, fn -> + Enum.map_every([1, 2, 3], -1, fn x -> x * 2 end) + end + assert_raise FunctionClauseError, fn -> + Enum.map_every(1..10, 3.33, fn x -> x * 2 end) + end + assert Enum.map_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 9, fn x -> x + 1000 end) == [1001, 2, 3, 4, 5, 6, 7, 8, 9, 1010] + assert Enum.map_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10, fn x -> x + 1000 end) == [1001, 2, 3, 4, 5, 6, 7, 8, 9, 10] + assert Enum.map_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 100, fn x -> x + 1000 end) == [1001, 2, 3, 4, 5, 6, 7, 8, 9, 10] + end + + test "map_join/3" do assert Enum.map_join([], " = ", &(&1 * 2)) == "" assert Enum.map_join([1, 2, 3], " = ", &(&1 * 2)) == "2 = 4 = 6" assert Enum.map_join([1, 2, 3], &(&1 * 2)) == "246" assert Enum.map_join(["", "", 1, 2, "", 3, "", "\n"], ";", &(&1)) == ";;1;2;;3;;\n" assert Enum.map_join([""], "", &(&1)) == "" + assert Enum.map_join(fn(acc, _) -> acc end, ".", &(&1 + 0)) == "" end - test :join_empty do - fun = fn (acc, _) -> acc end - assert Enum.join(fun, ".") == "" - assert Enum.map_join(fun, ".", &(&1 + 0)) == "" + test "map_reduce/3" do + assert Enum.map_reduce([], 1, fn(x, acc) -> {x * 2, x + acc} end) == {[], 1} + assert Enum.map_reduce([1, 2, 3], 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 4, 6], 7} end - test :map do - assert Enum.map([], fn x -> x * 2 end) == [] - assert Enum.map([1, 2, 3], fn x -> x * 2 end) == [2, 4, 6] + test "max/1" do + assert Enum.max([1]) == 1 + assert Enum.max([1, 2, 3]) == 3 + assert Enum.max([1, [], :a, {}]) == [] + assert_raise Enum.EmptyError, fn -> + Enum.max([]) + end end - test :map_reduce do - assert Enum.map_reduce([], 1, fn(x, acc) -> {x * 2, x + acc} 
end) == {[], 1} - assert Enum.map_reduce([1, 2, 3], 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 4, 6], 7} + test "max/2" do + assert Enum.max([1], fn -> nil end) == 1 + assert Enum.max([1, 2, 3], fn -> nil end) == 3 + assert Enum.max([1, [], :a, {}], fn -> nil end) == [] + assert Enum.max([], fn -> :empty_value end) == :empty_value + assert Enum.max(%{}, fn -> :empty_value end) == :empty_value + assert_runs_enumeration_only_once(&Enum.max(&1, fn -> nil end)) end - test :partition do - assert Enum.partition([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == {[2], [1, 3]} - assert Enum.partition([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == {[2, 4, 6], []} + test "max_by/2" do + assert Enum.max_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) == "aaa" + assert_raise Enum.EmptyError, fn -> + Enum.max_by([], fn(x) -> String.length(x) end) + end + assert_raise Enum.EmptyError, fn -> + Enum.max_by(%{}, &(&1)) + end end - test :reduce do - assert Enum.reduce([], 1, fn(x, acc) -> x + acc end) == 1 - assert Enum.reduce([1, 2, 3], 1, fn(x, acc) -> x + acc end) == 7 + test "max_by/3" do + assert Enum.max_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end, fn -> nil end) == "aaa" + assert Enum.max_by([], fn(x) -> String.length(x) end, fn -> :empty_value end) == :empty_value + assert Enum.max_by(%{}, &(&1), fn -> :empty_value end) == :empty_value + assert Enum.max_by(%{}, &(&1), fn -> {:a, :tuple} end) == {:a, :tuple} + assert_runs_enumeration_only_once(&Enum.max_by(&1, fn e -> e end, fn -> nil end)) + end + + test "member?/2" do + assert Enum.member?([1, 2, 3], 2) + refute Enum.member?([], 0) + refute Enum.member?([1, 2, 3], 0) + end + + test "min/1" do + assert Enum.min([1]) == 1 + assert Enum.min([1, 2, 3]) == 1 + assert Enum.min([[], :a, {}]) == :a + assert_raise Enum.EmptyError, fn -> + Enum.min([]) + end + end + + test "min/2" do + assert Enum.min([1], fn -> nil end) == 1 + assert Enum.min([1, 2, 3], fn -> nil end) == 1 + assert Enum.min([[], :a, {}], fn -> nil end) == :a + assert Enum.min([], fn -> :empty_value end) == :empty_value + assert Enum.min(%{}, fn -> :empty_value end) == :empty_value + assert_runs_enumeration_only_once(&Enum.min(&1, fn -> nil end)) + end + + test "min_by/2" do + assert Enum.min_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) == "a" + assert_raise Enum.EmptyError, fn -> + Enum.min_by([], fn(x) -> String.length(x) end) + end + assert_raise Enum.EmptyError, fn -> + Enum.min_by(%{}, &(&1)) + end + end + + test "min_by/3" do + assert Enum.min_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end, fn -> nil end) == "a" + assert Enum.min_by([], fn(x) -> String.length(x) end, fn -> :empty_value end) == :empty_value + assert Enum.min_by(%{}, &(&1), fn -> :empty_value end) == :empty_value + assert Enum.min_by(%{}, &(&1), fn -> {:a, :tuple} end) == {:a, :tuple} + assert_runs_enumeration_only_once(&Enum.min_by(&1, fn e -> e end, fn -> nil end)) + end + + test "min_max/1" do + assert Enum.min_max([1]) == {1, 1} + assert Enum.min_max([2, 3, 1]) == {1, 3} + assert Enum.min_max([[], :a, {}]) == {:a, []} + assert_raise Enum.EmptyError, fn -> + Enum.min_max([]) + end + end + + test "min_max/2" do + assert Enum.min_max([1], fn -> nil end) == {1, 1} + assert Enum.min_max([2, 3, 1], fn -> nil end) == {1, 3} + assert Enum.min_max([[], :a, {}], fn -> nil end) == {:a, []} + assert Enum.min_max([], fn -> {:empty_min, :empty_max} end) == {:empty_min, :empty_max} + assert Enum.min_max(%{}, fn -> {:empty_min, :empty_max} end) == {:empty_min, :empty_max} + 
assert_runs_enumeration_only_once(&Enum.min_max(&1, fn -> nil end)) + end + + test "min_max_by/2" do + assert Enum.min_max_by(["aaa", "a", "aa"], fn(x) -> String.length(x) end) == {"a", "aaa"} + assert_raise Enum.EmptyError, fn -> + Enum.min_max_by([], fn(x) -> String.length(x) end) + end + end + test "min_max_by/3" do + assert Enum.min_max_by(["aaa", "a", "aa"], fn(x) -> String.length(x) end, fn -> nil end) == {"a", "aaa"} + assert Enum.min_max_by([], fn(x) -> String.length(x) end, fn -> {:no_min, :no_max} end) == {:no_min, :no_max} + assert Enum.min_max_by(%{}, fn(x) -> String.length(x) end, fn -> {:no_min, :no_max} end) == {:no_min, :no_max} + assert_runs_enumeration_only_once(&Enum.min_max_by(&1, fn x -> x end, fn -> nil end)) + end + + test "split_with/2" do + assert Enum.split_with([], fn(x) -> rem(x, 2) == 0 end) == {[], []} + assert Enum.split_with([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == {[2], [1, 3]} + assert Enum.split_with([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == {[2, 4, 6], []} + + assert Enum.split_with(1..5, fn(x) -> rem(x, 2) == 0 end) == {[2, 4], [1, 3, 5]} + assert Enum.split_with(-3..0, fn(x) -> x > 0 end) == {[], [-3, -2, -1, 0]} + + assert Enum.split_with(%{}, fn(x) -> rem(x, 2) == 0 end) == {[], []} + assert Enum.split_with(%{a: 1, b: 2, c: 3}, fn({_k, v}) -> rem(v, 2) == 0 end) == {[b: 2], [a: 1, c: 3]} + assert Enum.split_with(%{b: 2, d: 4, f: 6}, fn({_k, v}) -> rem(v, 2) == 0 end) == {[b: 2, d: 4, f: 6], []} + end + + test "random/1" do + # corner cases, independent of the seed + assert_raise Enum.EmptyError, fn -> Enum.random([]) end + assert Enum.random([1]) == 1 + + # set a fixed seed so the test can be deterministic + # please note the order of following assertions is important + seed1 = {1406, 407414, 139258} + seed2 = {1306, 421106, 567597} + :rand.seed(:exsplus, seed1) + assert Enum.random([1, 2]) == 2 + assert Enum.random([1, 2, 3]) == 1 + assert Enum.random([1, 2, 3, 4]) == 1 + assert Enum.random([1, 2, 3, 4, 5]) == 2 + :rand.seed(:exsplus, seed2) + assert Enum.random([1, 2]) == 2 + assert Enum.random([1, 2, 3]) == 3 + assert Enum.random([1, 2, 3, 4]) == 2 + assert Enum.random([1, 2, 3, 4, 5]) == 3 + end + + test "reduce/2" do assert Enum.reduce([1, 2, 3], fn(x, acc) -> x + acc end) == 6 + assert_raise Enum.EmptyError, fn -> Enum.reduce([], fn(x, acc) -> x + acc end) end + + assert_raise Enum.EmptyError, fn -> + Enum.reduce(%{}, fn(_, acc) -> acc end) + end end - test :reject do + test "reduce/3" do + assert Enum.reduce([], 1, fn(x, acc) -> x + acc end) == 1 + assert Enum.reduce([1, 2, 3], 1, fn(x, acc) -> x + acc end) == 7 + end + + test "reduce_while/3" do + assert Enum.reduce_while([1, 2, 3], 1, fn i, acc -> {:cont, acc + i} end) == 7 + assert Enum.reduce_while([1, 2, 3], 1, fn _i, acc -> {:halt, acc} end) == 1 + assert Enum.reduce_while([], 0, fn _i, acc -> {:cont, acc} end) == 0 + end + + test "reject/2" do assert Enum.reject([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == [1, 3] assert Enum.reject([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == [] + assert Enum.reject([1, true, nil, false, 2], &(&1)) == [nil, false] end - test :reverse do + test "reverse/1" do assert Enum.reverse([]) == [] assert Enum.reverse([1, 2, 3]) == [3, 2, 1] + assert Enum.reverse([5..5]) == [5..5] + end + + test "reverse/2" do assert Enum.reverse([1, 2, 3], [4, 5, 6]) == [3, 2, 1, 4, 5, 6] + assert Enum.reverse([1, 2, 3], []) == [3, 2, 1] + assert Enum.reverse([5..5], [5]) == [5..5, 5] end - test :scan do - assert Enum.scan([1,2,3,4,5], &(&1 + &2)) == [1,3,6,10,15] + test 
"reverse_slice/3" do + assert Enum.reverse_slice([], 1, 2) == [] + assert Enum.reverse_slice([1, 2, 3], 0, 0) == [1, 2, 3] + assert Enum.reverse_slice([1, 2, 3], 0, 1) == [1, 2, 3] + assert Enum.reverse_slice([1, 2, 3], 0, 2) == [2, 1, 3] + assert Enum.reverse_slice([1, 2, 3], 0, 20000000) == [3, 2, 1] + assert Enum.reverse_slice([1, 2, 3], 100, 2) == [1, 2, 3] + assert Enum.reverse_slice([1, 2, 3], 10, 10) == [1, 2, 3] + end + + test "scan/2" do + assert Enum.scan([1, 2, 3, 4, 5], &(&1 + &2)) == [1, 3, 6, 10, 15] assert Enum.scan([], &(&1 + &2)) == [] + end - assert Enum.scan([1,2,3,4,5], 0, &(&1 + &2)) == [1,3,6,10,15] + test "scan/3" do + assert Enum.scan([1, 2, 3, 4, 5], 0, &(&1 + &2)) == [1, 3, 6, 10, 15] assert Enum.scan([], 0, &(&1 + &2)) == [] end - test :shuffle do + test "shuffle/1" do # set a fixed seed so the test can be deterministic - :random.seed(1374, 347975, 449264) - assert Enum.shuffle([1, 2, 3, 4, 5]) == [2, 4, 1, 5, 3] + :rand.seed(:exsplus, {1374, 347975, 449264}) + assert Enum.shuffle([1, 2, 3, 4, 5]) == [2, 1, 3, 5, 4] + end + + test "slice/2" do + list = [1, 2, 3, 4, 5] + assert Enum.slice(list, 0..0) == [1] + assert Enum.slice(list, 0..1) == [1, 2] + assert Enum.slice(list, 0..2) == [1, 2, 3] + assert Enum.slice(list, 1, 2) == [2, 3] + assert Enum.slice(list, 1, 0) == [] + assert Enum.slice(list, 2, 5) == [3, 4, 5] + assert Enum.slice(list, 2, 6) == [3, 4, 5] + assert Enum.slice(list, 5, 5) == [] + assert Enum.slice(list, 6, 5) == [] + assert Enum.slice(list, 6, 0) == [] + assert Enum.slice(list, -6, 0) == [] + assert Enum.slice(list, -6, 5) == [] + assert Enum.slice(list, -2, 5) == [4, 5] + assert Enum.slice(list, -3, 1) == [3] + assert_raise FunctionClauseError, fn -> + Enum.slice(list, 0, -1) + end + assert_raise FunctionClauseError, fn -> + Enum.slice(list, 0.99, 0) + end + assert_raise FunctionClauseError, fn -> + Enum.slice(list, 0, 0.99) + end end - test :sort do + test "slice/3" do + list = [1, 2, 3, 4, 5] + assert Enum.slice(list, 0, 0) == [] + assert Enum.slice(list, 0, 1) == [1] + assert Enum.slice(list, 0, 2) == [1, 2] + assert Enum.slice(list, 1, 2) == [2, 3] + assert Enum.slice(list, 1, 0) == [] + assert Enum.slice(list, 2, 5) == [3, 4, 5] + assert Enum.slice(list, 2, 6) == [3, 4, 5] + assert Enum.slice(list, 5, 5) == [] + assert Enum.slice(list, 6, 5) == [] + assert Enum.slice(list, 6, 0) == [] + assert Enum.slice(list, -6, 0) == [] + assert Enum.slice(list, -6, 5) == [] + assert Enum.slice(list, -2, 5) == [4, 5] + assert Enum.slice(list, -3, 1) == [3] + assert_raise FunctionClauseError, fn -> + Enum.slice(list, 0, -1) + end + assert_raise FunctionClauseError, fn -> + Enum.slice(list, 0.99, 0) + end + assert_raise FunctionClauseError, fn -> + Enum.slice(list, 0, 0.99) + end + end + + test "sort/1" do assert Enum.sort([5, 3, 2, 4, 1]) == [1, 2, 3, 4, 5] + end + + test "sort/2" do assert Enum.sort([5, 3, 2, 4, 1], &(&1 > &2)) == [5, 4, 3, 2, 1] end - test :split do + test "sort_by/3" do + collection = [ + [other_data: 1, sorted_data: 5], + [other_data: 3, sorted_data: 4], + [other_data: 4, sorted_data: 3], + [other_data: 2, sorted_data: 2], + [other_data: 5, sorted_data: 1] + ] + + assert Enum.sort_by( + collection, + &(&1[:sorted_data]) + ) == [ + [other_data: 5, sorted_data: 1], + [other_data: 2, sorted_data: 2], + [other_data: 4, sorted_data: 3], + [other_data: 3, sorted_data: 4], + [other_data: 1, sorted_data: 5] + ] + assert Enum.sort_by(collection, &(&1[:sorted_data]), &>=/2) == collection + end + + test "split/2" do assert Enum.split([1, 2, 3], 
0) == {[], [1, 2, 3]} assert Enum.split([1, 2, 3], 1) == {[1], [2, 3]} assert Enum.split([1, 2, 3], 2) == {[1, 2], [3]} @@ -293,7 +637,7 @@ defmodule EnumTest.List do assert Enum.split([1, 2, 3], -10) == {[], [1, 2, 3]} end - test :split_while do + test "split_while/2" do assert Enum.split_while([1, 2, 3], fn(_) -> false end) == {[], [1, 2, 3]} assert Enum.split_while([1, 2, 3], fn(_) -> true end) == {[1, 2, 3], []} assert Enum.split_while([1, 2, 3], fn(x) -> x > 2 end) == {[], [1, 2, 3]} @@ -302,20 +646,25 @@ defmodule EnumTest.List do assert Enum.split_while([], fn(_) -> true end) == {[], []} end - test :sum do + test "sum/1" do assert Enum.sum([]) == 0 assert Enum.sum([1]) == 1 assert Enum.sum([1, 2, 3]) == 6 assert Enum.sum([1.1, 2.2, 3.3]) == 6.6 + assert Enum.sum([-3, -2, -1, 0, 1, 2, 3]) == 0 + assert Enum.sum(42..42) == 42 + assert Enum.sum(11..17) == 98 + assert Enum.sum(17..11) == 98 + assert Enum.sum(11..-17) == Enum.sum(-17..11) assert_raise ArithmeticError, fn -> Enum.sum([{}]) end assert_raise ArithmeticError, fn -> - Enum.sum([1,{}]) + Enum.sum([1, {}]) end end - test :take do + test "take/2" do assert Enum.take([1, 2, 3], 0) == [] assert Enum.take([1, 2, 3], 1) == [1] assert Enum.take([1, 2, 3], 2) == [1, 2] @@ -327,199 +676,156 @@ defmodule EnumTest.List do assert Enum.take([], 3) == [] end - test :take_every do + test "take_every/2" do assert Enum.take_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 2) == [1, 3, 5, 7, 9] + assert Enum.take_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3) == [1, 4, 7, 10] assert Enum.take_every([], 2) == [] assert Enum.take_every([1, 2], 2) == [1] assert Enum.take_every([1, 2, 3], 0) == [] + assert Enum.take_every(1..3, 1) == [1, 2, 3] + assert_raise FunctionClauseError, fn -> + Enum.take_every([1, 2, 3], -1) + end + assert_raise FunctionClauseError, fn -> + Enum.take_every(1..10, 3.33) + end + end + + test "take_random/2" do + assert Enum.take_random(-42..-42, 1) == [-42] + + # corner cases, independent of the seed + assert_raise FunctionClauseError, fn -> Enum.take_random([1, 2], -1) end + assert Enum.take_random([], 0) == [] + assert Enum.take_random([], 3) == [] + assert Enum.take_random([1], 0) == [] + assert Enum.take_random([1], 2) == [1] + assert Enum.take_random([1, 2], 0) == [] + + # set a fixed seed so the test can be deterministic + # please note the order of following assertions is important + seed1 = {1406, 407414, 139258} + seed2 = {1406, 421106, 567597} + :rand.seed(:exsplus, seed1) + assert Enum.take_random([1, 2, 3], 1) == [2] + assert Enum.take_random([1, 2, 3], 2) == [3, 1] + assert Enum.take_random([1, 2, 3], 3) == [1, 3, 2] + assert Enum.take_random([1, 2, 3], 4) == [2, 3, 1] + :rand.seed(:exsplus, seed2) + assert Enum.take_random([1, 2, 3], 1) == [3] + assert Enum.take_random([1, 2, 3], 2) == [1, 2] + assert Enum.take_random([1, 2, 3], 3) == [1, 2, 3] + assert Enum.take_random([1, 2, 3], 4) == [2, 1, 3] + assert Enum.take_random([1, 2, 3], 129) == [3, 2, 1] + + # assert that every item in the sample comes from the input list + list = for _<-1..100, do: make_ref() + for x <- Enum.take_random(list, 50) do + assert x in list + end + + assert_raise FunctionClauseError, fn -> + Enum.take_random(1..10, -1) + end + assert_raise FunctionClauseError, fn -> + Enum.take_random(1..10, 10.0) + end + assert_raise FunctionClauseError, fn -> + Enum.take_random(1..10, 128.1) + end end - test :take_while do + test "take_while/2" do assert Enum.take_while([1, 2, 3], fn(x) -> x > 3 end) == [] assert Enum.take_while([1, 2, 3], fn(x) -> x <= 1 end) == 
[1] assert Enum.take_while([1, 2, 3], fn(x) -> x <= 3 end) == [1, 2, 3] assert Enum.take_while([], fn(_) -> true end) == [] end - test :to_list do + test "to_list/1" do assert Enum.to_list([]) == [] - assert Enum.to_list(1 .. 3) == [1, 2, 3] end - test :traverse do - assert Enum.traverse([1, 2, 3], &(&1 * &1)) == [1, 4, 9] - assert Enum.traverse(%{a: 1, b: 2}, fn {k, v} -> {k, v*2} end) == %{a: 2, b: 4} + test "uniq/1" do + assert Enum.uniq([5, 1, 2, 3, 2, 1]) == [5, 1, 2, 3] end - test :uniq do - assert Enum.uniq([1, 2, 3, 2, 1]) == [1, 2, 3] - assert Enum.uniq([1, 2, 3, 2, 1], fn x -> x end) == [1, 2, 3] + test "uniq_by/2" do + assert Enum.uniq_by([1, 2, 3, 2, 1], fn x -> x end) == [1, 2, 3] end - test :zip do - assert Enum.zip([:a, :b], [1, 2]) == [{:a, 1}, {:b, 2}] - assert Enum.zip([:a, :b], [1, 2, 3, 4]) == [{:a, 1}, {:b, 2}] - assert Enum.zip([:a, :b, :c, :d], [1, 2]) == [{:a, 1}, {:b, 2}] - assert Enum.zip([], [1]) == [] - assert Enum.zip([1], []) == [] - assert Enum.zip([], []) == [] - end + test "unzip/1" do + assert Enum.unzip([{:a, 1}, {:b, 2}, {:c, 3}]) == {[:a, :b, :c], [1, 2, 3]} + assert Enum.unzip([]) == {[], []} + assert Enum.unzip(%{a: 1, b: 2}) == {[:a, :b], [1, 2]} + assert Enum.unzip([foo: "a", bar: "b"]) == {[:foo, :bar], ["a", "b"]} - test :with_index do - assert Enum.with_index([]) == [] - assert Enum.with_index([1,2,3]) == [{1,0},{2,1},{3,2}] + assert_raise FunctionClauseError, fn -> Enum.unzip([{:a, 1}, {:b, 2, "foo"}]) end + assert_raise FunctionClauseError, fn -> Enum.unzip([{1, 2, {3, {4, 5}}}]) end + assert_raise FunctionClauseError, fn -> Enum.unzip([1, 2, 3]) end end - test :max do - assert Enum.max([1]) == 1 - assert Enum.max([1, 2, 3]) == 3 - assert Enum.max([1, [], :a, {}]) == [] - assert_raise Enum.EmptyError, fn -> - Enum.max([]) - end - end - - test :max_by do - assert Enum.max_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) == "aaa" - assert_raise Enum.EmptyError, fn -> - Enum.max_by([], fn(x) -> String.length(x) end) - end + test "with_index/2" do + assert Enum.with_index([]) == [] + assert Enum.with_index([1, 2, 3]) == [{1, 0}, {2, 1}, {3, 2}] + assert Enum.with_index([1, 2, 3], 10) == [{1, 10}, {2, 11}, {3, 12}] end - test :min do - assert Enum.min([1]) == 1 - assert Enum.min([1, 2, 3]) == 1 - assert Enum.min([[], :a, {}]) == :a - assert_raise Enum.EmptyError, fn -> - Enum.min([]) - end - end + test "zip/2" do + assert Enum.zip([:a, :b], [1, 2]) == [{:a, 1}, {:b, 2}] + assert Enum.zip([:a, :b], [1, 2, 3, 4]) == [{:a, 1}, {:b, 2}] + assert Enum.zip([:a, :b, :c, :d], [1, 2]) == [{:a, 1}, {:b, 2}] - test :min_by do - assert Enum.min_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) == "a" - assert_raise Enum.EmptyError, fn -> - Enum.min_by([], fn(x) -> String.length(x) end) - end + assert Enum.zip([], [1]) == [] + assert Enum.zip([1], []) == [] + assert Enum.zip([], []) == [] end - test :chunk do - assert Enum.chunk([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4]] - assert Enum.chunk([1, 2, 3, 4, 5], 2, 2, [6]) == [[1, 2], [3, 4], [5, 6]] - assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2) == [[1, 2, 3], [3, 4, 5]] - assert Enum.chunk([1, 2, 3, 4, 5, 6], 2, 3) == [[1, 2], [4, 5]] - assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2, []) == [[1, 2, 3], [3, 4, 5], [5, 6]] - assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 3, []) == [[1, 2, 3], [4, 5, 6]] - assert Enum.chunk([1, 2, 3, 4, 5], 4, 4, 6..10) == [[1, 2, 3, 4], [5, 6, 7, 8]] - end + test "zip/1" do + assert Enum.zip([[:a, :b], [1, 2], ["foo", "bar"]]) == [{:a, 1, "foo"}, {:b, 2, "bar"}] + assert Enum.zip([[:a, 
:b], [1, 2, 3, 4], ["foo", "bar", "baz", "qux"]]) == [{:a, 1, "foo"}, {:b, 2, "bar"}] + assert Enum.zip([[:a, :b, :c, :d], [1, 2], ["foo", "bar", "baz", "qux"]]) == [{:a, 1, "foo"}, {:b, 2, "bar"}] + assert Enum.zip([[:a, :b, :c, :d], [1, 2, 3, 4], ["foo", "bar"]]) == [{:a, 1, "foo"}, {:b, 2, "bar"}] + assert Enum.zip([1..10, ["foo", "bar"]]) == [{1, "foo"}, {2, "bar"}] - test :chunk_by do - assert Enum.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1)) == [[1], [2, 2], [3], [4, 4, 6], [7, 7]] - assert Enum.chunk_by([1, 2, 3, 4], fn _ -> true end) == [[1, 2, 3, 4]] - assert Enum.chunk_by([], fn _ -> true end) == [] - assert Enum.chunk_by([1], fn _ -> true end) == [[1]] - end + assert Enum.zip([]) == [] + assert Enum.zip([[]]) == [] + assert Enum.zip([[1]]) == [{1}] - test :slice do - assert Enum.slice([1,2,3,4,5], 0, 0) == [] - assert Enum.slice([1,2,3,4,5], 0, 1) == [1] - assert Enum.slice([1,2,3,4,5], 0, 2) == [1, 2] - assert Enum.slice([1,2,3,4,5], 1, 2) == [2, 3] - assert Enum.slice([1,2,3,4,5], 1, 0) == [] - assert Enum.slice([1,2,3,4,5], 2, 5) == [3, 4, 5] - assert Enum.slice([1,2,3,4,5], 2, 6) == [3, 4, 5] - assert Enum.slice([1,2,3,4,5], 5, 5) == [] - assert Enum.slice([1,2,3,4,5], 6, 5) == [] - assert Enum.slice([1,2,3,4,5], 6, 0) == [] - assert Enum.slice([1,2,3,4,5], -6, 0) == [] - assert Enum.slice([1,2,3,4,5], -6, 5) == [] - assert Enum.slice([1,2,3,4,5], -2, 5) == [4, 5] - assert Enum.slice([1,2,3,4,5], -3, 1) == [3] - end - - test :slice_range do - assert Enum.slice([1,2,3,4,5], 0..0) == [1] - assert Enum.slice([1,2,3,4,5], 0..1) == [1, 2] - assert Enum.slice([1,2,3,4,5], 0..2) == [1, 2, 3] - assert Enum.slice([1,2,3,4,5], 1..2) == [2, 3] - assert Enum.slice([1,2,3,4,5], 1..0) == [] - assert Enum.slice([1,2,3,4,5], 2..5) == [3, 4, 5] - assert Enum.slice([1,2,3,4,5], 2..6) == [3, 4, 5] - assert Enum.slice([1,2,3,4,5], 4..4) == [5] - assert Enum.slice([1,2,3,4,5], 5..5) == [] - assert Enum.slice([1,2,3,4,5], 6..5) == [] - assert Enum.slice([1,2,3,4,5], 6..0) == [] - assert Enum.slice([1,2,3,4,5], -6..0) == [] - assert Enum.slice([1,2,3,4,5], -6..5) == [] - assert Enum.slice([1,2,3,4,5], -5..-1) == [1, 2, 3, 4, 5] - assert Enum.slice([1,2,3,4,5], -5..-3) == [1, 2, 3] - assert Enum.slice([1,2,3,4,5], -6..-1) == [] - assert Enum.slice([1,2,3,4,5], -6..-3) == [] + assert Enum.zip([[], [], [], []]) == [] end end defmodule EnumTest.Range do use ExUnit.Case, async: true - test :all? do - range = 0..5 - refute Enum.all?(range, fn(x) -> rem(x, 2) == 0 end) - - range = 0..1 - assert Enum.all?(range, fn(x) -> x < 2 end) - assert Enum.all?(range) - - range = 1..0 - assert Enum.all?(range) + test "all?/2" do + assert Enum.all?(0..1) + assert Enum.all?(1..0) + refute Enum.all?(0..5, fn(x) -> rem(x, 2) == 0 end) + assert Enum.all?(0..1, fn(x) -> x < 2 end) end - test :any? do - range = 0..5 - refute Enum.any?(range, &(&1 > 10)) - - range = 0..5 - assert Enum.any?(range, &(&1 > 3)) - - range = 1..0 - assert Enum.any?(range) + test "any?/2" do + assert Enum.any?(1..0) + refute Enum.any?(0..5, &(&1 > 10)) + assert Enum.any?(0..5, &(&1 > 3)) end - test :fetch! 
do - assert Enum.fetch!(2..6, 0) == 2 - assert Enum.fetch!(2..6, 4) == 6 - assert Enum.fetch!(2..6, -1) == 6 - assert Enum.fetch!(2..6, -2) == 5 - assert Enum.fetch!(-2..-6, 0) == -2 - assert Enum.fetch!(-2..-6, 4) == -6 - - assert_raise Enum.OutOfBoundsError, fn -> - Enum.fetch!(2..6, 8) - end - - assert_raise Enum.OutOfBoundsError, fn -> - Enum.fetch!(-2..-6, 8) - end - - assert_raise Enum.OutOfBoundsError, fn -> - Enum.fetch!(2..6, -8) - end - end - - test :count do - range = 1..5 - assert Enum.count(range) == 5 - range = 1..1 - assert Enum.count(range) == 1 + test "at/3" do + assert Enum.at(2..6, 0) == 2 + assert Enum.at(2..6, 4) == 6 + assert Enum.at(2..6, 6) == nil + assert Enum.at(2..6, 6, :none) == :none + assert Enum.at(2..6, -2) == 5 + assert Enum.at(2..6, -8) == nil end - test :count_fun do - range = 1..5 - assert Enum.count(range, fn(x) -> rem(x, 2) == 0 end) == 2 - range = 1..1 - assert Enum.count(range, fn(x) -> rem(x, 2) == 0 end) == 0 + test "chunk/2" do + assert Enum.chunk(1..5, 2) == [[1, 2], [3, 4]] end - test :chunk do - assert Enum.chunk(1..5, 2) == [[1, 2], [3, 4]] + test "chunk/4" do assert Enum.chunk(1..5, 2, 2, [6]) == [[1, 2], [3, 4], [5, 6]] assert Enum.chunk(1..6, 3, 2) == [[1, 2, 3], [3, 4, 5]] assert Enum.chunk(1..6, 2, 3) == [[1, 2], [4, 5]] @@ -527,251 +833,354 @@ defmodule EnumTest.Range do assert Enum.chunk(1..5, 4, 4, 6..10) == [[1, 2, 3, 4], [5, 6, 7, 8]] end - test :chunk_by do + test "chunk_by/2" do assert Enum.chunk_by(1..4, fn _ -> true end) == [[1, 2, 3, 4]] assert Enum.chunk_by(1..4, &(rem(&1, 2) == 1)) == [[1], [2], [3], [4]] end - test :drop do - range = 1..3 - assert Enum.drop(range, 0) == [1, 2, 3] - assert Enum.drop(range, 1) == [2, 3] - assert Enum.drop(range, 2) == [3] - assert Enum.drop(range, 3) == [] - assert Enum.drop(range, 4) == [] - assert Enum.drop(range, -1) == [1, 2] - assert Enum.drop(range, -2) == [1] - assert Enum.drop(range, -4) == [] - - range = 1..0 - assert Enum.drop(range, 3) == [] + test "concat/1" do + assert Enum.concat([1..2, 4..6]) == [1, 2, 4, 5, 6] + assert Enum.concat([1..5, fn acc, _ -> acc end, [1]]) == [1, 2, 3, 4, 5, 1] end - test :drop_while do - range = 0..6 - assert Enum.drop_while(range, fn(x) -> x <= 3 end) == [4, 5, 6] - assert Enum.drop_while(range, fn(_) -> false end) == [0, 1, 2, 3, 4, 5, 6] + test "concat/2" do + assert Enum.concat(1..3, 4..5) == [1, 2, 3, 4, 5] + assert Enum.concat(1..3, [4, 5]) == [1, 2, 3, 4, 5] + assert Enum.concat(1..3, []) == [1, 2, 3] + assert Enum.concat(1..3, 0..0) == [1, 2, 3, 0] + end - range = 0..3 - assert Enum.drop_while(range, fn(x) -> x <= 3 end) == [] + test "count/1" do + assert Enum.count(1..5) == 5 + assert Enum.count(1..1) == 1 + end - range = 1..0 - assert Enum.drop_while(range, fn(_) -> false end) == [1, 0] + test "count/2" do + assert Enum.count(1..5, fn(x) -> rem(x, 2) == 0 end) == 2 + assert Enum.count(1..1, fn(x) -> rem(x, 2) == 0 end) == 0 end - test :find do - range = 2..6 - assert Enum.find(range, fn(x) -> rem(x, 2) == 0 end) == 2 - assert Enum.find(range, fn(x) -> rem(x, 2) == 1 end) == 3 - assert Enum.find(range, fn _ -> false end) == nil - assert Enum.find(range, 0, fn _ -> false end) == 0 + test "dedup/1" do + assert Enum.dedup(1..3) == [1, 2, 3] end - test :find_value do - range = 2..6 - assert Enum.find_value(range, fn(x) -> rem(x, 2) == 1 end) + test "dedup_by/2" do + assert Enum.dedup_by(1..3, fn _ -> 1 end) == [1] end - test :find_index do - range = 2..6 - assert Enum.find_index(range, fn(x) -> rem(x, 2) == 1 end) == 1 + test "drop/2" do + assert 
Enum.drop(1..3, 0) == [1, 2, 3] + assert Enum.drop(1..3, 1) == [2, 3] + assert Enum.drop(1..3, 2) == [3] + assert Enum.drop(1..3, 3) == [] + assert Enum.drop(1..3, 4) == [] + assert Enum.drop(1..3, -1) == [1, 2] + assert Enum.drop(1..3, -2) == [1] + assert Enum.drop(1..3, -4) == [] + assert Enum.drop(1..0, 3) == [] end - test :empty? do - range = 1..0 - refute Enum.empty?(range) + test "drop_every/2" do + assert Enum.drop_every(1..10, 2) == [2, 4, 6, 8, 10] + assert Enum.drop_every(1..10, 3) == [2, 3, 5, 6, 8, 9] + assert Enum.drop_every(0..0, 2) == [] + assert Enum.drop_every(1..2, 2) == [2] + assert Enum.drop_every(1..3, 0) == [1, 2, 3] + assert Enum.drop_every(1..3, 1) == [] + assert_raise FunctionClauseError, fn -> + Enum.drop_every(1..10, 3.33) + end + end - range = 1..2 - refute Enum.empty?(range) + test "drop_while/2" do + assert Enum.drop_while(0..6, fn(x) -> x <= 3 end) == [4, 5, 6] + assert Enum.drop_while(0..6, fn(_) -> false end) == [0, 1, 2, 3, 4, 5, 6] + assert Enum.drop_while(0..3, fn(x) -> x <= 3 end) == [] + assert Enum.drop_while(1..0, fn(_) -> nil end) == [1, 0] end - test :each do + test "each/2" do try do - range = 1..0 - assert Enum.each(range, fn(x) -> x end) == :ok - - range = 1..3 - assert Enum.each(range, fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok + assert Enum.each(1..0, fn(x) -> x end) == :ok + assert Enum.each(1..3, fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok assert Process.get(:enum_test_each) == 6 after Process.delete(:enum_test_each) end try do - range = -1..-3 - assert Enum.each(range, fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok + assert Enum.each(-1..-3, fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok assert Process.get(:enum_test_each) == -6 after Process.delete(:enum_test_each) end end - test :filter do - range = 1..3 - assert Enum.filter(range, fn(x) -> rem(x, 2) == 0 end) == [2] + test "empty?/1" do + refute Enum.empty?(1..0) + refute Enum.empty?(1..2) + end + + test "fetch/2" do + # ascending order + assert Enum.fetch(-10..20, 4) == {:ok, -6} + assert Enum.fetch(-10..20, -4) == {:ok, 17} + # ascending order, first + assert Enum.fetch(-10..20, 0) == {:ok, -10} + assert Enum.fetch(-10..20, -31) == {:ok, -10} + # ascending order, last + assert Enum.fetch(-10..20, -1) == {:ok, 20} + assert Enum.fetch(-10..20, 30) == {:ok, 20} + # ascending order, out of bound + assert Enum.fetch(-10..20, 31) == :error + assert Enum.fetch(-10..20, -32) == :error + + # descending order + assert Enum.fetch(20..-10, 4) == {:ok, 16} + assert Enum.fetch(20..-10, -4) == {:ok, -7} + # descending order, first + assert Enum.fetch(20..-10, 0) == {:ok, 20} + assert Enum.fetch(20..-10, -31) == {:ok, 20} + # descending order, last + assert Enum.fetch(20..-10, -1) == {:ok, -10} + assert Enum.fetch(20..-10, 30) == {:ok, -10} + # descending order, out of bound + assert Enum.fetch(20..-10, 31) == :error + assert Enum.fetch(20..-10, -32) == :error + + # edge cases + assert Enum.fetch(42..42, 0) == {:ok, 42} + assert Enum.fetch(42..42, -1) == {:ok, 42} + assert Enum.fetch(42..42, 2) == :error + assert Enum.fetch(42..42, -2) == :error + end + + test "fetch!/2" do + assert Enum.fetch!(2..6, 0) == 2 + assert Enum.fetch!(2..6, 4) == 6 + assert Enum.fetch!(2..6, -1) == 6 + assert Enum.fetch!(2..6, -2) == 5 + assert Enum.fetch!(-2..-6, 0) == -2 + assert Enum.fetch!(-2..-6, 4) == -6 + + assert_raise Enum.OutOfBoundsError, fn -> + Enum.fetch!(2..6, 8) + end + + assert_raise Enum.OutOfBoundsError, fn -> + Enum.fetch!(-2..-6, 8) + end + + assert_raise 
Enum.OutOfBoundsError, fn -> + Enum.fetch!(2..6, -8) + end + end + + test "filter/2" do + assert Enum.filter(1..3, fn(x) -> rem(x, 2) == 0 end) == [2] + assert Enum.filter(1..6, fn(x) -> rem(x, 2) == 0 end) == [2, 4, 6] - range = 1..6 - assert Enum.filter(range, fn(x) -> rem(x, 2) == 0 end) == [2, 4, 6] + assert Enum.filter(1..3, &match?(1, &1)) == [1] + assert Enum.filter(1..3, &match?(x when x < 3, &1)) == [1, 2] + assert Enum.filter(1..3, fn _ -> true end) == [1, 2, 3] end - test :filter_with_match do - range = 1..3 - assert Enum.filter(range, &match?(1, &1)) == [1] - assert Enum.filter(range, &match?(x when x < 3, &1)) == [1, 2] - assert Enum.filter(range, &match?(_, &1)) == [1, 2, 3] + test "find/3" do + assert Enum.find(2..6, fn(x) -> rem(x, 2) == 0 end) == 2 + assert Enum.find(2..6, fn(x) -> rem(x, 2) == 1 end) == 3 + assert Enum.find(2..6, fn _ -> false end) == nil + assert Enum.find(2..6, 0, fn _ -> false end) == 0 end - test :filter_map do - range = 1..3 - assert Enum.filter_map(range, fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4] + test "find_index/2" do + assert Enum.find_index(2..6, fn(x) -> rem(x, 2) == 1 end) == 1 + end + + test "find_value/3" do + assert Enum.find_value(2..6, fn(x) -> rem(x, 2) == 1 end) + end - range = 2..6 - assert Enum.filter_map(range, fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4, 8, 12] + test "flat_map/2" do + assert Enum.flat_map(1..3, fn(x) -> [x, x] end) == [1, 1, 2, 2, 3, 3] end - test :flat_map do - range = 1..3 - assert Enum.flat_map(range, fn(x) -> [x, x] end) == [1, 1, 2, 2, 3, 3] + test "flat_map_reduce/3" do + assert Enum.flat_map_reduce(1..100, 0, fn i, acc -> + if acc < 3, do: {[i], acc + 1}, else: {:halt, acc} + end) == {[1, 2, 3], 3} end - test :intersperse do - range = 1..0 - assert Enum.intersperse(range, true) == [1, true, 0] + test "group_by/3" do + assert Enum.group_by(1..6, &rem(&1, 3)) == + %{0 => [3, 6], 1 => [1, 4], 2 => [2, 5]} + assert Enum.group_by(1..6, &rem(&1, 3), &(&1 * 2)) == + %{0 => [6, 12], 1 => [2, 8], 2 => [4, 10]} + end - range = 1..3 - assert Enum.intersperse(range, false) == [1, false, 2, false, 3] + test "intersperse/2" do + assert Enum.intersperse(1..0, true) == [1, true, 0] + assert Enum.intersperse(1..3, false) == [1, false, 2, false, 3] end - test :into do - assert Enum.into([a: 1, b: 2], %{}) == %{a: 1, b: 2} - assert Enum.into(%{a: 1, b: 2}, []) == [a: 1, b: 2] + test "into/2" do assert Enum.into(3..5, [1, 2]) == [1, 2, 3, 4, 5] assert Enum.into(1..5, []) == [1, 2, 3, 4, 5] + end + + test "into/3" do assert Enum.into(1..5, [], fn x -> x * 2 end) == [2, 4, 6, 8, 10] assert Enum.into(1..3, "numbers: ", &to_string/1) == "numbers: 123" end - test :join do - range = 1..0 - assert Enum.join(range, " = ") == "1 = 0" - - range = 1..3 - assert Enum.join(range, " = ") == "1 = 2 = 3" - assert Enum.join(range) == "123" + test "join/2" do + assert Enum.join(1..0, " = ") == "1 = 0" + assert Enum.join(1..3, " = ") == "1 = 2 = 3" + assert Enum.join(1..3) == "123" end - test :map_join do - range = 1..0 - assert Enum.map_join(range, " = ", &(&1 * 2)) == "2 = 0" - - range = 1..3 - assert Enum.map_join(range, " = ", &(&1 * 2)) == "2 = 4 = 6" - assert Enum.map_join(range, &(&1 * 2)) == "246" + test "map/2" do + assert Enum.map(1..3, fn x -> x * 2 end) == [2, 4, 6] + assert Enum.map(-1..-3, fn x -> x * 2 end) == [-2, -4, -6] end - test :map do - range = 1..3 - assert Enum.map(range, fn x -> x * 2 end) == [2, 4, 6] - - range = -1..-3 - assert Enum.map(range, fn x -> x * 2 end) == [-2, -4, -6] + test "map_every/3" do + assert 
Enum.map_every(1..10, 2, fn x -> x * 2 end) == [2, 2, 6, 4, 10, 6, 14, 8, 18, 10] + assert Enum.map_every(-1..-10, 2, fn x -> x * 2 end) == [-2, -2, -6, -4, -10, -6, -14, -8, -18, -10] + assert Enum.map_every(1..2, 2, fn x -> x * 2 end) == [2, 2] + assert Enum.map_every(1..3, 0, fn x -> x * 2 end) == [1, 2, 3] + assert_raise FunctionClauseError, fn -> + Enum.map_every(1..3, -1, fn x -> x * 2 end) + end end - test :map_reduce do - range = 1..0 - assert Enum.map_reduce(range, 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 0], 2} + test "map_join/3" do + assert Enum.map_join(1..0, " = ", &(&1 * 2)) == "2 = 0" + assert Enum.map_join(1..3, " = ", &(&1 * 2)) == "2 = 4 = 6" + assert Enum.map_join(1..3, &(&1 * 2)) == "246" + end - range = 1..3 - assert Enum.map_reduce(range, 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 4, 6], 7} + test "map_reduce/3" do + assert Enum.map_reduce(1..0, 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 0], 2} + assert Enum.map_reduce(1..3, 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 4, 6], 7} end - test :max do + test "max/1" do assert Enum.max(1..1) == 1 assert Enum.max(1..3) == 3 assert Enum.max(3..1) == 3 end - test :max_by do + test "max_by/2" do assert Enum.max_by(1..1, fn(x) -> :math.pow(-2, x) end) == 1 assert Enum.max_by(1..3, fn(x) -> :math.pow(-2, x) end) == 2 end - test :min do - assert Enum.min([1]) == 1 - assert Enum.min([1, 2, 3]) == 1 - assert Enum.min([[], :a, {}]) == :a + test "member?/2" do + assert Enum.member?(1..3, 2) + refute Enum.member?(1..3, 0) end - test :min_by do + test "min/1" do + assert Enum.min(1..1) == 1 + assert Enum.min(1..3) == 1 + end + + test "min_by/2" do assert Enum.min_by(1..1, fn(x) -> :math.pow(-2, x) end) == 1 assert Enum.min_by(1..3, fn(x) -> :math.pow(-2, x) end) == 3 end - test :partition do - range = 1..3 - assert Enum.partition(range, fn(x) -> rem(x, 2) == 0 end) == {[2], [1, 3]} + test "min_max/1" do + assert Enum.min_max(1..1) == {1, 1} + assert Enum.min_max(1..3) == {1, 3} + assert Enum.min_max(3..1) == {1, 3} + end + + test "min_max_by/2" do + assert Enum.min_max_by(1..1, fn(x) -> x end) == {1, 1} + assert Enum.min_max_by(1..3, fn(x) -> x end) == {1, 3} end - test :reduce do - range = 1..0 - assert Enum.reduce(range, 1, fn(x, acc) -> x + acc end) == 2 + test "split_with/2" do + assert Enum.split_with(1..3, fn(x) -> rem(x, 2) == 0 end) == {[2], [1, 3]} + end - range = 1..3 - assert Enum.reduce(range, 1, fn(x, acc) -> x + acc end) == 7 + test "random/1" do + # corner cases, independent of the seed + assert Enum.random(1..1) == 1 - range = 1..3 - assert Enum.reduce(range, fn(x, acc) -> x + acc end) == 6 + # set a fixed seed so the test can be deterministic + # please note the order of following assertions is important + seed1 = {1406, 407414, 139258} + seed2 = {1306, 421106, 567597} + :rand.seed(:exsplus, seed1) + assert Enum.random(1..2) == 1 + assert Enum.random(1..3) == 2 + assert Enum.random(3..1) == 3 + :rand.seed(:exsplus, seed2) + assert Enum.random(1..2) == 1 + assert Enum.random(1..3) == 3 end - test :reject do - range = 1..3 - assert Enum.reject(range, fn(x) -> rem(x, 2) == 0 end) == [1, 3] + test "reduce/2" do + assert Enum.reduce(1..3, fn(x, acc) -> x + acc end) == 6 + end - range = 1..6 - assert Enum.reject(range, fn(x) -> rem(x, 2) == 0 end) == [1, 3, 5] + test "reduce/3" do + assert Enum.reduce(1..0, 1, fn(x, acc) -> x + acc end) == 2 + assert Enum.reduce(1..3, 1, fn(x, acc) -> x + acc end) == 7 end - test :reverse do - assert Enum.reverse([]) == [] - assert Enum.reverse([1, 2, 3]) == [3, 2, 1] - 
assert Enum.reverse([1, 2, 3], [4, 5, 6]) == [3, 2, 1, 4, 5, 6] + test "reduce_while/3" do + assert Enum.reduce_while(1..100, 0, fn i, acc -> + if i <= 3, do: {:cont, acc + i}, else: {:halt, acc} + end) == 6 + end + + test "reject/2" do + assert Enum.reject(1..3, fn(x) -> rem(x, 2) == 0 end) == [1, 3] + assert Enum.reject(1..6, fn(x) -> rem(x, 2) == 0 end) == [1, 3, 5] + end + test "reverse/1" do assert Enum.reverse(0..0) == [0] assert Enum.reverse(1..3) == [3, 2, 1] + assert Enum.reverse(-3..5) == [5, 4, 3, 2, 1, 0, -1, -2, -3] + assert Enum.reverse(5..5) == [5] + end + + test "reverse/2" do assert Enum.reverse(1..3, 4..6) == [3, 2, 1, 4, 5, 6] assert Enum.reverse([1, 2, 3], 4..6) == [3, 2, 1, 4, 5, 6] assert Enum.reverse(1..3, [4, 5, 6]) == [3, 2, 1, 4, 5, 6] + assert Enum.reverse(-3..5, MapSet.new([-3, -2])) == [5, 4, 3, 2, 1, 0, -1, -2, -3, -3, -2] + assert Enum.reverse(5..5, [5]) == [5, 5] end - test :scan do - assert Enum.scan(1..5, &(&1 + &2)) == [1,3,6,10,15] - assert Enum.scan(1..5, 0, &(&1 + &2)) == [1,3,6,10,15] + test "reverse_slice/3" do + assert Enum.reverse_slice(1..6, 2, 0) == [1, 2, 3, 4, 5, 6] + assert Enum.reverse_slice(1..6, 2, 2) == [1, 2, 4, 3, 5, 6] + assert Enum.reverse_slice(1..6, 2, 4) == [1, 2, 6, 5, 4, 3] + assert Enum.reverse_slice(1..6, 2, 10000000) == [1, 2, 6, 5, 4, 3] + assert Enum.reverse_slice(1..6, 10000000, 4) == [1, 2, 3, 4, 5, 6] + assert Enum.reverse_slice(1..6, 50, 50) == [1, 2, 3, 4, 5, 6] end - test :shuffle do - # set a fixed seed so the test can be deterministic - :random.seed(1374, 347975, 449264) - assert Enum.shuffle(1..5) == [2, 4, 1, 5, 3] + test "scan/2" do + assert Enum.scan(1..5, &(&1 + &2)) == [1, 3, 6, 10, 15] end - test :slice do - assert Enum.slice(1..5, 0, 0) == [] - assert Enum.slice(1..5, 0, 1) == [1] - assert Enum.slice(1..5, 0, 2) == [1, 2] - assert Enum.slice(1..5, 1, 2) == [2, 3] - assert Enum.slice(1..5, 1, 0) == [] - assert Enum.slice(1..5, 2, 5) == [3, 4, 5] - assert Enum.slice(1..5, 2, 6) == [3, 4, 5] - assert Enum.slice(1..5, 5, 5) == [] - assert Enum.slice(1..5, 6, 5) == [] - assert Enum.slice(1..5, 6, 0) == [] - assert Enum.slice(1..5, -6, 0) == [] - assert Enum.slice(1..5, -6, 5) == [] - assert Enum.slice(1..5, -2, 5) == [4, 5] - assert Enum.slice(1..5, -3, 1) == [3] + test "scan/3" do + assert Enum.scan(1..5, 0, &(&1 + &2)) == [1, 3, 6, 10, 15] + end + + test "shuffle/1" do + # set a fixed seed so the test can be deterministic + :rand.seed(:exsplus, {1374, 347975, 449264}) + assert Enum.shuffle(1..5) == [2, 1, 3, 5, 4] end - test :slice_range do + test "slice/2" do assert Enum.slice(1..5, 0..0) == [1] assert Enum.slice(1..5, 0..1) == [1, 2] assert Enum.slice(1..5, 0..2) == [1, 2, 3] @@ -788,87 +1197,209 @@ defmodule EnumTest.Range do assert Enum.slice(1..5, -5..-1) == [1, 2, 3, 4, 5] assert Enum.slice(1..5, -5..-3) == [1, 2, 3] assert Enum.slice(1..5, -6..-1) == [] - assert Enum.slice(1..5, -6..-3) == [] - end + assert_raise ArgumentError, fn -> + x = 1.1 + Enum.slice(1..5, x..2) + end + assert_raise ArgumentError, fn -> + x = 1.9 + Enum.slice(1..5, 1..x) + end - test :sort do + assert Enum.slice(5..1, 0..0) == [5] + assert Enum.slice(5..1, 0..1) == [5, 4] + assert Enum.slice(5..1, 0..2) == [5, 4, 3] + assert Enum.slice(5..1, 1..2) == [4, 3] + assert Enum.slice(5..1, 1..0) == [] + assert Enum.slice(5..1, 2..5) == [3, 2, 1] + assert Enum.slice(5..1, 2..6) == [3, 2, 1] + assert Enum.slice(5..1, 4..4) == [1] + assert Enum.slice(5..1, 5..5) == [] + assert Enum.slice(5..1, 6..5) == [] + assert Enum.slice(5..1, 6..0) == [] 
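+    # negative positions count from the end of the enumerable, also for the descending 5..1 range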
+ assert Enum.slice(5..1, -6..0) == [] + assert Enum.slice(5..1, -6..5) == [] + assert Enum.slice(5..1, -5..-1) == [5, 4, 3, 2, 1] + assert Enum.slice(5..1, -5..-3) == [5, 4, 3] + assert Enum.slice(5..1, -6..-1) == [] + end + + test "slice/3" do + assert Enum.slice(1..5, 0, 0) == [] + assert Enum.slice(1..5, 0, 1) == [1] + assert Enum.slice(1..5, 0, 2) == [1, 2] + assert Enum.slice(1..5, 1, 2) == [2, 3] + assert Enum.slice(1..5, 1, 0) == [] + assert Enum.slice(1..5, 2, 3) == [3, 4, 5] + assert Enum.slice(1..5, 2, 6) == [3, 4, 5] + assert Enum.slice(1..5, 5, 5) == [] + assert Enum.slice(1..5, 6, 5) == [] + assert Enum.slice(1..5, 6, 0) == [] + assert Enum.slice(1..5, -6, 0) == [] + assert Enum.slice(1..5, -6, 5) == [] + assert Enum.slice(1..5, -2, 5) == [4, 5] + assert Enum.slice(1..5, -3, 1) == [3] + assert_raise FunctionClauseError, fn -> + Enum.slice(1..5, 0, -1) + end + assert_raise FunctionClauseError, fn -> + Enum.slice(1..5, 0.99, 0) + end + assert_raise FunctionClauseError, fn -> + Enum.slice(1..5, 0, 0.99) + end + + assert Enum.slice(5..1, 0, 0) == [] + assert Enum.slice(5..1, 0, 1) == [5] + assert Enum.slice(5..1, 0, 2) == [5, 4] + assert Enum.slice(5..1, 1, 2) == [4, 3] + assert Enum.slice(5..1, 1, 0) == [] + assert Enum.slice(5..1, 2, 3) == [3, 2, 1] + assert Enum.slice(5..1, 2, 6) == [3, 2, 1] + assert Enum.slice(5..1, 4, 4) == [1] + assert Enum.slice(5..1, 5, 5) == [] + assert Enum.slice(5..1, 6, 5) == [] + assert Enum.slice(5..1, 6, 0) == [] + assert Enum.slice(5..1, -6, 0) == [] + assert Enum.slice(5..1, -6, 5) == [] + end + + test "sort/1" do assert Enum.sort(3..1) == [1, 2, 3] assert Enum.sort(2..1) == [1, 2] assert Enum.sort(1..1) == [1] + end + test "sort/2" do assert Enum.sort(3..1, &(&1 > &2)) == [3, 2, 1] assert Enum.sort(2..1, &(&1 > &2)) == [2, 1] assert Enum.sort(1..1, &(&1 > &2)) == [1] end - test :split do - range = 1..3 - assert Enum.split(range, 0) == {[], [1, 2, 3]} - assert Enum.split(range, 1) == {[1], [2, 3]} - assert Enum.split(range, 2) == {[1, 2], [3]} - assert Enum.split(range, 3) == {[1, 2, 3], []} - assert Enum.split(range, 4) == {[1, 2, 3], []} - assert Enum.split(range, -1) == {[1, 2], [3]} - assert Enum.split(range, -2) == {[1], [2, 3]} - assert Enum.split(range, -3) == {[], [1, 2, 3]} - assert Enum.split(range, -10) == {[], [1, 2, 3]} - - range = 1..0 - assert Enum.split(range, 3) == {[1, 0], []} + test "sort_by/2" do + assert Enum.sort_by(3..1, & &1) == [1, 2, 3] end - test :split_while do - range = 1..3 - assert Enum.split_while(range, fn(_) -> false end) == {[], [1, 2, 3]} - assert Enum.split_while(range, fn(_) -> true end) == {[1, 2, 3], []} - assert Enum.split_while(range, fn(x) -> x > 2 end) == {[], [1, 2, 3]} - assert Enum.split_while(range, fn(x) -> x > 3 end) == {[], [1, 2, 3]} - assert Enum.split_while(range, fn(x) -> x < 3 end) == {[1, 2], [3]} + test "split/2" do + assert Enum.split(1..3, 0) == {[], [1, 2, 3]} + assert Enum.split(1..3, 1) == {[1], [2, 3]} + assert Enum.split(1..3, 2) == {[1, 2], [3]} + assert Enum.split(1..3, 3) == {[1, 2, 3], []} + assert Enum.split(1..3, 4) == {[1, 2, 3], []} + assert Enum.split(1..3, -1) == {[1, 2], [3]} + assert Enum.split(1..3, -2) == {[1], [2, 3]} + assert Enum.split(1..3, -3) == {[], [1, 2, 3]} + assert Enum.split(1..3, -10) == {[], [1, 2, 3]} + assert Enum.split(1..0, 3) == {[1, 0], []} + end - range = 1..0 - assert Enum.split_while(range, fn(_) -> true end) == {[1, 0], []} + test "split_while/2" do + assert Enum.split_while(1..3, fn(_) -> false end) == {[], [1, 2, 3]} + assert 
Enum.split_while(1..3, fn(_) -> nil end) == {[], [1, 2, 3]} + assert Enum.split_while(1..3, fn(_) -> true end) == {[1, 2, 3], []} + assert Enum.split_while(1..3, fn(x) -> x > 2 end) == {[], [1, 2, 3]} + assert Enum.split_while(1..3, fn(x) -> x > 3 end) == {[], [1, 2, 3]} + assert Enum.split_while(1..3, fn(x) -> x < 3 end) == {[1, 2], [3]} + assert Enum.split_while(1..3, fn(x) -> x end) == {[1, 2, 3], []} + assert Enum.split_while(1..0, fn(_) -> true end) == {[1, 0], []} end - test :sum do + test "sum/1" do + assert Enum.sum(0..0) == 0 assert Enum.sum(1..1) == 1 assert Enum.sum(1..3) == 6 + assert Enum.sum(0..100) == 5050 + assert Enum.sum(10..100) == 5005 + assert Enum.sum(100..10) == 5005 + assert Enum.sum(-10..-20) == -165 + assert Enum.sum(-10..2) == -52 + end + + test "take/2" do + assert Enum.take(1..3, 0) == [] + assert Enum.take(1..3, 1) == [1] + assert Enum.take(1..3, 2) == [1, 2] + assert Enum.take(1..3, 3) == [1, 2, 3] + assert Enum.take(1..3, 4) == [1, 2, 3] + assert Enum.take(1..3, -1) == [3] + assert Enum.take(1..3, -2) == [2, 3] + assert Enum.take(1..3, -4) == [1, 2, 3] + assert Enum.take(1..0, 3) == [1, 0] + end + + test "take_every/2" do + assert Enum.take_every(1..10, 2) == [1, 3, 5, 7, 9] + assert Enum.take_every(1..2, 2) == [1] + assert Enum.take_every(1..3, 0) == [] + assert_raise FunctionClauseError, fn -> + Enum.take_every(1..3, -1) + end end - test :take do - range = 1..3 - assert Enum.take(range, 0) == [] - assert Enum.take(range, 1) == [1] - assert Enum.take(range, 2) == [1, 2] - assert Enum.take(range, 3) == [1, 2, 3] - assert Enum.take(range, 4) == [1, 2, 3] - assert Enum.take(range, -1) == [3] - assert Enum.take(range, -2) == [2, 3] - assert Enum.take(range, -4) == [1, 2, 3] + test "take_random/2" do + # corner cases, independent of the seed + assert_raise FunctionClauseError, fn -> Enum.take_random(1..2, -1) end + assert Enum.take_random(1..1, 0) == [] + assert Enum.take_random(1..1, 1) == [1] + assert Enum.take_random(1..1, 2) == [1] + assert Enum.take_random(1..2, 0) == [] - range = 1..0 - assert Enum.take(range, 3) == [1, 0] + # set a fixed seed so the test can be deterministic + # please note the order of following assertions is important + seed1 = {1406, 407414, 139258} + seed2 = {1406, 421106, 567597} + :rand.seed(:exsplus, seed1) + assert Enum.take_random(1..3, 1) == [2] + assert Enum.take_random(1..3, 2) == [3, 1] + assert Enum.take_random(1..3, 3) == [1, 3, 2] + assert Enum.take_random(1..3, 4) == [2, 3, 1] + assert Enum.take_random(3..1, 1) == [3] + :rand.seed(:exsplus, seed2) + assert Enum.take_random(1..3, 1) == [3] + assert Enum.take_random(1..3, 2) == [1, 2] + assert Enum.take_random(1..3, 3) == [1, 2, 3] + assert Enum.take_random(1..3, 4) == [2, 1, 3] + + # make sure optimizations don't change fixed seeded tests + :rand.seed(:exsplus, {101, 102, 103}) + one = Enum.take_random(1..100, 1) + :rand.seed(:exsplus, {101, 102, 103}) + two = Enum.take_random(1..100, 2) + assert hd(one) == hd(two) + end + + test "take_while/2" do + assert Enum.take_while(1..3, fn(x) -> x > 3 end) == [] + assert Enum.take_while(1..3, fn(x) -> x <= 1 end) == [1] + assert Enum.take_while(1..3, fn(x) -> x <= 3 end) == [1, 2, 3] + assert Enum.take_while(1..3, fn(x) -> x end) == [1, 2, 3] + assert Enum.take_while(1..3, fn(_) -> nil end) == [] + end + + test "to_list/1" do + assert Enum.to_list([1, 2, 3]) == [1, 2, 3] + assert Enum.to_list(MapSet.new(1..3)) == [1, 2, 3] + assert Enum.to_list(1..3) == [1, 2, 3] + end + + test "uniq/1" do + assert Enum.uniq(1..3) == [1, 2, 3] end 
- test :take_every do - assert Enum.take_every(1..10, 2) == [1, 3, 5, 7, 9] - assert Enum.take_every(1..2, 2) == [1] - assert Enum.take_every(1..3, 0) == [] + test "uniq_by/2" do + assert Enum.uniq_by(1..3, fn x -> x end) == [1, 2, 3] end - test :take_while do - range = 1..3 - assert Enum.take_while(range, fn(x) -> x > 3 end) == [] - assert Enum.take_while(range, fn(x) -> x <= 1 end) == [1] - assert Enum.take_while(range, fn(x) -> x <= 3 end) == [1, 2, 3] - assert Enum.take_while([], fn(_) -> true end) == [] + test "unzip/1" do + assert_raise FunctionClauseError, fn -> Enum.unzip(1..3) end end - test :uniq do - assert Enum.uniq(1..3) == [1, 2, 3] - assert Enum.uniq(1..3, fn x -> x end) == [1, 2, 3] + test "with_index/2" do + assert Enum.with_index(1..3) == [{1, 0}, {2, 1}, {3, 2}] + assert Enum.with_index(1..3, 3) == [{1, 3}, {2, 4}, {3, 5}] end - test :zip do + test "zip/2" do assert Enum.zip([:a, :b], 1..2) == [{:a, 1}, {:b, 2}] assert Enum.zip([:a, :b], 1..4) == [{:a, 1}, {:b, 2}] assert Enum.zip([:a, :b, :c, :d], 1..2) == [{:a, 1}, {:b, 2}] @@ -881,9 +1412,118 @@ defmodule EnumTest.Range do assert Enum.zip(1..4, 1..2) == [{1, 1}, {2, 2}] assert Enum.zip(1..2, 1..4) == [{1, 1}, {2, 2}] end +end + +defmodule EnumTest.Map do + # Some cases are inlined for ranges which means we need + # to verify them using maps or mapsets. + use ExUnit.Case, async: true - test :with_index do - assert Enum.with_index(1..3) == [{1,0},{2,1},{3,2}] + test "take_random/2" do + # corner cases, independent of the seed + assert_raise FunctionClauseError, fn -> Enum.take_random(1..2, -1) end + assert Enum.take_random(%{a: 1}, 0) == [] + assert Enum.take_random(%{a: 1}, 2) == [a: 1] + assert Enum.take_random(%{a: 1, b: 2}, 0) == [] + + # set a fixed seed so the test can be deterministic + # please note the order of following assertions is important + map = %{a: 1, b: 2, c: 3} + seed1 = {1406, 407414, 139258} + seed2 = {1406, 421106, 567597} + :rand.seed(:exsplus, seed1) + assert Enum.take_random(map, 1) == [b: 2] + assert Enum.take_random(map, 2) == [c: 3, a: 1] + assert Enum.take_random(map, 3) == [a: 1, c: 3, b: 2] + assert Enum.take_random(map, 4) == [b: 2, c: 3, a: 1] + :rand.seed(:exsplus, seed2) + assert Enum.take_random(map, 1) == [c: 3] + assert Enum.take_random(map, 2) == [a: 1, b: 2] + assert Enum.take_random(map, 3) == [a: 1, b: 2, c: 3] + assert Enum.take_random(map, 4) == [b: 2, a: 1, c: 3] + end + + test "reverse/1" do + assert Enum.reverse(%{}) == [] + assert Enum.reverse(MapSet.new) == [] + assert Enum.reverse(%{a: 1, b: 2, c: 3}) == [c: 3, b: 2, a: 1] + end + + test "reverse/2" do + assert Enum.reverse([a: 1, b: 2, c: 3, a: 1], %{x: 1, y: 2, z: 3}) == + [a: 1, c: 3, b: 2, a: 1, x: 1, y: 2, z: 3] + assert Enum.reverse([], %{a: 1}) == [a: 1] + assert Enum.reverse([], %{}) == [] + assert Enum.reverse(%{a: 1}, []) == [a: 1] + assert Enum.reverse(MapSet.new, %{}) == [] + end + + test "fetch/2" do + map = %{a: 1, b: 2, c: 3, d: 4, e: 5} + assert Enum.fetch(map, 0) == {:ok, {:a, 1}} + assert Enum.fetch(map, -2) == {:ok, {:d, 4}} + assert Enum.fetch(map, -6) == :error + assert Enum.fetch(map, 5) == :error + assert Enum.fetch(%{}, 0) == :error + + assert Stream.take(map, 3) |> Enum.fetch(3) == :error + assert Stream.take(map, 5) |> Enum.fetch(4) == {:ok, {:e, 5}} + end + + test "slice/2" do + map = %{a: 1, b: 2, c: 3, d: 4, e: 5} + assert Enum.slice(map, 0..0) == [a: 1] + assert Enum.slice(map, 0..1) == [a: 1, b: 2] + assert Enum.slice(map, 0..2) == [a: 1, b: 2, c: 3] + end + + test "slice/3" do + map = 
%{a: 1, b: 2, c: 3, d: 4, e: 5} + assert Enum.slice(map, 1, 2) == [b: 2, c: 3] + assert Enum.slice(map, 1, 0) == [] + assert Enum.slice(map, 2, 5) == [c: 3, d: 4, e: 5] + assert Enum.slice(map, 2, 6) == [c: 3, d: 4, e: 5] + assert Enum.slice(map, 5, 5) == [] + assert Enum.slice(map, 6, 5) == [] + assert Enum.slice(map, 6, 0) == [] + assert Enum.slice(map, -6, 0) == [] + assert Enum.slice(map, -6, 5) == [] + assert Enum.slice(map, -2, 5) == [d: 4, e: 5] + assert Enum.slice(map, -3, 1) == [c: 3] + assert_raise FunctionClauseError, fn -> + Enum.slice(map, 0, -1) + end + assert_raise FunctionClauseError, fn -> + Enum.slice(map, 0.99, 0) + end + assert_raise FunctionClauseError, fn -> + Enum.slice(map, 0, 0.99) + end + + assert Enum.slice(map, 0, 0) == [] + assert Enum.slice(map, 0, 1) == [a: 1] + assert Enum.slice(map, 0, 2) == [a: 1, b: 2] + assert Enum.slice(map, 1, 2) == [b: 2, c: 3] + assert Enum.slice(map, 1, 0) == [] + assert Enum.slice(map, 2, 5) == [c: 3, d: 4, e: 5] + assert Enum.slice(map, 2, 6) == [c: 3, d: 4, e: 5] + assert Enum.slice(map, 5, 5) == [] + assert Enum.slice(map, 6, 5) == [] + assert Enum.slice(map, 6, 0) == [] + assert Enum.slice(map, -6, 0) == [] + assert Enum.slice(map, -6, 5) == [] + assert Enum.slice(map, -2, 5) == [d: 4, e: 5] + assert Enum.slice(map, -3, 1) == [c: 3] + + assert_raise FunctionClauseError, fn -> + Enum.slice(map, 0, -1) + end + assert_raise FunctionClauseError, fn -> + Enum.slice(map, 0.99, 0) + end + assert_raise FunctionClauseError, fn -> + Enum.slice(map, 0, 0.99) + end end end @@ -893,14 +1533,14 @@ defmodule EnumTest.SideEffects do import ExUnit.CaptureIO import PathHelpers - test "take with side effects" do + test "take/2 with side effects" do stream = Stream.unfold(1, fn x -> IO.puts x; {x, x + 1} end) assert capture_io(fn -> Enum.take(stream, 1) end) == "1\n" end - test "take does not consume next without a need" do + test "take/2 does not consume next without a need" do path = tmp_path("oneliner.txt") File.mkdir(Path.dirname(path)) @@ -917,7 +1557,7 @@ defmodule EnumTest.SideEffects do end end - test "take with no item works as no-op" do + test "take/2 with no item works as no-op" do iterator = File.stream!(fixture_path("unknown.txt")) assert Enum.take(iterator, 0) == [] diff --git a/lib/elixir/test/elixir/exception_test.exs b/lib/elixir/test/elixir/exception_test.exs index d474028aae7..4aa22e7faaa 100644 --- a/lib/elixir/test/elixir/exception_test.exs +++ b/lib/elixir/test/elixir/exception_test.exs @@ -1,84 +1,178 @@ Code.require_file "test_helper.exs", __DIR__ -defmodule Kernel.ExceptionTest do +defmodule ExceptionTest do use ExUnit.Case, async: true - test "raise preserves the stacktrace" do + doctest Exception + + test "raising preserves the stacktrace" do stacktrace = try do raise "a" rescue _ -> - [top|_] = System.stacktrace + [top | _] = System.stacktrace top end - file = __ENV__.file |> Path.relative_to_cwd |> String.to_char_list - assert {Kernel.ExceptionTest, :"test raise preserves the stacktrace", _, - [file: ^file, line: 9]} = stacktrace + file = __ENV__.file |> Path.relative_to_cwd |> String.to_charlist + assert {__MODULE__, :"test raising preserves the stacktrace", _, + [file: ^file, line: 11]} = stacktrace end - test "exception?" 
do + test "exception?/1" do assert Exception.exception?(%RuntimeError{}) refute Exception.exception?(%Regex{}) refute Exception.exception?({}) end - test "message" do + test "message/1" do defmodule BadException do - def message(_) do - raise "oops" + def message(exception) do + if exception.raise do + raise "oops" + end end end - message = ~r/Got RuntimeError with message \"oops\" while retrieving message for/ + assert Exception.message(%{__struct__: BadException, __exception__: true, raise: true}) =~ + "got RuntimeError with message \"oops\" while retrieving Exception.message/1 " <> + "for %{__exception__: true, __struct__: ExceptionTest.BadException, raise: true}" - assert_raise ArgumentError, message, fn -> - Exception.message(%{__struct__: BadException, __exception__: true}) - end + assert Exception.message(%{__struct__: BadException, __exception__: true, raise: false}) =~ + "got nil while retrieving Exception.message/1 " <> + "for %{__exception__: true, __struct__: ExceptionTest.BadException, raise: false}" end - require Record - - test "normalize" do + test "normalize/2" do assert Exception.normalize(:throw, :badarg) == :badarg assert Exception.normalize(:exit, :badarg) == :badarg - assert Exception.normalize({:EXIT, self}, :badarg) == :badarg + assert Exception.normalize({:EXIT, self()}, :badarg) == :badarg assert Exception.normalize(:error, :badarg).__struct__ == ArgumentError assert Exception.normalize(:error, %ArgumentError{}).__struct__ == ArgumentError end - test "format_banner" do + test "format/2 without stacktrace" do + stacktrace = try do throw(:stack) catch :stack -> System.stacktrace() end + assert Exception.format(:error, :badarg) == + "** (ArgumentError) argument error\n" <> Exception.format_stacktrace(stacktrace) + end + + test "format/2 with empty stacktrace" do + assert Exception.format(:error, :badarg, []) == "** (ArgumentError) argument error" + end + + test "format/2 with EXIT (has no stacktrace)" do + assert Exception.format({:EXIT, self()}, :badarg) == "** (EXIT from #{inspect self()}) :badarg" + end + + test "format_banner/2" do assert Exception.format_banner(:error, :badarg) == "** (ArgumentError) argument error" assert Exception.format_banner(:throw, :badarg) == "** (throw) :badarg" assert Exception.format_banner(:exit, :badarg) == "** (exit) :badarg" - assert Exception.format_banner({:EXIT, self}, :badarg) == "** (EXIT from #{inspect self}) :badarg" + assert Exception.format_banner({:EXIT, self()}, :badarg) == "** (EXIT from #{inspect self()}) :badarg" end - test "format without stacktrace" do - stacktrace = try do throw(:stack) catch :stack -> System.stacktrace() end - assert Exception.format(:error, :badarg) == "** (ArgumentError) argument error" <> - "\n" <> Exception.format_stacktrace(stacktrace) + test "format_stacktrace/1 from file" do + assert_raise ArgumentError, fn -> + Code.eval_string("def foo do end", [], file: "my_file") + end + + assert Exception.format_stacktrace(System.stacktrace) =~ "my_file:1: (file)" end - test "format with empty stacktrace" do - assert Exception.format(:error, :badarg, []) == "** (ArgumentError) argument error" + test "format_stacktrace/1 from module" do + assert_raise ArgumentError, fn -> + Code.eval_string("defmodule FmtStack do raise ArgumentError, ~s(oops) end", [], file: "my_file") + end + + assert Exception.format_stacktrace(System.stacktrace) =~ "my_file:1: (module)" + end + + test "format_stacktrace_entry/1 with no file or line" do + assert Exception.format_stacktrace_entry({Foo, :bar, [1, 2, 3], []}) == 
"Foo.bar(1, 2, 3)" + assert Exception.format_stacktrace_entry({Foo, :bar, [], []}) == "Foo.bar()" + assert Exception.format_stacktrace_entry({Foo, :bar, 1, []}) == "Foo.bar/1" + end + + test "format_stacktrace_entry/1 with file and line" do + assert Exception.format_stacktrace_entry({Foo, :bar, [], [file: 'file.ex', line: 10]}) == "file.ex:10: Foo.bar()" + assert Exception.format_stacktrace_entry({Foo, :bar, [1, 2, 3], [file: 'file.ex', line: 10]}) == "file.ex:10: Foo.bar(1, 2, 3)" + assert Exception.format_stacktrace_entry({Foo, :bar, 1, [file: 'file.ex', line: 10]}) == "file.ex:10: Foo.bar/1" + end + + test "format_stacktrace_entry/1 with file no line" do + assert Exception.format_stacktrace_entry({Foo, :bar, [], [file: 'file.ex']}) == "file.ex: Foo.bar()" + assert Exception.format_stacktrace_entry({Foo, :bar, [], [file: 'file.ex', line: 0]}) == "file.ex: Foo.bar()" + assert Exception.format_stacktrace_entry({Foo, :bar, [1, 2, 3], [file: 'file.ex']}) == "file.ex: Foo.bar(1, 2, 3)" + assert Exception.format_stacktrace_entry({Foo, :bar, 1, [file: 'file.ex']}) == "file.ex: Foo.bar/1" end - test "format with EXIT has no stacktrace" do - try do throw(:stack) catch :stack -> System.stacktrace() end - assert Exception.format({:EXIT, self}, :badarg) == "** (EXIT from #{inspect self}) :badarg" + test "format_stacktrace_entry/1 with application" do + assert Exception.format_stacktrace_entry({Exception, :bar, [], [file: 'file.ex']}) == + "(elixir) file.ex: Exception.bar()" + assert Exception.format_stacktrace_entry({Exception, :bar, [], [file: 'file.ex', line: 10]}) == + "(elixir) file.ex:10: Exception.bar()" + assert Exception.format_stacktrace_entry({:lists, :bar, [1, 2, 3], []}) == + "(stdlib) :lists.bar(1, 2, 3)" end - test "format_exit" do + test "format_stacktrace_entry/1 with fun" do + assert Exception.format_stacktrace_entry({fn(x) -> x end, [1], []}) =~ ~r/#Function<.+>\(1\)/ + assert Exception.format_stacktrace_entry({fn(x, y) -> {x, y} end, 2, []}) =~ ~r"#Function<.+>/2" + end + + test "format_mfa/3" do + # Let's create this atom so that String.to_existing_atom/1 inside + # format_mfa/3 doesn't raise. 
+ _ = :"some function" + + assert Exception.format_mfa(Foo, nil, 1) == "Foo.nil/1" + assert Exception.format_mfa(Foo, :bar, 1) == "Foo.bar/1" + assert Exception.format_mfa(Foo, :bar, []) == "Foo.bar()" + assert Exception.format_mfa(nil, :bar, []) == "nil.bar()" + assert Exception.format_mfa(:foo, :bar, [1, 2]) == ":foo.bar(1, 2)" + assert Exception.format_mfa(Foo, :b@r, 1) == "Foo.\"b@r\"/1" + assert Exception.format_mfa(Foo, :"bar baz", 1) == "Foo.\"bar baz\"/1" + assert Exception.format_mfa(Foo, :"-func/2-fun-0-", 4) == "anonymous fn/4 in Foo.func/2" + assert Exception.format_mfa(Foo, :"-some function/2-fun-0-", 4) == "anonymous fn/4 in Foo.\"some function\"/2" + assert Exception.format_mfa(Foo, :"42", 1) == "Foo.\"42\"/1" + assert Exception.format_mfa(Foo, :Bar, [1, 2]) == "Foo.\"Bar\"(1, 2)" + assert Exception.format_mfa(Foo, :%{}, [1, 2]) == "Foo.\"%{}\"(1, 2)" + assert Exception.format_mfa(Foo, :..., 1) == "Foo.\"...\"/1" + end + + if :erlang.system_info(:otp_release) >= '20' do + test "format_mfa/3 with unicode" do + assert Exception.format_mfa(Foo, :"olá", [1, 2]) == "Foo.olá(1, 2)" + assert Exception.format_mfa(Foo, :"Olá", [1, 2]) == "Foo.\"Olá\"(1, 2)" + assert Exception.format_mfa(Foo, :"Ólá", [1, 2]) == "Foo.\"Ólá\"(1, 2)" + + hello_world = String.to_atom("こんにちは世界") + assert Exception.format_mfa(Foo, hello_world, [1, 2]) == "Foo.こんにちは世界(1, 2)" + + nfd = :unicode.characters_to_nfd_binary("olá") + assert Exception.format_mfa(Foo, String.to_atom(nfd), [1, 2]) == "Foo.\"#{nfd}\"(1, 2)" + end + end + + test "format_fa/2" do + assert Exception.format_fa(fn -> nil end, 1) =~ + ~r"#Function<\d+\.\d+/0 in ExceptionTest\.\"test format_fa/2\"/1>/1" + end + + ## Format exits + + test "format_exit/1" do assert Exception.format_exit(:bye) == ":bye" assert Exception.format_exit(:noconnection) == "no connection" assert Exception.format_exit({:nodedown, :"node@host"}) == "no connection to node@host" assert Exception.format_exit(:timeout) == "time out" - assert Exception.format_exit(:noproc) == "no process" + assert Exception.format_exit(:noproc) |> String.starts_with?("no process:") assert Exception.format_exit(:killed) == "killed" assert Exception.format_exit(:normal) == "normal" assert Exception.format_exit(:shutdown) == "shutdown" + assert Exception.format_exit(:calling_self) == "process attempted to call itself" assert Exception.format_exit({:shutdown, :bye}) == "shutdown: :bye" - assert Exception.format_exit({:badarg,[{:not_a_real_module, :function, 0, []}]}) == + assert Exception.format_exit({:badarg, [{:not_a_real_module, :function, 0, []}]}) == "an exception was raised:\n ** (ArgumentError) argument error\n :not_a_real_module.function/0" assert Exception.format_exit({:bad_call, :request}) == "bad call: :request" assert Exception.format_exit({:bad_cast, :request}) == "bad cast: :request" @@ -94,7 +188,7 @@ defmodule Kernel.ExceptionTest do def init(fun), do: fun.() end - test "format_exit with supervisor errors" do + test "format_exit/1 with supervisor errors" do trap = Process.flag(:trap_exit, true) {:error, reason} = __MODULE__.Sup.start_link(fn() -> :foo end) @@ -182,41 +276,31 @@ defmodule Kernel.ExceptionTest do Process.flag(:trap_exit, trap) end - test "format_exit with call" do + test "format_exit/1 with call" do reason = try do :gen_server.call(:does_not_exist, :hello) catch :exit, reason -> reason end - assert Exception.format_exit(reason) == - "exited in: :gen_server.call(:does_not_exist, :hello)\n ** (EXIT) no process" - end - - test "format_exit with call with exception" do 
- # Fake reason to prevent error_logger printing to stdout - fsm_reason = {%ArgumentError{}, [{:not_a_real_module, :function, 0, []}]} - reason = try do - :gen_fsm.sync_send_event(spawn(fn() -> - :timer.sleep(200) ; exit(fsm_reason) - end), :hello) - catch - :exit, reason -> reason - end - - formatted = Exception.format_exit(reason) - assert formatted =~ ~r"exited in: :gen_fsm\.sync_send_event\(#PID<\d+\.\d+\.\d+>, :hello\)" - assert formatted =~ ~r"\s{4}\*\* \(EXIT\) an exception was raised:\n" - assert formatted =~ ~r"\s{8}\*\* \(ArgumentError\) argument error\n" - assert formatted =~ ~r"\s{12}:not_a_real_module\.function/0" + expected_to_start_with = "exited in: :gen_server.call(:does_not_exist, :hello)\n ** (EXIT) no process:" + assert Exception.format_exit(reason) |> String.starts_with?(expected_to_start_with) end - test "format_exit with nested calls" do + test "format_exit/1 with nested calls" do + Process.flag(:trap_exit, true) # Fake reason to prevent error_logger printing to stdout - event_fun = fn() -> :timer.sleep(200) ; exit(:normal) end - server_pid = spawn(fn()-> :gen_event.call(spawn(event_fun), :handler, :hello) end) + exit_fun = fn() -> receive do: (_ -> exit(:normal)) end + outer_pid = + spawn_link(fn()-> + Process.flag(:trap_exit, true) + receive do + _ -> + :gen_event.call(spawn_link(exit_fun), :handler, :hello) + end + end) reason = try do - :gen_server.call(server_pid, :hi) + :gen_server.call(outer_pid, :hi) catch :exit, reason -> reason end @@ -227,13 +311,18 @@ defmodule Kernel.ExceptionTest do assert formatted =~ ~r"\s{8}\*\* \(EXIT\) normal" end - test "format_exit with nested calls and exception" do + test "format_exit/1 with nested calls and exception" do + Process.flag(:trap_exit, true) # Fake reason to prevent error_logger printing to stdout - event_reason = {%ArgumentError{}, [{:not_a_real_module, :function, 0, []}]} - event_fun = fn() -> :timer.sleep(200) ; exit(event_reason) end - server_pid = spawn(fn()-> :gen_event.call(spawn(event_fun), :handler, :hello) end) + exit_reason = {%ArgumentError{}, [{:not_a_real_module, :function, 0, []}]} + exit_fun = fn() -> receive do: (_ -> exit(exit_reason)) end + outer_pid = + spawn_link(fn()-> + Process.flag(:trap_exit, true) + :gen_event.call(spawn_link(exit_fun), :handler, :hello) + end) reason = try do - :gen_server.call(server_pid, :hi) + :gen_server.call(outer_pid, :hi) catch :exit, reason -> reason end @@ -246,83 +335,172 @@ defmodule Kernel.ExceptionTest do assert formatted =~ ~r"\s{16}:not_a_real_module\.function/0" end - test "format_stacktrace_entry with no file or line" do - assert Exception.format_stacktrace_entry({Foo, :bar, [1, 2, 3], []}) == "Foo.bar(1, 2, 3)" - assert Exception.format_stacktrace_entry({Foo, :bar, [], []}) == "Foo.bar()" - assert Exception.format_stacktrace_entry({Foo, :bar, 1, []}) == "Foo.bar/1" - end + # TODO: Remove this check once we depend only on 20 + if :erlang.system_info(:otp_release) >= '20' do + describe "blaming" do + test "annotates function clause errors" do + args = [%{}, :key, nil] + {exception, stack} = Exception.blame(:error, :function_clause, [{Keyword, :pop, args, [line: 13]}]) + assert %FunctionClauseError{kind: :def, args: ^args, clauses: [_]} = exception + assert stack == [{Keyword, :pop, 3, [line: 13]}] + end - test "format_stacktrace_entry with file and line" do - assert Exception.format_stacktrace_entry({Foo, :bar, [], [file: 'file.ex', line: 10]}) == "file.ex:10: Foo.bar()" - assert Exception.format_stacktrace_entry({Foo, :bar, [1, 2, 3], [file: 'file.ex', 
line: 10]}) == "file.ex:10: Foo.bar(1, 2, 3)" - assert Exception.format_stacktrace_entry({Foo, :bar, 1, [file: 'file.ex', line: 10]}) == "file.ex:10: Foo.bar/1" - end + test "does not annotate throws/exits" do + stack = [{Keyword, :pop, [%{}, :key, nil], [line: 13]}] + assert Exception.blame(:throw, :function_clause, stack) == {:function_clause, stack} + assert Exception.blame(:exit, :function_clause, stack) == {:function_clause, stack} + end - test "format_stacktrace_entry with file no line" do - assert Exception.format_stacktrace_entry({Foo, :bar, [], [file: 'file.ex']}) == "file.ex: Foo.bar()" - assert Exception.format_stacktrace_entry({Foo, :bar, [], [file: 'file.ex', line: 0]}) == "file.ex: Foo.bar()" - assert Exception.format_stacktrace_entry({Foo, :bar, [1, 2, 3], [file: 'file.ex']}) == "file.ex: Foo.bar(1, 2, 3)" - assert Exception.format_stacktrace_entry({Foo, :bar, 1, [file: 'file.ex']}) == "file.ex: Foo.bar/1" - end + test "annotates args and clauses from mfa" do + {:ok, :def, clauses} = Exception.blame_mfa(Keyword, :pop, [%{}, :key, nil]) + assert annotated_clauses_to_string(clauses) == [ + "{[+keywords+, +key+, +default+], [-is_list(keywords)-]}" + ] + + {:ok, :def, clauses} = Exception.blame_mfa(Keyword, :fetch, [[], "oops"]) + assert annotated_clauses_to_string(clauses) == [ + "{[+keywords+, +key+], [+is_list(keywords)+ and -is_atom(key)-]}" + ] + + {:ok, :def, clauses} = Exception.blame_mfa(Path, :type, [self()]) + assert annotated_clauses_to_string(clauses) == [ + "{[+name+], [-is_list(name)-, -is_binary(name)-]}" + ] + + {:ok, :def, clauses} = Exception.blame_mfa(Access, :fetch, [self(), "oops"]) + assert annotated_clauses_to_string(clauses) == [ + "{[-%struct{} = container-, +key+], []}", + "{[+map+, +key+], [-is_map(map)-]}", + "{[+list+, +key+], [-is_list(list)- and -is_atom(key)-]}", + "{[+list+, +key+], [-is_list(list)-]}", + "{[-nil-, +_key+], []}" + ] + + {:ok, :defmacro, clauses} = Exception.blame_mfa(Kernel, :!, [true]) + assert annotated_clauses_to_string(clauses) == [ + "{[-{:!, _, [value]}-], []}", + "{[+value+], []}" + ] + end + end - test "format_stacktrace_entry with application" do - assert Exception.format_stacktrace_entry({Exception, :bar, [], [file: 'file.ex']}) == - "(elixir) file.ex: Exception.bar()" - assert Exception.format_stacktrace_entry({Exception, :bar, [], [file: 'file.ex', line: 10]}) == - "(elixir) file.ex:10: Exception.bar()" - assert Exception.format_stacktrace_entry({:lists, :bar, [1, 2, 3], []}) == - "(stdlib) :lists.bar(1, 2, 3)" + defp annotated_clauses_to_string(clauses) do + Enum.map(clauses, fn args_and_clauses -> + Macro.to_string(args_and_clauses, fn + %{match?: true, node: node}, _string -> + "+" <> Macro.to_string(node) <> "+" + %{match?: false, node: node}, _string -> + "-" <> Macro.to_string(node) <> "-" + _node, string -> + string + end) + end) + end end - test "format_stacktrace_entry with fun" do - assert Exception.format_stacktrace_entry({fn(x) -> x end, [1], []}) =~ ~r/#Function<.+>\(1\)/ - assert Exception.format_stacktrace_entry({fn(x, y) -> {x, y} end, 2, []}) =~ ~r"#Function<.+>/2" - end + ## Exception messages - test "format_mfa" do - assert Exception.format_mfa(Foo, nil, 1) == "Foo.nil/1" - assert Exception.format_mfa(Foo, :bar, 1) == "Foo.bar/1" - assert Exception.format_mfa(Foo, :bar, []) == "Foo.bar()" - assert Exception.format_mfa(nil, :bar, []) == "nil.bar()" - assert Exception.format_mfa(:foo, :bar, [1, 2]) == ":foo.bar(1, 2)" - assert Exception.format_mfa(Foo, :"bar baz", 1) == "Foo.\"bar baz\"/1" - assert 
Exception.format_mfa(Foo, :"-func/2-fun-0-", 4) == "anonymous fn/4 in Foo.func/2" - end + describe "exception messages" do + import Exception, only: [message: 1] - test "format_fa" do - assert Exception.format_fa(fn -> end, 1) =~ - ~r"#Function<\d\.\d+/0 in Kernel\.ExceptionTest\.test format_fa/1>/1" - end + test "RuntimeError" do + assert %RuntimeError{} |> message == "runtime error" + assert %RuntimeError{message: "unexpected roquefort"} |> message == "unexpected roquefort" + end - import Exception, only: [message: 1] + test "ArithmeticError" do + assert %ArithmeticError{} |> message == "bad argument in arithmetic expression" + assert %ArithmeticError{message: "unexpected camembert"} |> message == "unexpected camembert" + end - test "runtime error message" do - assert %RuntimeError{} |> message == "runtime error" - assert %RuntimeError{message: "exception"} |> message == "exception" - end + test "ArgumentError" do + assert %ArgumentError{} |> message == "argument error" + assert %ArgumentError{message: "unexpected comté"} |> message == "unexpected comté" + end - test "argument error message" do - assert %ArgumentError{} |> message == "argument error" - assert %ArgumentError{message: "exception"} |> message == "exception" - end + test "Enum.OutOfBoundsError" do + assert %Enum.OutOfBoundsError{} |> message == "out of bounds error" + assert %Enum.OutOfBoundsError{message: "the brie is not on the table"} |> message == "the brie is not on the table" + end - test "undefined function message" do - assert %UndefinedFunctionError{} |> message == "undefined function" - assert %UndefinedFunctionError{module: Foo, function: :bar, arity: 1} |> message == - "undefined function: Foo.bar/1" - assert %UndefinedFunctionError{module: nil, function: :bar, arity: 0} |> message == - "undefined function: nil.bar/0" - end + test "Enum.EmptyError" do + assert %Enum.EmptyError{} |> message == "empty error" + assert %Enum.EmptyError{message: "there is no saint-nectaire left!"} |> message == "there is no saint-nectaire left!" + end - test "function clause message" do - assert %FunctionClauseError{} |> message == - "no function clause matches" - assert %FunctionClauseError{module: Foo, function: :bar, arity: 1} |> message == - "no function clause matching in Foo.bar/1" - end + test "UndefinedFunctionError" do + assert %UndefinedFunctionError{} |> message == "undefined function" + assert %UndefinedFunctionError{module: Kernel, function: :bar, arity: 1} |> message == + "function Kernel.bar/1 is undefined or private" + assert %UndefinedFunctionError{module: Foo, function: :bar, arity: 1} |> message == + "function Foo.bar/1 is undefined (module Foo is not available)" + assert %UndefinedFunctionError{module: nil, function: :bar, arity: 0} |> message == + "function nil.bar/0 is undefined or private" + end + + test "UndefinedFunctionError with suggestions" do + assert %UndefinedFunctionError{module: Enum, function: :map, arity: 1} |> message == """ + function Enum.map/1 is undefined or private. Did you mean one of: + + * map/2 + """ + assert %UndefinedFunctionError{module: Enum, function: :man, arity: 1} |> message == """ + function Enum.man/1 is undefined or private. Did you mean one of: + + * map/2 + * max/1 + * max/2 + * min/1 + * min/2 + """ + assert %UndefinedFunctionError{module: :erlang, function: :gt_cookie, arity: 0} |> message == """ + function :erlang.gt_cookie/0 is undefined or private. 
Did you mean one of: + + * get_cookie/0 + * set_cookie/2 + """ + end + + test "UndefinedFunctionError when the mfa is a macro but require wasn't called" do + _ = Code.ensure_loaded(Integer) + assert %UndefinedFunctionError{module: Integer, function: :is_odd, arity: 1} |> message == + "function Integer.is_odd/1 is undefined or private. However there is " <> + "a macro with the same name and arity. Be sure to require Integer if " <> + "you intend to invoke this macro" + end + + test "FunctionClauseError" do + assert %FunctionClauseError{} |> message == + "no function clause matches" + assert %FunctionClauseError{module: Foo, function: :bar, arity: 1} |> message == + "no function clause matching in Foo.bar/1" + end + + # TODO: Remove this check once we depend only on 20 + if :erlang.system_info(:otp_release) >= '20' do + test "FunctionClauseError with blame" do + {exception, _} = Exception.blame(:error, :function_clause, [{Access, :fetch, [:foo, :bar], [line: 13]}]) + assert message(exception) =~ """ + no function clause matching in Access.fetch/2 - test "erlang error message" do - assert %ErlangError{original: :sample} |> message == - "erlang error: :sample" + The following arguments were given to Access.fetch/2: + + # 1 + :foo + + # 2 + :bar + + Attempted function clauses (showing 5 out of 5): + + def fetch(-%struct{} = container-, +key+) + """ + end + end + + test "ErlangError" do + assert %ErlangError{original: :sample} |> message == + "Erlang error: :sample" + end end end diff --git a/lib/elixir/test/elixir/file_test.exs b/lib/elixir/test/elixir/file_test.exs index 3c43287e693..16642a188a4 100644 --- a/lib/elixir/test/elixir/file_test.exs +++ b/lib/elixir/test/elixir/file_test.exs @@ -11,20 +11,337 @@ defmodule Elixir.FileCase do end setup do - File.mkdir_p!(tmp_path) - on_exit(fn -> File.rm_rf(tmp_path) end) + File.mkdir_p!(tmp_path()) + on_exit(fn -> File.rm_rf(tmp_path()) end) :ok end end defmodule FileTest do use Elixir.FileCase - import Regex, only: [escape: 1] + + defmodule Rename do + # Following Erlang's underlying implementation + # + # Renaming files + # :ok -> rename file to existing file default behaviour + # {:error, :eisdir} -> rename file to existing empty dir + # {:error, :eisdir} -> rename file to existing non-empty dir + # :ok -> rename file to non-existing location + # {:error, :eexist} -> rename file to existing file + # :ok -> rename file to itself + + # Renaming dirs + # {:error, :enotdir} -> rename dir to existing file + # :ok -> rename dir to non-existing leaf location + # {:error, ??} -> rename dir to non-existing parent location + # :ok -> rename dir to itself + # :ok -> rename dir to existing empty dir default behaviour + # {:error, :eexist} -> rename dir to existing empty dir + # {:error, :einval} -> rename parent dir to existing sub dir + # {:error, :einval} -> rename parent dir to non-existing sub dir + # {:error, :eexist} -> rename dir to existing non-empty dir + + # other tests + # {:error, :enoent} -> rename unknown source + # :ok -> rename preserves mode + use Elixir.FileCase + + test "rename file to existing file default behaviour" do + src = tmp_fixture_path("file.txt") + dest = tmp_path("tmp.file") + + File.write!(dest, "hello") + + try do + assert File.exists?(dest) + assert File.rename(src, dest) == :ok + refute File.exists?(src) + assert File.read!(dest) == "FOO\n" + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename file to existing empty dir" do + src = tmp_fixture_path("file.txt") + dest = tmp_path("tmp") + + try do + 
File.mkdir(dest) + assert File.rename(src, dest) == {:error, :eisdir} + assert File.exists?(src) + refute File.exists?(tmp_path("tmp/file.txt")) + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename file to existing non-empty dir" do + src = tmp_fixture_path("file.txt") + dest = tmp_path("tmp") + + try do + File.mkdir_p(Path.join(dest, "a")) + assert File.rename(src, dest) == {:error, :eisdir} + assert File.exists?(src) + refute File.exists?(Path.join(dest, "file.txt")) + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename file to non-existing location" do + src = tmp_fixture_path("file.txt") + dest = tmp_path("tmp.file") + + try do + refute File.exists?(dest) + assert File.rename(src, dest) == :ok + assert File.exists?(dest) + refute File.exists?(src) + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename file to existing file" do + src = tmp_fixture_path("file.txt") + dest = tmp_path("tmp.file") + + File.write!(dest, "hello") + + try do + assert File.exists?(dest) + assert File.rename(src, dest) == :ok + refute File.exists?(src) + assert File.read!(dest) == "FOO\n" + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename file to itself" do + src = tmp_fixture_path("file.txt") + dest = src + + try do + assert File.exists?(src) + assert File.rename(src, dest) == :ok + assert File.exists?(src) + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename dir to existing file" do + src = tmp_fixture_path("cp_r") + dest = tmp_path("tmp.file") + try do + File.touch(dest) + assert File.rename(src, dest) == {:error, :enotdir} + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename dir to non-existing leaf location" do + src = tmp_fixture_path("cp_r") + dest = tmp_path("tmp") + + try do + refute File.exists?(tmp_path("tmp/a/1.txt")) + refute File.exists?(tmp_path("tmp/a/a/2.txt")) + refute File.exists?(tmp_path("tmp/b/3.txt")) + + assert File.rename(src, dest) == :ok + {:ok, files} = File.ls(dest) + assert length(files) == 2 + assert "a" in files + + {:ok, files} = File.ls(tmp_path("tmp/a")) + assert length(files) == 2 + assert "1.txt" in files + + assert File.exists?(tmp_path("tmp/a/1.txt")) + assert File.exists?(tmp_path("tmp/a/a/2.txt")) + assert File.exists?(tmp_path("tmp/b/3.txt")) + + refute File.exists?(src) + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename dir to non-existing parent location" do + src = tmp_fixture_path("cp_r") + dest = tmp_path("tmp/a/b") + + try do + assert File.rename(src, dest) == {:error, :enoent} + assert File.exists?(src) + refute File.exists?(dest) + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename dir to itself" do + src = tmp_fixture_path("cp_r") + dest = src + + try do + assert File.exists?(src) + assert File.rename(src, dest) == :ok + assert File.exists?(src) + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename parent dir to existing sub dir" do + src = tmp_fixture_path("cp_r") + dest = tmp_path("cp_r/a") + try do + assert File.exists?(src) + assert File.rename(src, dest) == {:error, :einval} + assert File.exists?(src) + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename parent dir to non-existing sub dir" do + src = tmp_fixture_path("cp_r") + dest = tmp_path("cp_r/x") + try do + assert File.exists?(src) + assert File.rename(src, dest) == {:error, :einval} + assert File.exists?(src) + refute File.exists?(dest) + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename dir to 
existing empty dir default behaviour" do + src = tmp_fixture_path("cp_r") + dest = tmp_path("tmp") + + File.mkdir(dest) + + try do + refute File.exists?(tmp_path("tmp/a")) + + assert File.rename(src, dest) == :ok + {:ok, files} = File.ls(dest) + assert length(files) == 2 + assert "a" in files + + {:ok, files} = File.ls(tmp_path("tmp/a")) + assert length(files) == 2 + assert "1.txt" in files + + assert File.exists?(tmp_path("tmp/a/1.txt")) + assert File.exists?(tmp_path("tmp/a/a/2.txt")) + assert File.exists?(tmp_path("tmp/b/3.txt")) + + refute File.exists?(src) + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename dir to existing empty dir" do + src = tmp_fixture_path("cp_r") + dest = tmp_path("tmp") + + File.mkdir(dest) + + try do + assert File.exists?(dest) + assert File.rename(src, dest) == :ok + refute File.exists?(src) + assert File.exists?(tmp_path("tmp/a")) + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename dir to existing non-empty dir" do + src = tmp_fixture_path("cp_r") + dest = tmp_path("tmp") + + File.mkdir_p(tmp_path("tmp/x")) + + try do + assert File.exists?(tmp_path("tmp/x")) + assert File.exists?(src) + refute File.exists?(tmp_path("tmp/a")) + + assert File.rename(src, dest) == {:error, :eexist} + + assert File.exists?(tmp_path("tmp/x")) + assert File.exists?(src) + refute File.exists?(tmp_path("tmp/a")) + after + File.rm_rf src + File.rm_rf dest + end + end + + test "rename unknown source" do + src = fixture_path("unknown") + dest = tmp_path("tmp") + try do + assert File.rename(src, dest) == {:error, :enoent} + after + File.rm_rf dest + end + end + + test "rename preserves mode" do + File.mkdir_p!(tmp_path("tmp")) + src = tmp_fixture_path("cp_mode") + dest = tmp_path("tmp/cp_mode") + + try do + %File.Stat{mode: src_mode} = File.stat! src + File.rename(src, dest) + %File.Stat{mode: dest_mode} = File.stat! dest + assert src_mode == dest_mode + after + File.rm_rf src + File.rm_rf dest + end + end + + def tmp_fixture_path(extra) do + src = fixture_path(extra) + dest = tmp_path(extra) + File.cp_r(src, dest) + dest + end + end defmodule Cp do use Elixir.FileCase - test :cp_with_src_file_and_dest_file do + test "cp with src file and dest file" do src = fixture_path("file.txt") dest = tmp_path("sample.txt") @@ -39,7 +356,7 @@ defmodule FileTest do end end - test :cp_with_src_file_and_dest_dir do + test "cp with src file and dest dir" do src = fixture_path("file.txt") dest = tmp_path("tmp") @@ -52,7 +369,7 @@ defmodule FileTest do end end - test :cp_with_src_file_and_dest_unknown do + test "cp with src file and dest unknown" do src = fixture_path("file.txt") dest = tmp_path("tmp.file") @@ -65,13 +382,13 @@ defmodule FileTest do end end - test :cp_with_src_dir do + test "cp with src dir" do src = fixture_path("cp_r") dest = tmp_path("tmp.file") assert File.cp(src, dest) == {:error, :eisdir} end - test :cp_with_conflict do + test "cp with conflict" do src = fixture_path("file.txt") dest = tmp_path("tmp.file") @@ -86,7 +403,7 @@ defmodule FileTest do end end - test :cp_with_conflict_with_function do + test "cp with conflict with function" do src = fixture_path("file.txt") dest = tmp_path("tmp.file") @@ -105,7 +422,7 @@ defmodule FileTest do end end - test :cp_with_src_file_and_dest_file! do + test "cp! with src file and dest file" do src = fixture_path("file.txt") dest = tmp_path("sample.txt") @@ -120,16 +437,30 @@ defmodule FileTest do end end - test :cp_with_src_dir! do + test "cp! 
with src dir" do src = fixture_path("cp_r") dest = tmp_path("tmp.file") - assert_raise File.CopyError, "could not copy recursively from #{src} to #{dest}: " <> - "illegal operation on a directory", fn -> + assert_raise File.CopyError, "could not copy from #{inspect(src)} " <> + "to #{inspect(dest)}: illegal operation on a directory", fn -> File.cp!(src, dest) end end - test :cp_r_with_src_file_and_dest_file do + test "copy file to itself" do + src = dest = tmp_path("tmp.file") + + File.write!(src, "here") + + try do + assert File.cp(src, dest) == :ok + assert File.read!(dest) == "here" + assert File.cp_r(src, dest) == {:ok, []} + after + File.rm(dest) + end + end + + test "cp_r with src file and dest file" do src = fixture_path("file.txt") dest = tmp_path("sample.txt") @@ -144,7 +475,7 @@ defmodule FileTest do end end - test :cp_r_with_src_file_and_dest_dir do + test "cp_r with src file and dest dir" do src = fixture_path("file.txt") dest = tmp_path("tmp") @@ -157,7 +488,7 @@ defmodule FileTest do end end - test :cp_r_with_src_file_and_dest_unknown do + test "cp_r with src file and dest unknown" do src = fixture_path("file.txt") dest = tmp_path("tmp.file") @@ -170,7 +501,7 @@ defmodule FileTest do end end - test :cp_r_with_src_dir_and_dest_dir do + test "cp_r with src dir and dest dir" do src = fixture_path("cp_r") dest = tmp_path("tmp") @@ -194,7 +525,7 @@ defmodule FileTest do end end - test :cp_r_with_src_dir_and_dest_file do + test "cp_r with src dir and dest file" do src = fixture_path("cp_r") dest = tmp_path("tmp.file") @@ -206,7 +537,7 @@ defmodule FileTest do end end - test :cp_r_with_src_dir_and_dest_unknown do + test "cp_r with src dir and dest unknown" do src = fixture_path("cp_r") dest = tmp_path("tmp") @@ -226,13 +557,13 @@ defmodule FileTest do end end - test :cp_r_with_src_unknown do + test "cp_r with src unknown" do src = fixture_path("unknown") dest = tmp_path("tmp") assert File.cp_r(src, dest) == {:error, :enoent, src} end - test :cp_r_with_dir_and_file_conflict do + test "cp_r with dir and file conflict" do src = fixture_path("cp_r") dest = tmp_path("tmp") @@ -245,9 +576,9 @@ defmodule FileTest do end end - test :cp_r_with_src_dir_and_dest_dir_using_lists do - src = fixture_path("cp_r") |> to_char_list - dest = tmp_path("tmp") |> to_char_list + test "cp_r with src dir and dest dir using lists" do + src = fixture_path("cp_r") |> to_charlist + dest = tmp_path("tmp") |> to_charlist File.mkdir(dest) @@ -268,7 +599,7 @@ defmodule FileTest do end end - test :cp_r_with_src_with_file_conflict do + test "cp_r with src with file conflict" do src = fixture_path("cp_r") dest = tmp_path("tmp") @@ -284,7 +615,7 @@ defmodule FileTest do end end - test :cp_r_with_src_with_file_conflict_callback do + test "cp_r with src with file conflict callback" do src = fixture_path("cp_r") dest = tmp_path("tmp") @@ -304,7 +635,7 @@ defmodule FileTest do end end - test :cp_r! do + test "cp_r!" do src = fixture_path("cp_r") dest = tmp_path("tmp") @@ -325,15 +656,15 @@ defmodule FileTest do end end - test :cp_r_with_src_unknown! do + test "cp_r with src_unknown!" do src = fixture_path("unknown") dest = tmp_path("tmp") - assert_raise File.CopyError, "could not copy recursively from #{src} to #{dest}. #{src}: no such file or directory", fn -> + assert_raise File.CopyError, "could not copy recursively from #{inspect(src)} to #{inspect(dest)}. 
#{src}: no such file or directory", fn -> File.cp_r!(src, dest) end end - test :cp_preserves_mode do + test "cp preserves mode" do File.mkdir_p!(tmp_path("tmp")) src = fixture_path("cp_mode") dest = tmp_path("tmp/cp_mode") @@ -358,15 +689,15 @@ defmodule FileTest do defmodule Queries do use ExUnit.Case - test :regular do + test "regular" do assert File.regular?(__ENV__.file) - assert File.regular?(String.to_char_list(__ENV__.file)) + assert File.regular?(String.to_charlist(__ENV__.file)) refute File.regular?("#{__ENV__.file}.unknown") end - test :exists do + test "exists" do assert File.exists?(__ENV__.file) - assert File.exists?(fixture_path) + assert File.exists?(fixture_path()) assert File.exists?(fixture_path("file.txt")) refute File.exists?(fixture_path("missing.txt")) @@ -374,16 +705,16 @@ defmodule FileTest do end end - test :ls do - {:ok, value} = File.ls(fixture_path) + test "ls" do + {:ok, value} = File.ls(fixture_path()) assert "code_sample.exs" in value assert "file.txt" in value {:error, :enoent} = File.ls(fixture_path("non-existent-subdirectory")) end - test :ls! do - value = File.ls!(fixture_path) + test "ls!" do + value = File.ls!(fixture_path()) assert "code_sample.exs" in value assert "file.txt" in value @@ -395,30 +726,30 @@ defmodule FileTest do defmodule OpenReadWrite do use Elixir.FileCase - test :read_with_binary do + test "read with binary" do assert {:ok, "FOO\n"} = File.read(fixture_path("file.txt")) assert {:error, :enoent} = File.read(fixture_path("missing.txt")) end - test :read_with_list do + test "read with list" do assert {:ok, "FOO\n"} = File.read(Path.expand('fixtures/file.txt', __DIR__)) assert {:error, :enoent} = File.read(Path.expand('fixtures/missing.txt', __DIR__)) end - test :read_with_utf8 do + test "read with UTF-8" do assert {:ok, "Русский\n日\n"} = File.read(Path.expand('fixtures/utf8.txt', __DIR__)) end - test :read! do + test "read!" 
do assert File.read!(fixture_path("file.txt")) == "FOO\n" - expected_message = "could not read file fixtures/missing.txt: no such file or directory" + expected_message = "could not read file \"fixtures/missing.txt\": no such file or directory" assert_raise File.Error, expected_message, fn -> File.read!("fixtures/missing.txt") end end - test :write_ascii_content do + test "write ASCII content" do fixture = tmp_path("tmp_test.txt") try do refute File.exists?(fixture) @@ -429,7 +760,7 @@ defmodule FileTest do end end - test :write_utf8 do + test "write UTF-8" do fixture = tmp_path("tmp_test.txt") try do refute File.exists?(fixture) @@ -440,7 +771,7 @@ defmodule FileTest do end end - test :write_with_options do + test "write with options" do fixture = tmp_path("tmp_test.txt") try do refute File.exists?(fixture) @@ -452,31 +783,31 @@ defmodule FileTest do end end - test :open_file_without_modes do + test "open file without modes" do {:ok, file} = File.open(fixture_path("file.txt")) assert IO.gets(file, "") == "FOO\n" assert File.close(file) == :ok end - test :open_file_with_char_list do - {:ok, file} = File.open(fixture_path("file.txt"), [:char_list]) + test "open file with charlist" do + {:ok, file} = File.open(fixture_path("file.txt"), [:charlist]) assert IO.gets(file, "") == 'FOO\n' assert File.close(file) == :ok end - test :open_utf8_by_default do + test "open UTF-8 by default" do {:ok, file} = File.open(fixture_path("utf8.txt"), [:utf8]) assert IO.gets(file, "") == "Русский\n" assert File.close(file) == :ok end - test :open_readonly_by_default do + test "open readonly by default" do {:ok, file} = File.open(fixture_path("file.txt")) assert_raise ArgumentError, fn -> IO.write(file, "foo") end assert File.close(file) == :ok end - test :open_with_write_permission do + test "open with write permission" do fixture = tmp_path("tmp_text.txt") try do {:ok, file} = File.open(fixture, [:write]) @@ -488,7 +819,7 @@ defmodule FileTest do end end - test :open_with_binwrite_permission do + test "open with binwrite permission" do fixture = tmp_path("tmp_text.txt") try do {:ok, file} = File.open(fixture, [:write]) @@ -500,35 +831,35 @@ defmodule FileTest do end end - test :open_utf8_and_charlist do - {:ok, file} = File.open(fixture_path("utf8.txt"), [:char_list, :utf8]) + test "open UTF-8 and charlist" do + {:ok, file} = File.open(fixture_path("utf8.txt"), [:charlist, :utf8]) assert IO.gets(file, "") == [1056, 1091, 1089, 1089, 1082, 1080, 1081, 10] assert File.close(file) == :ok end - test :open_respects_encoding do + test "open respects encoding" do {:ok, file} = File.open(fixture_path("utf8.txt"), [{:encoding, :latin1}]) assert IO.gets(file, "") == <<195, 144, 194, 160, 195, 145, 194, 131, 195, 145, 194, 129, 195, 145, 194, 129, 195, 144, 194, 186, 195, 144, 194, 184, 195, 144, 194, 185, 10>> assert File.close(file) == :ok end - test :open_a_missing_file do + test "open a missing file" do assert File.open('missing.txt') == {:error, :enoent} end - test :open_a_file_with_function do + test "open a file with function" do file = fixture_path("file.txt") assert File.open(file, &IO.read(&1, :line)) == {:ok, "FOO\n"} end - test :open_a_missing_file! do - message = "could not open missing.txt: no such file or directory" + test "open! a missing file" do + message = "could not open \"missing.txt\": no such file or directory" assert_raise File.Error, message, fn -> File.open!('missing.txt') end end - test :open_a_file_with_function! do + test "open! 
a file with function" do file = fixture_path("file.txt") assert File.open!(file, &IO.read(&1, :line)) == "FOO\n" end @@ -537,7 +868,7 @@ defmodule FileTest do defmodule Mkdir do use Elixir.FileCase - test :mkdir_with_binary do + test "mkdir with binary" do fixture = tmp_path("tmp_test") try do refute File.exists?(fixture) @@ -548,8 +879,8 @@ defmodule FileTest do end end - test :mkdir_with_list do - fixture = tmp_path("tmp_test") |> to_char_list + test "mkdir with list" do + fixture = tmp_path("tmp_test") |> to_charlist try do refute File.exists?(fixture) assert File.mkdir(fixture) == :ok @@ -559,7 +890,7 @@ defmodule FileTest do end end - test :mkdir_with_invalid_path do + test "mkdir with invalid path" do fixture = fixture_path("file.txt") invalid = Path.join fixture, "test" assert File.exists?(fixture) @@ -567,7 +898,7 @@ defmodule FileTest do refute File.exists?(invalid) end - test :mkdir! do + test "mkdir!" do fixture = tmp_path("tmp_test") try do refute File.exists?(fixture) @@ -578,16 +909,16 @@ defmodule FileTest do end end - test :mkdir_with_invalid_path! do + test "mkdir! with invalid path" do fixture = fixture_path("file.txt") invalid = Path.join fixture, "test" assert File.exists?(fixture) - assert_raise File.Error, ~r"^could not make directory #{escape invalid}: (not a directory|no such file or directory)", fn -> + assert_raise File.Error, ~r"\Acould not make directory #{inspect invalid}: (not a directory|no such file or directory)", fn -> File.mkdir!(invalid) end end - test :mkdir_p_with_one_directory do + test "mkdir_p with one directory" do fixture = tmp_path("tmp_test") try do refute File.exists?(fixture) @@ -598,7 +929,7 @@ defmodule FileTest do end end - test :mkdir_p_with_nested_directory_and_binary do + test "mkdir_p with nested directory and binary" do base = tmp_path("tmp_test") fixture = Path.join(base, "test") refute File.exists?(base) @@ -612,8 +943,8 @@ defmodule FileTest do end end - test :mkdir_p_with_nested_directory_and_list do - base = tmp_path("tmp_test") |> to_char_list + test "mkdir_p with nested directory and list" do + base = tmp_path("tmp_test") |> to_charlist fixture = Path.join(base, "test") refute File.exists?(base) @@ -626,7 +957,7 @@ defmodule FileTest do end end - test :mkdir_p_with_nested_directory_and_existing_parent do + test "mkdir_p with nested directory and existing parent" do base = tmp_path("tmp_test") fixture = Path.join(base, "test") @@ -641,14 +972,14 @@ defmodule FileTest do end end - test :mkdir_p_with_invalid_path do + test "mkdir_p with invalid path" do assert File.exists?(fixture_path("file.txt")) invalid = Path.join fixture_path("file.txt"), "test/foo" assert io_error? File.mkdir(invalid) refute File.exists?(invalid) end - test :mkdir_p! do + test "mkdir_p!" do fixture = tmp_path("tmp_test") try do refute File.exists?(fixture) @@ -659,11 +990,11 @@ defmodule FileTest do end end - test :mkdir_p_with_invalid_path! do + test "mkdir_p! 
with invalid path" do fixture = fixture_path("file.txt") invalid = Path.join fixture, "test" assert File.exists?(fixture) - assert_raise File.Error, ~r"^could not make directory \(with -p\) #{escape invalid}: (not a directory|no such file or directory)", fn -> + assert_raise File.Error, ~r"\Acould not make directory \(with -p\) #{inspect invalid}: (not a directory|no such file or directory)", fn -> File.mkdir_p!(invalid) end end @@ -677,32 +1008,32 @@ defmodule FileTest do defmodule Rm do use Elixir.FileCase - test :rm_file do + test "rm file" do fixture = tmp_path("tmp_test.txt") File.write(fixture, "test") assert File.exists?(fixture) assert File.rm(fixture) == :ok refute File.exists?(fixture) end - - test :rm_read_only_file do + + test "rm read only file" do fixture = tmp_path("tmp_test.txt") File.write(fixture, "test") assert File.exists?(fixture) - File.chmod(fixture, 0100444) + File.chmod(fixture, 0o100444) assert File.rm(fixture) == :ok refute File.exists?(fixture) end - test :rm_file_with_dir do - assert File.rm(fixture_path) == {:error, :eperm} + test "rm file with dir" do + assert File.rm(fixture_path()) == {:error, :eperm} end - test :rm_nonexistent_file do + test "rm nonexistent file" do assert File.rm('missing.txt') == {:error, :enoent} end - test :rm! do + test "rm!" do fixture = tmp_path("tmp_test.txt") File.write(fixture, "test") assert File.exists?(fixture) @@ -710,13 +1041,13 @@ defmodule FileTest do refute File.exists?(fixture) end - test :rm_with_invalid_file! do - assert_raise File.Error, "could not remove file missing.file: no such file or directory", fn -> + test "rm! with invalid file" do + assert_raise File.Error, "could not remove file \"missing.file\": no such file or directory", fn -> File.rm!("missing.file") end end - test :rmdir do + test "rmdir" do fixture = tmp_path("tmp_test") File.mkdir_p(fixture) assert File.dir?(fixture) @@ -724,11 +1055,11 @@ defmodule FileTest do refute File.exists?(fixture) end - test :rmdir_with_file do + test "rmdir with file" do assert io_error? File.rmdir(fixture_path("file.txt")) end - test :rmdir! do + test "rmdir!" do fixture = tmp_path("tmp_test") File.mkdir_p(fixture) assert File.dir?(fixture) @@ -736,14 +1067,33 @@ defmodule FileTest do refute File.exists?(fixture) end - test :rmdir_with_file! do + test "rmdir! with file" do fixture = fixture_path("file.txt") - assert_raise File.Error, ~r"^could not remove directory #{escape fixture}: (not a directory|I/O error)", fn -> + assert_raise File.Error, ~r"\Acould not remove directory #{inspect fixture}: (not a directory|I/O error)", fn -> File.rmdir!(fixture) end end - test :rm_rf do + test "rmdir! 
error messages" do + fixture = tmp_path("tmp_test") + File.mkdir_p(fixture) + File.touch(fixture <> "/file") + + # directory is not empty + assert_raise File.Error, "could not remove directory #{inspect fixture}: directory is not empty", fn -> + File.rmdir!(fixture) + end + + # directory does not exist + non_existent_dir = fixture <> "/non_existent_dir" + assert_raise File.Error, ~r"\Acould not remove directory #{inspect non_existent_dir}: (not a directory|no such file or directory)", fn -> + File.rmdir!(non_existent_dir) + end + + File.rm_rf(fixture) + end + + test "rm_rf" do fixture = tmp_path("tmp") File.mkdir(fixture) File.cp_r!(fixture_path("cp_r"), fixture) @@ -763,7 +1113,7 @@ defmodule FileTest do refute File.exists?(fixture) end - test :rm_rf_with_symlink do + test "rm_rf with symlink" do from = tmp_path("tmp/from") to = tmp_path("tmp/to") @@ -771,7 +1121,7 @@ defmodule FileTest do File.write!(Path.join(to, "hello"), "world") :file.make_symlink(to, from) - if File.exists?(from) or not is_win? do + if File.exists?(from) or not windows?() do assert File.exists?(from) {:ok, files} = File.rm_rf(from) @@ -784,8 +1134,8 @@ defmodule FileTest do File.rm(tmp_path("tmp/from")) end - test :rm_rf_with_char_list do - fixture = tmp_path("tmp") |> to_char_list + test "rm_rf with charlist" do + fixture = tmp_path("tmp") |> to_charlist File.mkdir(fixture) File.cp_r!(fixture_path("cp_r"), fixture) @@ -804,23 +1154,23 @@ defmodule FileTest do refute File.exists?(fixture) end - test :rm_rf_with_file do + test "rm_rf with file" do fixture = tmp_path("tmp") File.write(fixture, "hello") assert File.rm_rf(fixture) == {:ok, [fixture]} end - test :rm_rf_with_unknown do + test "rm_rf with unknown" do fixture = tmp_path("tmp.unknown") assert File.rm_rf(fixture) == {:ok, []} end - test :rm_rf_with_invalid do + test "rm_rf with invalid" do fixture = fixture_path "file.txt/path" assert File.rm_rf(fixture) == {:ok, []} end - test :rm_rf! do + test "rm_rf!" do fixture = tmp_path("tmp") File.mkdir(fixture) File.cp_r!(fixture_path("cp_r"), fixture) @@ -840,7 +1190,7 @@ defmodule FileTest do refute File.exists?(fixture) end - test :rm_rf_with_invalid! do + test "rm_rf! with invalid path" do fixture = fixture_path "file.txt/path" assert File.rm_rf!(fixture) == [] end @@ -850,26 +1200,114 @@ defmodule FileTest do end end - test :stat do + test "stat" do {:ok, info} = File.stat(__ENV__.file) assert info.mtime end - test :stat! do + test "stat!" do assert File.stat!(__ENV__.file).mtime end - test :stat_with_invalid_file do + test "stat with invalid file" do assert {:error, _} = File.stat("./invalid_file") end - test :stat_with_invalid_file! do + test "stat! with invalid_file" do assert_raise File.Error, fn -> File.stat!("./invalid_file") end end - test :io_stream_utf8 do + test "lstat" do + {:ok, info} = File.lstat(__ENV__.file) + assert info.mtime + end + + test "lstat!" do + assert File.lstat!(__ENV__.file).mtime + end + + test "lstat with invalid file" do + invalid_file = tmp_path("invalid_file") + assert {:error, _} = File.lstat(invalid_file) + end + + test "lstat! with invalid file" do + invalid_file = tmp_path("invalid_file") + assert_raise File.Error, fn -> + File.lstat!(invalid_file) + end + end + + test "lstat with dangling symlink" do + invalid_file = tmp_path("invalid_file") + dest = tmp_path("dangling_symlink") + File.ln_s(invalid_file, dest) + try do + assert {:ok, info } = File.lstat(dest) + assert info.type == :symlink + after + File.rm(dest) + end + end + + test "lstat! 
with dangling symlink" do + invalid_file = tmp_path("invalid_file") + dest = tmp_path("dangling_symlink") + File.ln_s(invalid_file, dest) + try do + assert File.lstat!(dest).type == :symlink + after + File.rm(dest) + end + end + + test "read_link with regular file" do + dest = tmp_path("symlink") + File.touch(dest) + try do + assert File.read_link(dest) == {:error, :einval} + after + File.rm(dest) + end + end + + test "read_link with nonexistent file" do + dest = tmp_path("does_not_exist") + assert File.read_link(dest) == {:error, :enoent} + end + + test "read_link! with nonexistent file" do + dest = tmp_path("does_not_exist") + assert_raise File.Error, fn -> File.read_link!(dest) end + end + + unless windows?() do + test "read_link with symlink" do + target = tmp_path("does_not_need_to_exist") + dest = tmp_path("symlink") + File.ln_s(target, dest) + try do + assert File.read_link(dest) == {:ok, target} + after + File.rm(dest) + end + end + + test "read_link! with symlink" do + target = tmp_path("does_not_need_to_exist") + dest = tmp_path("symlink") + File.ln_s(target, dest) + try do + assert File.read_link!(dest) == target + after + File.rm(dest) + end + end + end + + test "IO stream UTF-8" do src = File.open! fixture_path("file.txt"), [:utf8] dest = tmp_path("tmp_test.txt") @@ -884,7 +1322,7 @@ defmodule FileTest do end end - test :io_stream do + test "IO stream" do src = File.open! fixture_path("file.txt") dest = tmp_path("tmp_test.txt") @@ -899,7 +1337,7 @@ defmodule FileTest do end end - test :stream_map do + test "stream map" do src = fixture_path("file.txt") stream = File.stream!(src) assert %File.Stream{} = stream @@ -915,7 +1353,39 @@ defmodule FileTest do assert stream.line_or_bytes == 10 end - test :stream_line_utf8 do + test "stream count" do + src = fixture_path("file.txt") + stream = File.stream!(src) + assert Enum.count(stream) == 1 + + stream = File.stream!(src, [:utf8]) + assert Enum.count(stream) == 1 + + stream = File.stream!(src, [], 2) + assert Enum.count(stream) == 2 + end + + test "stream keeps BOM" do + src = fixture_path("utf8_bom.txt") + bom_line = + src + |> File.stream!() + |> Enum.take(1) + + assert [<<239, 187, 191>> <> "Русский\n"] == bom_line + end + + test "trim BOM via option" do + src = fixture_path("utf8_bom.txt") + bom_line = + src + |> File.stream!([:trim_bom]) + |> Enum.take(1) + + assert ["Русский\n"] == bom_line + end + + test "stream line UTF-8" do src = fixture_path("file.txt") dest = tmp_path("tmp_test.txt") @@ -932,7 +1402,7 @@ defmodule FileTest do end end - test :stream_bytes_utf8 do + test "stream bytes UTF-8" do src = fixture_path("file.txt") dest = tmp_path("tmp_test.txt") @@ -949,7 +1419,7 @@ defmodule FileTest do end end - test :stream_line do + test "stream line" do src = fixture_path("file.txt") dest = tmp_path("tmp_test.txt") @@ -966,7 +1436,7 @@ defmodule FileTest do end end - test :stream_bytes do + test "stream bytes" do src = fixture_path("file.txt") dest = tmp_path("tmp_test.txt") @@ -983,7 +1453,7 @@ defmodule FileTest do end end - test :stream_into do + test "stream into" do src = fixture_path("file.txt") dest = tmp_path("tmp_test.txt") @@ -1002,7 +1472,7 @@ defmodule FileTest do end end - test :stream_into_append do + test "stream into append" do src = fixture_path("file.txt") dest = tmp_path("tmp_test.txt") @@ -1023,9 +1493,33 @@ defmodule FileTest do end end - test :ln_s do + test "ln" do existing = fixture_path("file.txt") new = tmp_path("tmp_test.txt") + try do + refute File.exists?(new) + assert File.ln(existing, new) == 
:ok + assert File.read(new) == {:ok, "FOO\n"} + after + File.rm(new) + end + end + + test "ln with existing destination" do + existing = fixture_path("file.txt") + assert File.ln(existing, existing) == {:error, :eexist} + end + + test "ln! with existing destination" do + assert_raise File.LinkError, fn -> + existing = fixture_path("file.txt") + File.ln!(existing, existing) + end + end + + test "ln_s" do + existing = fixture_path("file.txt") + new = tmp_path("tmp_test.txt") try do refute File.exists?(new) assert File.ln_s(existing, new) == :ok @@ -1035,12 +1529,19 @@ defmodule FileTest do end end - test :ln_s_with_existing_destination do - existing = fixture_path("file.txt") + test "ln_s with existing destination" do + existing = fixture_path("file.txt") assert File.ln_s(existing, existing) == {:error, :eexist} end - test :copy do + test "ln_s! with existing destination" do + existing = fixture_path("file.txt") + assert_raise File.LinkError, fn -> + File.ln_s!(existing, existing) + end + end + + test "copy" do src = fixture_path("file.txt") dest = tmp_path("tmp_test.txt") try do @@ -1052,7 +1553,46 @@ defmodule FileTest do end end - test :copy_with_bytes_count do + test "copy with an io_device" do + {:ok, src} = File.open(fixture_path("file.txt")) + dest = tmp_path("tmp_test.txt") + try do + refute File.exists?(dest) + assert File.copy(src, dest) == {:ok, 4} + assert File.read(dest) == {:ok, "FOO\n"} + after + File.close(src) + File.rm(dest) + end + end + + test "copy with raw io_device" do + {:ok, src} = File.open(fixture_path("file.txt"), [:raw]) + dest = tmp_path("tmp_test.txt") + try do + refute File.exists?(dest) + assert File.copy(src, dest) == {:ok, 4} + assert File.read(dest) == {:ok, "FOO\n"} + after + File.close(src) + File.rm(dest) + end + end + + test "copy with ram io_device" do + {:ok, src} = File.open("FOO\n", [:ram]) + dest = tmp_path("tmp_test.txt") + try do + refute File.exists?(dest) + assert File.copy(src, dest) == {:ok, 4} + assert File.read(dest) == {:ok, "FOO\n"} + after + File.close(src) + File.rm(dest) + end + end + + test "copy with bytes count" do src = fixture_path("file.txt") dest = tmp_path("tmp_test.txt") try do @@ -1064,13 +1604,13 @@ defmodule FileTest do end end - test :copy_with_invalid_file do + test "copy with invalid file" do src = fixture_path("invalid.txt") dest = tmp_path("tmp_test.txt") assert File.copy(src, dest, 2) == {:error, :enoent} end - test :copy! do + test "copy!" do src = fixture_path("file.txt") dest = tmp_path("tmp_test.txt") try do @@ -1082,7 +1622,7 @@ defmodule FileTest do end end - test :copy_with_bytes_count! do + test "copy! with bytes count" do src = fixture_path("file.txt") dest = tmp_path("tmp_test.txt") try do @@ -1094,18 +1634,18 @@ defmodule FileTest do end end - test :copy_with_invalid_file! do + test "copy! 
with invalid file" do src = fixture_path("invalid.txt") dest = tmp_path("tmp_test.txt") - assert_raise File.CopyError, "could not copy from #{src} to #{dest}: no such file or directory", fn -> + assert_raise File.CopyError, "could not copy from #{inspect(src)} to #{inspect(dest)}: no such file or directory", fn -> File.copy!(src, dest, 2) end end - test :cwd_and_cd do + test "cwd and cd" do {:ok, current} = File.cwd try do - assert File.cd(fixture_path) == :ok + assert File.cd(fixture_path()) == :ok assert File.exists?("file.txt") after File.cd!(current) @@ -1113,7 +1653,7 @@ defmodule FileTest do end if :file.native_name_encoding == :utf8 do - test :cwd_and_cd_with_utf8 do + test "cwd and cd with UTF-8" do File.mkdir_p(tmp_path("héllò")) File.cd!(tmp_path("héllò"), fn -> @@ -1124,25 +1664,25 @@ defmodule FileTest do end end - test :invalid_cd do + test "invalid cd" do assert io_error? File.cd(fixture_path("file.txt")) end - test :invalid_cd! do - message = ~r"^could not set current working directory to #{escape fixture_path("file.txt")}: (not a directory|no such file or directory)" + test "invalid_cd!" do + message = ~r"\Acould not set current working directory to #{inspect fixture_path("file.txt")}: (not a directory|no such file or directory)" assert_raise File.Error, message, fn -> File.cd!(fixture_path("file.txt")) end end - test :cd_with_function do - assert File.cd!(fixture_path, fn -> + test "cd with function" do + assert File.cd!(fixture_path(), fn -> assert File.exists?("file.txt") :cd_result end) == :cd_result end - test :touch_with_no_file do + test "touch with no file" do fixture = tmp_path("tmp_test.txt") time = {{2010, 4, 17}, {14, 0, 0}} @@ -1156,124 +1696,124 @@ defmodule FileTest do end end - test :touch_with_timestamp do + test "touch with timestamp" do fixture = tmp_path("tmp_test.txt") try do assert File.touch!(fixture) == :ok stat = File.stat!(fixture) - assert File.touch!(fixture, last_year) == :ok + assert File.touch!(fixture, last_year()) == :ok assert stat.mtime > File.stat!(fixture).mtime after File.rm(fixture) end end - test :touch_with_dir do - assert File.touch(fixture_path) == :ok + test "touch with dir" do + assert File.touch(fixture_path()) == :ok end - test :touch_with_failure do + test "touch with failure" do fixture = fixture_path("file.txt/bar") assert io_error? File.touch(fixture) end - test :touch_with_success! do - assert File.touch!(fixture_path) == :ok + test "touch! with success" do + assert File.touch!(fixture_path()) == :ok end - test :touch_with_failure! do + test "touch! with failure" do fixture = fixture_path("file.txt/bar") - assert_raise File.Error, ~r"could not touch #{escape fixture}: (not a directory|no such file or directory)", fn -> + assert_raise File.Error, ~r"\Acould not touch #{inspect fixture}: (not a directory|no such file or directory)", fn -> File.touch!(fixture) end end - test :chmod_with_success do + test "chmod with success" do fixture = tmp_path("tmp_test.txt") File.touch(fixture) try do - assert File.chmod(fixture, 0100666) == :ok + assert File.chmod(fixture, 0o100666) == :ok stat = File.stat!(fixture) - assert stat.mode == 0100666 + assert stat.mode == 0o100666 - unless is_win? do - assert File.chmod(fixture, 0100777) == :ok + unless windows?() do + assert File.chmod(fixture, 0o100777) == :ok stat = File.stat!(fixture) - assert stat.mode == 0100777 + assert stat.mode == 0o100777 end after File.rm(fixture) end end - test :chmod_with_success! do + test "chmod! 
with success" do fixture = tmp_path("tmp_test.txt") File.touch(fixture) try do - assert File.chmod!(fixture, 0100666) == :ok + assert File.chmod!(fixture, 0o100666) == :ok stat = File.stat!(fixture) - assert stat.mode == 0100666 + assert stat.mode == 0o100666 - unless is_win? do - assert File.chmod!(fixture, 0100777) == :ok + unless windows?() do + assert File.chmod!(fixture, 0o100777) == :ok stat = File.stat!(fixture) - assert stat.mode == 0100777 + assert stat.mode == 0o100777 end after File.rm(fixture) end end - test :chmod_with_failure do + test "chmod with failure" do fixture = tmp_path("tmp_test.txt") File.rm(fixture) - assert File.chmod(fixture, 0100777) == {:error,:enoent} + assert File.chmod(fixture, 0o100777) == {:error, :enoent} end - test :chmod_with_failure! do + test "chmod! with failure" do fixture = tmp_path("tmp_test.txt") File.rm(fixture) - message = ~r"could not change mode for #{escape fixture}: no such file or directory" + message = ~r"could not change mode for #{inspect fixture}: no such file or directory" assert_raise File.Error, message, fn -> - File.chmod!(fixture, 0100777) + File.chmod!(fixture, 0o100777) end end - test :chgrp_with_failure do + test "chgrp with failure" do fixture = tmp_path("tmp_test.txt") File.rm(fixture) - assert File.chgrp(fixture, 1) == {:error,:enoent} + assert File.chgrp(fixture, 1) == {:error, :enoent} end - test :chgrp_with_failure! do + test "chgrp! with failure" do fixture = tmp_path("tmp_test.txt") File.rm(fixture) - message = ~r"could not change group for #{escape fixture}: no such file or directory" + message = ~r"could not change group for #{inspect fixture}: no such file or directory" assert_raise File.Error, message, fn -> File.chgrp!(fixture, 1) end end - test :chown_with_failure do + test "chown with failure" do fixture = tmp_path("tmp_test.txt") File.rm(fixture) - assert File.chown(fixture, 1) == {:error,:enoent} + assert File.chown(fixture, 1) == {:error, :enoent} end - test :chown_with_failure! do + test "chown! with failure" do fixture = tmp_path("tmp_test.txt") File.rm(fixture) - message = ~r"could not change owner for #{escape fixture}: no such file or directory" + message = ~r"could not change owner for #{inspect fixture}: no such file or directory" assert_raise File.Error, message, fn -> File.chown!(fixture, 1) end @@ -1283,6 +1823,10 @@ defmodule FileTest do last_year :calendar.local_time end + defp last_year({{year, 2, 29}, time}) do + {{year - 1, 2, 28}, time} + end + defp last_year({{year, month, day}, time}) do {{year - 1, month, day}, time} end diff --git a/lib/elixir/test/elixir/fixtures/at_exit.exs b/lib/elixir/test/elixir/fixtures/at_exit.exs index 78460b222ba..7d6e0b5bffa 100644 --- a/lib/elixir/test/elixir/fixtures/at_exit.exs +++ b/lib/elixir/test/elixir/fixtures/at_exit.exs @@ -5,4 +5,4 @@ defmodule AtExit do end System.at_exit fn(status) -> IO.puts "cruel world with status #{status}" end AtExit.at_exit("goodbye ") -exit(0) \ No newline at end of file +exit({:shutdown, 1}) diff --git a/lib/elixir/test/elixir/fixtures/calendar/julian.exs b/lib/elixir/test/elixir/fixtures/calendar/julian.exs new file mode 100644 index 00000000000..17a435e096e --- /dev/null +++ b/lib/elixir/test/elixir/fixtures/calendar/julian.exs @@ -0,0 +1,99 @@ +defmodule Calendar.Julian do + # This calendar is used to test conversions between calendars. + # It implements the Julian calendar. 
+ + import Integer, only: [floor_div: 2] + + def date_to_string(year, month, day) do + "#{year}-#{month}-#{day} (O.S.)" + end + + def naive_datetime_to_string(year, month, day, hour, minute, second, microsecond) do + "#{year}-#{month}-#{day} #{Calendar.ISO.time_to_string(hour, minute, second, microsecond)} (O.S.)" + end + + def time_to_string(hour, minute, second, microsecond) do + Calendar.ISO.time_to_string(hour, minute, second, microsecond) + end + + def date(year, month, day) do + %Date{year: year, month: month, day: day, calendar: Calendar.Julian} + end + + def naive_datetime(year, month, day, hour, minute, second, microsecond \\ {0, 0}) + + def naive_datetime(year, month, day, hour, minute, second, microsecond) when is_integer(microsecond) do + naive_datetime(year, month, day, hour, minute, second, {microsecond, 6}) + end + + def naive_datetime(year, month, day, hour, minute, second, microsecond) do + %NaiveDateTime{year: year, month: month, day: day, hour: hour, minute: minute, second: second, microsecond: microsecond, calendar: Calendar.Julian} + end + + def day_rollover_relative_to_midnight_utc(), do: {0, 1} + + def naive_datetime_from_rata_die({days, day_fraction}) do + {year, month, day} = date_from_rata_die(days) + {hour, minute, second, microsecond} = time_from_day_fraction(day_fraction) + {year, month, day, hour, minute, second, microsecond} + end + + defp date_from_rata_die(days) do + approx = floor_div((4 * (days - epoch())) + 1464, 1461) + year = if approx <= 0, do: approx - 1, else: approx + prior_days = days - date_to_rata_die(year, 1, 1) + correction = cond do + days < date_to_rata_die(year, 3, 1) -> 0 + leap_year?(year) -> 1 + true -> 2 + end + month = floor_div(12*(prior_days + correction) + 373, 367) + day = 1 + days - date_to_rata_die(year, month, 1) + {year, month, day} + end + + def naive_datetime_to_rata_die(year, month, day, hour, minute, second, microsecond) do + days = date_to_rata_die(year, month, day) + day_fraction = time_to_day_fraction(hour, minute, second, microsecond) + {days, day_fraction} + end + + def time_from_day_fraction(day_fraction), do: Calendar.ISO.time_from_day_fraction(day_fraction) + + def time_to_day_fraction(hour, minute, second, microsecond) when is_integer(microsecond) do + time_to_day_fraction(hour, minute, second, {microsecond, 6}) + end + + def time_to_day_fraction(hour, minute, second, microsecond) do + Calendar.ISO.time_to_day_fraction(hour, minute, second, microsecond) + end + + def leap_year?(year) when is_integer(year) and year >= 0 do + rem(year, 4) == if year > 0, do: 0, else: 3 + end + + def days_in_month(year, month) + def days_in_month(year, 2) do + if leap_year?(year), do: 29, else: 28 + end + def days_in_month(_, month) when month in [4, 6, 9, 11], do: 30 + def days_in_month(_, month) when month in 1..12, do: 31 + + defp date_to_rata_die(year, month, day) do + year = if year < 0, do: year + 1, else: year + + epoch() - 1 + (365 * (year - 1)) + floor_div(year - 1, 4) + + floor_div(367 * month - 362, 12) + + adjustment_for_leap_year(year, month) + day + end + + defp epoch(), do: -1 + + defp adjustment_for_leap_year(year, month) do + cond do + month <= 2 -> 0 + leap_year?(year) -> -1 + true -> -2 + end + end +end diff --git a/lib/elixir/test/elixir/fixtures/code_sample.exs b/lib/elixir/test/elixir/fixtures/code_sample.exs index f1895089889..511623e3a7c 100644 --- a/lib/elixir/test/elixir/fixtures/code_sample.exs +++ b/lib/elixir/test/elixir/fixtures/code_sample.exs @@ -1,3 +1,3 @@ # Some Comments var = 1 + 2 -var \ No newline 
at end of file +var diff --git a/lib/elixir/test/elixir/fixtures/compile_sample.ex b/lib/elixir/test/elixir/fixtures/compile_sample.ex index 7cbd92b5a29..3ee7c2fbd3c 100644 --- a/lib/elixir/test/elixir/fixtures/compile_sample.ex +++ b/lib/elixir/test/elixir/fixtures/compile_sample.ex @@ -1 +1 @@ -defmodule CompileSample, do: nil \ No newline at end of file +defmodule CompileSample, do: nil diff --git a/lib/elixir/test/elixir/fixtures/dialyzer/boolean_check.ex b/lib/elixir/test/elixir/fixtures/dialyzer/boolean_check.ex new file mode 100644 index 00000000000..c19234b6890 --- /dev/null +++ b/lib/elixir/test/elixir/fixtures/dialyzer/boolean_check.ex @@ -0,0 +1,17 @@ +defmodule Dialyzer.BooleanCheck do + def and_check(arg) when is_boolean(arg) do + arg and arg + end + + def and_check_optimized(arg) when is_integer(arg) do + arg < :infinity and arg + end + + def or_check(arg) when is_boolean(arg) do + arg or arg + end + + def or_check_optimized(arg) when is_integer(arg) do + arg < :infinity or arg + end +end diff --git a/lib/elixir/test/elixir/fixtures/dialyzer/for_boolean_check.ex b/lib/elixir/test/elixir/fixtures/dialyzer/for_boolean_check.ex new file mode 100644 index 00000000000..a15783397ee --- /dev/null +++ b/lib/elixir/test/elixir/fixtures/dialyzer/for_boolean_check.ex @@ -0,0 +1,9 @@ +defmodule Dialyzer.ForBooleanCheck do + def foo(enum, potential) when is_binary(potential) do + for element <- enum, + string = Atom.to_string(element), + string == potential do + element + end + end +end diff --git a/lib/elixir/test/elixir/fixtures/dialyzer/macrocallback.ex b/lib/elixir/test/elixir/fixtures/dialyzer/macrocallback.ex new file mode 100644 index 00000000000..9d41902755e --- /dev/null +++ b/lib/elixir/test/elixir/fixtures/dialyzer/macrocallback.ex @@ -0,0 +1,11 @@ +defmodule Dialyzer.Macrocallback do + @macrocallback required(atom) :: Macro.t + @macrocallback optional(atom) :: Macro.t + @optional_callbacks [optional: 1] +end + +defmodule Dialyzer.Macrocallback.Impl do + @behaviour Dialyzer.Macrocallback + defmacro required(var), do: Macro.expand(var, __CALLER__) + defmacro optional(var), do: Macro.expand(var, __CALLER__) +end diff --git a/lib/elixir/test/elixir/fixtures/dialyzer/protocol_opaque.ex b/lib/elixir/test/elixir/fixtures/dialyzer/protocol_opaque.ex new file mode 100644 index 00000000000..a05dc529369 --- /dev/null +++ b/lib/elixir/test/elixir/fixtures/dialyzer/protocol_opaque.ex @@ -0,0 +1,21 @@ +defmodule Dialyzer.ProtocolOpaque do + def circus() do + Dialyzer.ProtocolOpaque.Entity.speak(Dialyzer.ProtocolOpaque.Duck.new) + end +end + +defprotocol Dialyzer.ProtocolOpaque.Entity do + def speak(entity) +end + +defmodule Dialyzer.ProtocolOpaque.Duck do + @opaque t :: %__MODULE__{} + defstruct feathers: :white_and_grey + + @spec new :: t + def new(), do: %__MODULE__{} + + defimpl Dialyzer.ProtocolOpaque.Entity do + def speak(%Dialyzer.ProtocolOpaque.Duck{}), do: "Quack!" 
+ end +end diff --git a/lib/elixir/test/elixir/fixtures/dialyzer/raise.ex b/lib/elixir/test/elixir/fixtures/dialyzer/raise.ex new file mode 100644 index 00000000000..808ba3b0233 --- /dev/null +++ b/lib/elixir/test/elixir/fixtures/dialyzer/raise.ex @@ -0,0 +1,21 @@ +defmodule Dialyzer.Raise do + defexception [:message] + + def exception_var() do + ex = %Dialyzer.Raise{} + raise ex + end + + def exception_var(ex = %Dialyzer.Raise{}) do + raise ex + end + + def string_var() do + string = "hello" + raise string + end + + def string_var(string) when is_binary(string) do + raise string + end +end diff --git a/lib/elixir/test/elixir/fixtures/dialyzer/remote_call.ex b/lib/elixir/test/elixir/fixtures/dialyzer/remote_call.ex new file mode 100644 index 00000000000..14e019900c3 --- /dev/null +++ b/lib/elixir/test/elixir/fixtures/dialyzer/remote_call.ex @@ -0,0 +1,30 @@ +defmodule Dialyzer.RemoteCall do + _ = Application.load(:dialyzer) + case Application.spec(:dialyzer, :vsn) do + ~c(2.) ++ _ -> + @dialyzer {:no_fail_call, [map_var: 0]} + three when three < ~c(3.0.2) -> + # regression introduced in 3.0 for map warnings fixed in 3.0.2 + @dialyzer {:no_match, [map_var: 0, mod_var: 0, mod_var: 1]} + _ -> + :ok + end + + def map_var() do + map = %{key: 1} + map.key + end + + def map_var(map) when is_map(map) do + map.key + end + + def mod_var() do + module = Hello + module.fun + end + + def mod_var(module) when is_atom(module) or is_atom(elem(module, 0)) do + module.fun + end +end diff --git a/lib/elixir/test/elixir/fixtures/dialyzer/rewrite.ex b/lib/elixir/test/elixir/fixtures/dialyzer/rewrite.ex new file mode 100644 index 00000000000..6f6ff964b68 --- /dev/null +++ b/lib/elixir/test/elixir/fixtures/dialyzer/rewrite.ex @@ -0,0 +1,9 @@ +defmodule Dialyzer.Rewrite do + def interpolation do + "foo #{:a}" + end + + def reverse do + Enum.reverse 1..3 + end +end diff --git a/lib/elixir/test/elixir/fixtures/dialyzer/struct_update.ex b/lib/elixir/test/elixir/fixtures/dialyzer/struct_update.ex new file mode 100644 index 00000000000..fbacdab6778 --- /dev/null +++ b/lib/elixir/test/elixir/fixtures/dialyzer/struct_update.ex @@ -0,0 +1,7 @@ +defmodule Dialyzer.StructUpdate do + defstruct [:foo] + + def update(%__MODULE__{} = struct) do + %__MODULE__{struct | foo: :bar} + end +end diff --git a/lib/elixir/test/elixir/fixtures/file.bin b/lib/elixir/test/elixir/fixtures/file.bin new file mode 100644 index 00000000000..491c0980ddb --- /dev/null +++ b/lib/elixir/test/elixir/fixtures/file.bin @@ -0,0 +1,4 @@ +LF +CR CRLF +LFCR + \ No newline at end of file diff --git a/lib/elixir/test/elixir/fixtures/init_sample.exs b/lib/elixir/test/elixir/fixtures/init_sample.exs deleted file mode 100644 index 4a1775c5027..00000000000 --- a/lib/elixir/test/elixir/fixtures/init_sample.exs +++ /dev/null @@ -1 +0,0 @@ -IO.puts to_string(1 + 2) \ No newline at end of file diff --git a/lib/elixir/test/elixir/fixtures/parallel_compiler/bar.ex b/lib/elixir/test/elixir/fixtures/parallel_compiler/bar.ex index d6134b4c825..0fdbe2d7634 100644 --- a/lib/elixir/test/elixir/fixtures/parallel_compiler/bar.ex +++ b/lib/elixir/test/elixir/fixtures/parallel_compiler/bar.ex @@ -1,5 +1,5 @@ -defmodule Bar do +defmodule BarParallel do end -require Foo -IO.puts Foo.message \ No newline at end of file +require FooParallel +IO.puts FooParallel.message diff --git a/lib/elixir/test/elixir/fixtures/parallel_compiler/bat.ex b/lib/elixir/test/elixir/fixtures/parallel_compiler/bat.ex index 87cfccc56c9..47ce74a01e3 100644 --- 
a/lib/elixir/test/elixir/fixtures/parallel_compiler/bat.ex +++ b/lib/elixir/test/elixir/fixtures/parallel_compiler/bat.ex @@ -1,3 +1,8 @@ +# We need to ensure it won't block even after multiple calls. +# So we use both behaviour and struct expansion below. defmodule Bat do - ThisModuleWillNeverBeAvailable[] -end \ No newline at end of file + # @behaviour will call ensure_compiled(). + @behaviour :unknown + # Struct expansion calls it as well. + %ThisModuleWillNeverBeAvailable{} +end diff --git a/lib/elixir/test/elixir/fixtures/parallel_compiler/foo.ex b/lib/elixir/test/elixir/fixtures/parallel_compiler/foo.ex index ea9a7cd6d50..55114107c43 100644 --- a/lib/elixir/test/elixir/fixtures/parallel_compiler/foo.ex +++ b/lib/elixir/test/elixir/fixtures/parallel_compiler/foo.ex @@ -1,3 +1,8 @@ -defmodule Foo do +defmodule FooParallel do + # We use this ensure_compiled? clause so both Foo and + # Bar block. Foo depends on Unknown and Bar depends on + # Foo. The compiler will see this dependency and first + # release Foo and then Bar, compiling with success. + false = Code.ensure_compiled?(Unknown) def message, do: "message_from_foo" -end \ No newline at end of file +end diff --git a/lib/elixir/test/elixir/fixtures/parallel_deadlock/bar.ex b/lib/elixir/test/elixir/fixtures/parallel_deadlock/bar.ex index 4cb9fca2c7b..81f507d6e19 100644 --- a/lib/elixir/test/elixir/fixtures/parallel_deadlock/bar.ex +++ b/lib/elixir/test/elixir/fixtures/parallel_deadlock/bar.ex @@ -1,3 +1,3 @@ -defmodule Bar do - Foo.__info__(:macros) -end \ No newline at end of file +defmodule BarDeadlock do + FooDeadlock.__info__(:macros) +end diff --git a/lib/elixir/test/elixir/fixtures/parallel_deadlock/foo.ex b/lib/elixir/test/elixir/fixtures/parallel_deadlock/foo.ex index d50072d11af..7bdc9012bde 100644 --- a/lib/elixir/test/elixir/fixtures/parallel_deadlock/foo.ex +++ b/lib/elixir/test/elixir/fixtures/parallel_deadlock/foo.ex @@ -1,3 +1,3 @@ -defmodule Foo do - Bar.__info__(:macros) -end \ No newline at end of file +defmodule FooDeadlock do + BarDeadlock.__info__(:macros) +end diff --git a/lib/elixir/test/elixir/fixtures/parallel_struct/bar.ex b/lib/elixir/test/elixir/fixtures/parallel_struct/bar.ex index 6e697731403..de8afa1015d 100644 --- a/lib/elixir/test/elixir/fixtures/parallel_struct/bar.ex +++ b/lib/elixir/test/elixir/fixtures/parallel_struct/bar.ex @@ -1,4 +1,3 @@ -defmodule Bar do - defstruct name: "" - def foo?(%Foo{}), do: true +defmodule BarStruct do + defstruct name: "", foo: %FooStruct{} end diff --git a/lib/elixir/test/elixir/fixtures/parallel_struct/foo.ex b/lib/elixir/test/elixir/fixtures/parallel_struct/foo.ex index 0e7153e01de..4827fc143d9 100644 --- a/lib/elixir/test/elixir/fixtures/parallel_struct/foo.ex +++ b/lib/elixir/test/elixir/fixtures/parallel_struct/foo.ex @@ -1,4 +1,4 @@ -defmodule Foo do +defmodule FooStruct do defstruct name: "" - def bar?(%Bar{}), do: true + def bar?(%BarStruct{}), do: true end diff --git a/lib/elixir/test/elixir/fixtures/parallel_struct/undef.ex b/lib/elixir/test/elixir/fixtures/parallel_struct/undef.ex new file mode 100644 index 00000000000..6e2ac07f7f5 --- /dev/null +++ b/lib/elixir/test/elixir/fixtures/parallel_struct/undef.ex @@ -0,0 +1,5 @@ +defmodule Undef do + def undef() do + %__MODULE__{} + end +end diff --git a/lib/elixir/test/elixir/fixtures/utf8_bom.txt b/lib/elixir/test/elixir/fixtures/utf8_bom.txt new file mode 100644 index 00000000000..bed73fc4ca1 --- /dev/null +++ b/lib/elixir/test/elixir/fixtures/utf8_bom.txt @@ -0,0 +1,2 @@ +Русский +日 diff --git 
a/lib/elixir/test/elixir/float_test.exs b/lib/elixir/test/elixir/float_test.exs index a1a1471095e..29a1426ce47 100644 --- a/lib/elixir/test/elixir/float_test.exs +++ b/lib/elixir/test/elixir/float_test.exs @@ -3,7 +3,9 @@ Code.require_file "test_helper.exs", __DIR__ defmodule FloatTest do use ExUnit.Case, async: true - test :parse do + doctest Float + + test "parse/1" do assert Float.parse("12") === {12.0, ""} assert Float.parse("-12") === {-12.0, ""} assert Float.parse("-0.1") === {-0.1, ""} @@ -12,6 +14,8 @@ defmodule FloatTest do assert Float.parse("12.524235") === {12.524235, ""} assert Float.parse("-12.5") === {-12.5, ""} assert Float.parse("-12.524235") === {-12.524235, ""} + assert Float.parse("0.3534091") === {0.3534091, ""} + assert Float.parse("0.3534091elixir") === {0.3534091, "elixir"} assert Float.parse("7.5e3") === {7.5e3, ""} assert Float.parse("7.5e-3") === {7.5e-3, ""} assert Float.parse("12x") === {12.0, "x"} @@ -22,49 +26,96 @@ defmodule FloatTest do assert Float.parse("1.32453e-10") === {1.32453e-10, ""} assert Float.parse("1.32.45") === {1.32, ".45"} assert Float.parse("1.o") === {1.0, ".o"} + assert Float.parse("+12.3E+4") === {1.23e5, ""} + assert Float.parse("+12.3E-4x") === {0.00123, "x"} + assert Float.parse("-1.23e-0xFF") === {-1.23, "xFF"} + assert Float.parse("-1.e2") === {-1.0, ".e2"} + assert Float.parse(".12") === :error assert Float.parse("--1.2") === :error assert Float.parse("++1.2") === :error assert Float.parse("pi") === :error + assert Float.parse("1.7976931348623157e308") === {1.7976931348623157e308, ""} + assert_raise ArgumentError, fn -> + Float.parse("1.7976931348623159e308") + end + end + + test "floor/1" do + assert Float.floor(12.524235) === 12.0 + assert Float.floor(-12.5) === -13.0 + assert Float.floor(-12.524235) === -13.0 + assert Float.floor(7.5e3) === 7500.0 + assert Float.floor(7.5432e3) === 7543.0 + assert Float.floor(7.5e-3) === 0.0 + assert Float.floor(-12.32453e4) === -123246.0 + assert Float.floor(-12.32453e-10) === -1.0 + assert Float.floor(0.32453e-10) === 0.0 + assert Float.floor(-0.32453e-10) === -1.0 + assert Float.floor(1.32453e-10) === 0.0 + end + + test "floor/2 with precision" do + assert Float.floor(12.524235, 0) === 12.0 + assert Float.floor(-12.524235, 0) === -13.0 + + assert Float.floor(12.52, 2) === 12.51 + assert Float.floor(-12.52, 2) === -12.52 + + assert Float.floor(12.524235, 2) === 12.52 + assert Float.floor(-12.524235, 3) === -12.525 + + assert Float.floor(12.32453e-20, 2) === 0.0 + assert Float.floor(-12.32453e-20, 2) === -0.01 + + assert_raise ArgumentError, "precision 16 is out of valid range of 0..15" , fn -> + Float.floor(1.1, 16) + end end - test :floor do - assert Float.floor(12) === 12 - assert Float.floor(-12) === -12 - assert Float.floor(12.524235) === 12 - assert Float.floor(-12.5) === -13 - assert Float.floor(-12.524235) === -13 - assert Float.floor(7.5e3) === 7500 - assert Float.floor(7.5432e3) === 7543 - assert Float.floor(7.5e-3) === 0 - assert Float.floor(-12.32453e4) === -123246 - assert Float.floor(-12.32453e-10) === -1 - assert Float.floor(0.32453e-10) === 0 - assert Float.floor(-0.32453e-10) === -1 - assert Float.floor(1.32453e-10) === 0 + test "ceil/1" do + assert Float.ceil(12.524235) === 13.0 + assert Float.ceil(-12.5) === -12.0 + assert Float.ceil(-12.524235) === -12.0 + assert Float.ceil(7.5e3) === 7500.0 + assert Float.ceil(7.5432e3) === 7544.0 + assert Float.ceil(7.5e-3) === 1.0 + assert Float.ceil(-12.32453e4) === -123245.0 + assert Float.ceil(-12.32453e-10) === 0.0 + assert 
Float.ceil(0.32453e-10) === 1.0 + assert Float.ceil(-0.32453e-10) === 0.0 + assert Float.ceil(1.32453e-10) === 1.0 + assert Float.ceil(0.0) === 0.0 end - test :ceil do - assert Float.ceil(12) === 12 - assert Float.ceil(-12) === -12 - assert Float.ceil(12.524235) === 13 - assert Float.ceil(-12.5) === -12 - assert Float.ceil(-12.524235) === -12 - assert Float.ceil(7.5e3) === 7500 - assert Float.ceil(7.5432e3) === 7544 - assert Float.ceil(7.5e-3) === 1 - assert Float.ceil(-12.32453e4) === -123245 - assert Float.ceil(-12.32453e-10) === 0 - assert Float.ceil(0.32453e-10) === 1 - assert Float.ceil(-0.32453e-10) === 0 - assert Float.ceil(1.32453e-10) === 1 + test "ceil/2 with precision" do + assert Float.ceil(12.524235, 0) === 13.0 + assert Float.ceil(-12.524235, 0) === -12.0 + + assert Float.ceil(12.52, 2) === 12.52 + assert Float.ceil(-12.52, 2) === -12.51 + + assert Float.ceil(12.524235, 2) === 12.53 + assert Float.ceil(-12.524235, 3) === -12.524 + + assert Float.ceil(12.32453e-20, 2) === 0.01 + assert Float.ceil(-12.32453e-20, 2) === 0.0 + + assert Float.ceil(0.0, 2) === 0.0 + + assert_raise ArgumentError, "precision 16 is out of valid range of 0..15" , fn -> + Float.ceil(1.1, 16) + end end - test :round do - assert Float.round(5.5675, 3) === 5.568 + test "round/2" do + assert Float.round(5.5675, 3) === 5.567 assert Float.round(-5.5674, 3) === -5.567 assert Float.round(5.5, 3) === 5.5 - assert Float.round(5.5e-10, 10) === 6.0e-10 + assert Float.round(5.5e-10, 10) === 5.0e-10 assert Float.round(5.5e-10, 8) === 0.0 assert Float.round(5.0, 0) === 5.0 + assert_raise ArgumentError, "precision 16 is out of valid range of 0..15" , fn -> + Float.round(1.1, 16) + end end end diff --git a/lib/elixir/test/elixir/gen_event_test.exs b/lib/elixir/test/elixir/gen_event_test.exs deleted file mode 100644 index 005cb715b21..00000000000 --- a/lib/elixir/test/elixir/gen_event_test.exs +++ /dev/null @@ -1,392 +0,0 @@ -Code.require_file "test_helper.exs", __DIR__ - -defmodule GenEventTest do - use ExUnit.Case, async: true - - defmodule LoggerHandler do - use GenEvent - - def handle_event({:log, x}, messages) do - {:ok, [x|messages]} - end - - def handle_call(:messages, messages) do - {:ok, Enum.reverse(messages), []} - end - - def handle_call(call, state) do - super(call, state) - end - end - - defmodule SlowHandler do - use GenEvent - - def handle_event(_event, _state) do - :timer.sleep(100) - :remove_handler - end - end - - @receive_timeout 1000 - - test "start_link/2 and handler workflow" do - {:ok, pid} = GenEvent.start_link() - - {:links, links} = Process.info(self, :links) - assert pid in links - - assert GenEvent.notify(pid, {:log, 0}) == :ok - assert GenEvent.add_handler(pid, LoggerHandler, []) == :ok - assert GenEvent.notify(pid, {:log, 1}) == :ok - assert GenEvent.notify(pid, {:log, 2}) == :ok - - assert GenEvent.call(pid, LoggerHandler, :messages) == [1, 2] - assert GenEvent.call(pid, LoggerHandler, :messages) == [] - - assert GenEvent.call(pid, LoggerHandler, :whatever) == {:error, :bad_call} - assert GenEvent.call(pid, UnknownHandler, :messages) == {:error, :bad_module} - - assert GenEvent.remove_handler(pid, LoggerHandler, []) == :ok - assert GenEvent.stop(pid) == :ok - end - - test "start/2 with linked handler" do - {:ok, pid} = GenEvent.start() - - {:links, links} = Process.info(self, :links) - refute pid in links - - assert GenEvent.add_handler(pid, LoggerHandler, [], link: true) == :ok - - {:links, links} = Process.info(self, :links) - assert pid in links - - assert GenEvent.notify(pid, {:log, 
1}) == :ok - assert GenEvent.sync_notify(pid, {:log, 2}) == :ok - - assert GenEvent.call(pid, LoggerHandler, :messages) == [1, 2] - assert GenEvent.stop(pid) == :ok - end - - test "start/2 with linked swap" do - {:ok, pid} = GenEvent.start() - - assert GenEvent.add_handler(pid, LoggerHandler, []) == :ok - - {:links, links} = Process.info(self, :links) - refute pid in links - - assert GenEvent.swap_handler(pid, LoggerHandler, [], LoggerHandler, [], link: true) == :ok - - {:links, links} = Process.info(self, :links) - assert pid in links - - assert GenEvent.stop(pid) == :ok - end - - test "start/2 with registered name" do - {:ok, _} = GenEvent.start(name: :logger) - assert GenEvent.stop(:logger) == :ok - end - - test "sync stream/2" do - {:ok, pid} = GenEvent.start_link() - parent = self() - - spawn_link fn -> - send parent, Enum.take(GenEvent.stream(pid, mode: :sync), 3) - end - - wait_for_handlers(pid, 1) - - for i <- 1..3 do - GenEvent.sync_notify(pid, i) - end - - # Receive one of the results - assert_receive [1, 2, 3], @receive_timeout - wait_for_handlers(pid, 0) - - spawn_link fn -> - Enum.each(GenEvent.stream(pid, mode: :sync), fn _ -> - :timer.sleep(:infinity) - end) - end - - wait_for_handlers(pid, 1) - - for i <- 1..6 do - GenEvent.notify(pid, i) - end - - wait_for_queue_length(pid, 5) - end - - test "async stream/2" do - {:ok, pid} = GenEvent.start_link() - parent = self() - - spawn_link fn -> - Enum.each(GenEvent.stream(pid, mode: :async), fn _ -> - :timer.sleep(:infinity) - end) - end - - spawn_link fn -> - send parent, Enum.take(GenEvent.stream(pid, mode: :async), 3) - end - - wait_for_handlers(pid, 2) - - for i <- 1..3 do - GenEvent.sync_notify(pid, i) - end - - # Receive one of the results - assert_receive [1, 2, 3], @receive_timeout - - # One of the subscriptions are gone - wait_for_handlers(pid, 1) - end - - Enum.each [:sync, :async], fn mode -> - test "#{mode} stream/2 with parallel use (and first finishing first)" do - {:ok, pid} = GenEvent.start_link() - stream = GenEvent.stream(pid, duration: 200, mode: unquote(mode)) - - parent = self() - spawn_link fn -> send parent, {:take, Enum.take(stream, 3)} end - wait_for_handlers(pid, 1) - spawn_link fn -> send parent, {:to_list, Enum.to_list(stream)} end - wait_for_handlers(pid, 2) - - # Notify the events for both handlers - for i <- 1..3 do - GenEvent.sync_notify(pid, i) - end - assert_receive {:take, [1, 2, 3]}, @receive_timeout - - # Notify the events for to_list stream handler - for i <- 4..5 do - GenEvent.sync_notify(pid, i) - end - - assert_receive {:to_list, [1, 2, 3, 4, 5]}, @receive_timeout - end - - test "#{mode} stream/2 with timeout" do - # Start a manager - {:ok, pid} = GenEvent.start_link() - Process.flag(:trap_exit, true) - - pid = spawn_link fn -> - Enum.take(GenEvent.stream(pid, timeout: 50, mode: unquote(mode)), 5) - end - - assert_receive {:EXIT, ^pid, - {:timeout, {Enumerable.GenEvent, :next, [_, _]}}}, @receive_timeout - end - - test "#{mode} stream/2 with error/timeout on subscription" do - # Start a manager - {:ok, pid} = GenEvent.start_link() - - # Start a subscriber with timeout - child = spawn fn -> Enum.to_list(GenEvent.stream(pid, mode: unquote(mode))) end - wait_for_handlers(pid, 1) - - # Kill and wait until we have 0 handlers - Process.exit(child, :kill) - wait_for_handlers(pid, 0) - GenEvent.stop(pid) - end - - test "#{mode} stream/2 with manager stop" do - # Start a manager and subscribers - {:ok, pid} = GenEvent.start_link() - - parent = self() - stream_pid = spawn_link fn -> - send parent, 
Enum.take(GenEvent.stream(pid, mode: unquote(mode)), 5) - end - wait_for_handlers(pid, 1) - - # Notify the events - for i <- 1..3 do - GenEvent.sync_notify(pid, i) - end - - Process.flag(:trap_exit, true) - GenEvent.stop(pid) - assert_receive {:EXIT, ^stream_pid, - {:shutdown, {Enumerable.GenEvent, :next, [_, _]}}}, @receive_timeout - end - - test "#{mode} stream/2 with cancel streams" do - # Start a manager and subscribers - {:ok, pid} = GenEvent.start_link() - stream = GenEvent.stream(pid, id: make_ref(), mode: unquote(mode)) - - parent = self() - spawn_link fn -> send parent, Enum.take(stream, 5) end - wait_for_handlers(pid, 1) - - # Notify the events - for i <- 1..3 do - GenEvent.sync_notify(pid, i) - end - - GenEvent.cancel_streams(stream) - assert_receive [1, 2, 3], @receive_timeout - GenEvent.stop(pid) - end - - test "#{mode} stream/2 with swap_handler" do - # Start a manager and subscribers - {:ok, pid} = GenEvent.start_link() - stream = GenEvent.stream(pid, id: make_ref(), mode: unquote(mode)) - - parent = self() - stream_pid = spawn_link fn -> send parent, Enum.take(stream, 5) end - wait_for_handlers(pid, 1) - - # Notify the events - for i <- 1..3 do - GenEvent.sync_notify(pid, i) - end - - [handler] = GenEvent.which_handlers(pid) - Process.flag(:trap_exit, true) - GenEvent.swap_handler(pid, handler, :swap_handler, LogHandler, []) - assert_receive {:EXIT, ^stream_pid, - {{:swapped, LogHandler, _}, - {Enumerable.GenEvent, :next, [_, _]}}}, @receive_timeout - end - - test "#{mode} stream/2 with duration" do - # Start a manager and subscribers - {:ok, pid} = GenEvent.start_link() - stream = GenEvent.stream(pid, duration: 200, mode: unquote(mode)) - - parent = self() - spawn_link fn -> send parent, {:duration, Enum.take(stream, 10)} end - wait_for_handlers(pid, 1) - - # Notify the events - for i <- 1..5 do - GenEvent.sync_notify(pid, i) - end - - # Wait until the handler is gone - wait_for_handlers(pid, 0) - - # The stream is not complete but terminated anyway due to duration - assert_receive {:duration, [1, 2, 3, 4, 5]}, @receive_timeout - - GenEvent.stop(pid) - end - - test "#{mode} stream/2 with manager killed and trap_exit" do - # Start a manager and subscribers - {:ok, pid} = GenEvent.start_link() - stream = GenEvent.stream(pid, mode: unquote(mode)) - - parent = self() - stream_pid = spawn_link fn -> - send parent, Enum.to_list(stream) - end - wait_for_handlers(pid, 1) - - Process.flag(:trap_exit, true) - Process.exit(pid, :kill) - assert_receive {:EXIT, ^pid, :killed}, @receive_timeout - assert_receive {:EXIT, ^stream_pid, - {:killed, {Enumerable.GenEvent, :next, [_,_]}}}, @receive_timeout - end - - test "#{mode} stream/2 with manager not alive" do - # Start a manager and subscribers - stream = GenEvent.stream(:does_not_exit, mode: unquote(mode)) - - parent = self() - stream_pid = spawn_link fn -> - send parent, Enum.to_list(stream) - end - - Process.flag(:trap_exit, true) - assert_receive {:EXIT, ^stream_pid, - {:noproc, {Enumerable.GenEvent, :start, [_]}}}, @receive_timeout - end - - test "#{mode} stream/2 with manager unregistered" do - # Start a manager and subscribers - {:ok, pid} = GenEvent.start_link(name: :unreg) - stream = GenEvent.stream(:unreg, mode: unquote(mode)) - - parent = self() - spawn_link fn -> - send parent, Enum.take(stream, 5) - end - wait_for_handlers(pid, 1) - - # Notify the events - for i <- 1..3 do - GenEvent.sync_notify(pid, i) - end - - # Unregister the process - Process.unregister(:unreg) - - # Notify the remaining events - for i <- 4..5 do - 
GenEvent.sync_notify(pid, i) - end - - # We should have gotten the message and all handlers were removed - assert_receive [1, 2, 3, 4, 5], @receive_timeout - wait_for_handlers(pid, 0) - end - - test "#{mode} stream/2 flushes events on abort" do - # Start a manager and subscribers - {:ok, pid} = GenEvent.start_link() - - spawn_link fn -> - wait_for_handlers(pid, 2) - GenEvent.notify(pid, 1) - GenEvent.notify(pid, 2) - GenEvent.notify(pid, 3) - end - - GenEvent.add_handler(pid, SlowHandler, []) - stream = GenEvent.stream(pid, mode: unquote(mode)) - - try do - Enum.each stream, fn _ -> throw :done end - catch - :done -> :ok - end - - # Wait for the slow handler to be removed - # so all events have been handled - wait_for_handlers(pid, 0) - - # Check no messages leaked. - refute_received _any - end - end - - defp wait_for_handlers(pid, count) do - unless length(GenEvent.which_handlers(pid)) == count do - wait_for_handlers(pid, count) - end - end - - defp wait_for_queue_length(pid, count) do - {:message_queue_len, n} = Process.info(pid, :message_queue_len) - unless n == count do - wait_for_queue_length(pid, count) - end - end -end diff --git a/lib/elixir/test/elixir/gen_server_test.exs b/lib/elixir/test/elixir/gen_server_test.exs index 880d7d0f864..d74643bfdd0 100644 --- a/lib/elixir/test/elixir/gen_server_test.exs +++ b/lib/elixir/test/elixir/gen_server_test.exs @@ -6,13 +6,12 @@ defmodule GenServerTest do defmodule Stack do use GenServer - def handle_call(:stop, from, state) do - GenServer.reply(from, :ok) - {:stop, :normal, state} + def handle_call(:pop, _from, [h | t]) do + {:reply, h, t} end - def handle_call(:pop, _from, [h|t]) do - {:reply, h, t} + def handle_call(:noreply, _from, h) do + {:noreply, h} end def handle_call(request, from, state) do @@ -20,7 +19,7 @@ defmodule GenServerTest do end def handle_cast({:push, item}, state) do - {:noreply, [item|state]} + {:noreply, [item | state]} end def handle_cast(request, state) do @@ -31,7 +30,7 @@ defmodule GenServerTest do # There is a race condition if the agent is # restarted too fast and it is registered. 
try do - self |> Process.info(:registered_name) |> elem(1) |> Process.unregister + self() |> Process.info(:registered_name) |> elem(1) |> Process.unregister rescue _ -> :ok end @@ -39,16 +38,72 @@ defmodule GenServerTest do end end + test "start_link/3" do + assert_raise ArgumentError, ~r"expected :name option to be one of:", fn -> + GenServer.start_link(Stack, [:hello], name: "my_gen_server_name") + end + + assert_raise ArgumentError, ~r"expected :name option to be one of:", fn -> + GenServer.start_link(Stack, [:hello], name: {:invalid_tuple, "my_gen_server_name"}) + end + + assert_raise ArgumentError, ~r"expected :name option to be one of:", fn -> + GenServer.start_link(Stack, [:hello], name: {:via, "Via", "my_gen_server_name"}) + end + + assert_raise ArgumentError, ~r/Got: "my_gen_server_name"/, fn -> + GenServer.start_link(Stack, [:hello], name: "my_gen_server_name") + end + end + + test "start_link/3 with via" do + GenServer.start_link(Stack, [:hello], name: {:via, :global, :via_stack}) + assert GenServer.call({:via, :global, :via_stack}, :pop) == :hello + end + + test "start_link/3 with global" do + GenServer.start_link(Stack, [:hello], name: {:global, :global_stack}) + assert GenServer.call({:global, :global_stack}, :pop) == :hello + end + + test "start_link/3 with local" do + GenServer.start_link(Stack, [:hello], name: :stack) + assert GenServer.call(:stack, :pop) == :hello + end + test "start_link/2, call/2 and cast/2" do {:ok, pid} = GenServer.start_link(Stack, [:hello]) - {:links, links} = Process.info(self, :links) + {:links, links} = Process.info(self(), :links) assert pid in links assert GenServer.call(pid, :pop) == :hello assert GenServer.cast(pid, {:push, :world}) == :ok assert GenServer.call(pid, :pop) == :world - assert GenServer.call(pid, :stop) == :ok + assert GenServer.stop(pid) == :ok + + assert GenServer.cast({:global, :foo}, {:push, :world}) == :ok + assert GenServer.cast({:via, :foo, :bar}, {:push, :world}) == :ok + assert GenServer.cast(:foo, {:push, :world}) == :ok + end + + @tag capture_log: true + test "call/3 exit messages" do + name = :self + Process.register self(), name + :global.register_name name, self() + {:ok, pid} = GenServer.start_link(Stack, [:hello]) + {:ok, stopped_pid} = GenServer.start(Stack, [:hello]) + GenServer.stop(stopped_pid) + + assert catch_exit(GenServer.call(name, :pop, 5000)) == {:calling_self, {GenServer, :call, [name, :pop, 5000]}} + assert catch_exit(GenServer.call({:global, name}, :pop, 5000)) == {:calling_self, {GenServer, :call, [{:global, name}, :pop, 5000]}} + assert catch_exit(GenServer.call({:via, :global, name}, :pop, 5000)) == {:calling_self, {GenServer, :call, [{:via, :global, name}, :pop, 5000]}} + assert catch_exit(GenServer.call(self(), :pop, 5000)) == {:calling_self, {GenServer, :call, [self(), :pop, 5000]}} + assert catch_exit(GenServer.call(pid, :noreply, 1)) == {:timeout, {GenServer, :call, [pid, :noreply, 1]}} + assert catch_exit(GenServer.call(nil, :pop, 5000)) == {:noproc, {GenServer, :call, [nil, :pop, 5000]}} + assert catch_exit(GenServer.call(stopped_pid, :pop, 5000)) == {:noproc, {GenServer, :call, [stopped_pid, :pop, 5000]}} + assert catch_exit(GenServer.call({:stack, :bogus_node}, :pop, 5000)) == {{:nodedown, :bogus_node}, {GenServer, :call, [{:stack, :bogus_node}, :pop, 5000]}} end test "nil name" do @@ -58,9 +113,9 @@ defmodule GenServerTest do test "start/2" do {:ok, pid} = GenServer.start(Stack, [:hello]) - {:links, links} = Process.info(self, :links) + {:links, links} = Process.info(self(), :links) 
refute pid in links - GenServer.call(pid, :stop) + GenServer.stop(pid) end test "abcast/3" do @@ -69,10 +124,10 @@ defmodule GenServerTest do assert GenServer.abcast(:stack, {:push, :hello}) == :abcast assert GenServer.call({:stack, node()}, :pop) == :hello - assert GenServer.abcast([node, :foo@bar], :stack, {:push, :world}) == :abcast + assert GenServer.abcast([node(), :foo@bar], :stack, {:push, :world}) == :abcast assert GenServer.call(:stack, :pop) == :world - GenServer.call(:stack, :stop) + GenServer.stop(:stack) end test "multi_call/4" do @@ -80,9 +135,34 @@ defmodule GenServerTest do assert GenServer.multi_call(:stack, :pop) == {[{node(), :hello}], []} - assert GenServer.multi_call([node, :foo@bar], :stack, :pop) == - {[{node, :world}], [:foo@bar]} + assert GenServer.multi_call([node(), :foo@bar], :stack, :pop) == + {[{node(), :world}], [:foo@bar]} + + GenServer.stop(:stack) + end + + test "whereis/1" do + name = :whereis_server + + {:ok, pid} = GenServer.start_link(Stack, [], name: name) + assert GenServer.whereis(name) == pid + assert GenServer.whereis({name, node()}) == pid + assert GenServer.whereis({name, :another_node}) == {name, :another_node} + assert GenServer.whereis(pid) == pid + assert GenServer.whereis(:whereis_bad_server) == nil + + {:ok, pid} = GenServer.start_link(Stack, [], name: {:global, name}) + assert GenServer.whereis({:global, name}) == pid + assert GenServer.whereis({:global, :whereis_bad_server}) == nil + assert GenServer.whereis({:via, :global, name}) == pid + assert GenServer.whereis({:via, :global, :whereis_bad_server}) == nil + end + + test "stop/3" do + {:ok, pid} = GenServer.start(Stack, []) + assert GenServer.stop(pid, :normal) == :ok - GenServer.call(:stack, :stop) + {:ok, _} = GenServer.start(Stack, [], name: :stack_for_stop) + assert GenServer.stop(:stack_for_stop, :normal) == :ok end end diff --git a/lib/elixir/test/elixir/hash_dict_test.exs b/lib/elixir/test/elixir/hash_dict_test.exs deleted file mode 100644 index 3a64f3e818b..00000000000 --- a/lib/elixir/test/elixir/hash_dict_test.exs +++ /dev/null @@ -1,91 +0,0 @@ -Code.require_file "test_helper.exs", __DIR__ - -defmodule HashDictTest do - use ExUnit.Case, async: true - - @dict Enum.into([foo: :bar], HashDict.new) - - test "access" do - dict = Enum.into([foo: :baz], HashDict.new) - assert Access.get(@dict, :foo) == :bar - assert Access.get_and_update(@dict, :foo, fn :bar -> {:ok, :baz} end) == {:ok, dict} - assert Access.get_and_update(HashDict.new, :foo, fn nil -> {:ok, :baz} end) == {:ok, dict} - end - - test "is serializable as attribute" do - assert @dict == Enum.into([foo: :bar], HashDict.new) - end - - test "is accessible as attribute" do - assert @dict[:foo] == :bar - end - - test "small dict smoke test" do - smoke_test(1..8) - smoke_test(8..1) - end - - test "medium dict smoke test" do - smoke_test(1..80) - smoke_test(80..1) - end - - test "large dict smoke test" do - smoke_test(1..1200) - smoke_test(1200..1) - end - - test "reduce/3 (via to_list)" do - dict = filled_dict(8) - list = dict |> HashDict.to_list - assert length(list) == 8 - assert {1, 1} in list - assert list == Enum.to_list(dict) - - dict = filled_dict(20) - list = dict |> HashDict.to_list - assert length(list) == 20 - assert {1, 1} in list - assert list == Enum.to_list(dict) - - dict = filled_dict(120) - list = dict |> HashDict.to_list - assert length(list) == 120 - assert {1, 1} in list - assert list == Enum.to_list(dict) - end - - test "comparison when subsets" do - d1 = Enum.into [a: 0], HashDict.new - d2 = Enum.into 
[a: 0, b: 1], HashDict.new - - refute HashDict.equal?(d1, d2) - refute HashDict.equal?(d2, d1) - end - - defp smoke_test(range) do - {dict, _} = Enum.reduce range, {HashDict.new, 1}, fn(x, {acc, i}) -> - acc = HashDict.put(acc, x, x) - assert HashDict.size(acc) == i - {acc, i + 1} - end - - Enum.each range, fn(x) -> - assert HashDict.get(dict, x) == x - end - - {dict, _} = Enum.reduce range, {dict, Enum.count(range)}, fn(x, {acc, i}) -> - assert HashDict.size(acc) == i - acc = HashDict.delete(acc, x) - assert HashDict.size(acc) == i - 1 - assert HashDict.get(acc, x) == nil - {acc, i - 1} - end - - assert dict == HashDict.new - end - - defp filled_dict(range) do - Enum.reduce 1..range, HashDict.new, &HashDict.put(&2, &1, &1) - end -end diff --git a/lib/elixir/test/elixir/hash_set_test.exs b/lib/elixir/test/elixir/hash_set_test.exs deleted file mode 100644 index cb231a1a2a6..00000000000 --- a/lib/elixir/test/elixir/hash_set_test.exs +++ /dev/null @@ -1,55 +0,0 @@ -Code.require_file "test_helper.exs", __DIR__ - -defmodule HashSetTest do - use ExUnit.Case, async: true - - test "union" do - assert HashSet.union(filled_set(21), filled_set(22)) == filled_set(22) - assert HashSet.union(filled_set(121), filled_set(120)) == filled_set(121) - end - - test "intersection" do - assert HashSet.intersection(filled_set(21), filled_set(20)) == filled_set(20) - assert HashSet.equal?(HashSet.intersection(filled_set(120), filled_set(121)), filled_set(120)) - end - - test "difference" do - assert HashSet.equal?(HashSet.difference(filled_set(20), filled_set(21)), HashSet.new) - - diff = HashSet.difference(filled_set(9000), filled_set(9000)) - assert HashSet.equal?(diff, HashSet.new) - assert HashSet.size(diff) == 0 - end - - test "subset?" do - assert HashSet.subset?(HashSet.new, HashSet.new) - assert HashSet.subset?(filled_set(6), filled_set(10)) - assert HashSet.subset?(filled_set(6), filled_set(120)) - refute HashSet.subset?(filled_set(120), filled_set(6)) - end - - test "equal?" 
do - assert HashSet.equal?(HashSet.new, HashSet.new) - assert HashSet.equal?(filled_set(20), HashSet.delete(filled_set(21), 21)) - assert HashSet.equal?(filled_set(120), filled_set(120)) - end - - test "to_list" do - set = filled_set(20) - list = HashSet.to_list(set) - assert length(list) == 20 - assert 1 in list - assert Enum.sort(list) == Enum.sort(1..20) - - set = filled_set(120) - list = HashSet.to_list(set) - assert length(list) == 120 - assert 1 in list - assert Enum.sort(list) == Enum.sort(1..120) - end - - defp filled_set(range) do - Enum.into 1..range, HashSet.new - end -end - diff --git a/lib/elixir/test/elixir/inspect/algebra_test.exs b/lib/elixir/test/elixir/inspect/algebra_test.exs index 3b4b46a5263..e5ca7817854 100644 --- a/lib/elixir/test/elixir/inspect/algebra_test.exs +++ b/lib/elixir/test/elixir/inspect/algebra_test.exs @@ -3,6 +3,8 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Inspect.AlgebraTest do use ExUnit.Case, async: true + doctest Inspect.Algebra + import Inspect.Algebra def helloabcd do @@ -15,36 +17,42 @@ defmodule Inspect.AlgebraTest do "d") end - def factor(doc, w), do: format(w, 0, [{0, :flat, group(doc)}]) + def sdoc(doc) do + format(group(doc), :infinity) + end + + defp render(doc, limit) do + format(doc, limit) |> IO.iodata_to_binary + end test "empty doc" do - # Consistence with definitions - assert empty == :doc_nil + # Consistent with definitions + assert empty() == :doc_nil - # Consistence of corresponding sdoc - assert factor(empty, 80) == [] + # Consistent with corresponding sdoc + assert sdoc(empty()) == [] # Consistent formatting - assert pretty(empty, 80) == "" + assert render(empty(), 80) == "" end test "break doc" do - # Consistence with definitions + # Consistent with definitions assert break("break") == {:doc_break, "break"} assert break("") == {:doc_break, ""} # Wrong argument type assert_raise FunctionClauseError, fn -> break(42) end - # Consistence of corresponding sdoc - assert factor(break("_"), 80) == ["_"] + # Consistent with corresponding sdoc + assert sdoc(break("_")) == ["_"] # Consistent formatting - assert pretty(break("_"), 80) == "_" + assert render(break("_"), 80) == "_" end test "glue doc" do - # Consistence with definitions + # Consistent with definitions assert glue("a", "->", "b") == {:doc_cons, "a", {:doc_cons, {:doc_break, "->"}, "b"} } @@ -55,71 +63,123 @@ defmodule Inspect.AlgebraTest do end test "text doc" do - # Consistence of corresponding sdoc - assert factor("_", 80) == ["_"] + # Consistent with corresponding sdoc + assert sdoc("_") == ["_"] # Consistent formatting - assert pretty("_", 80) == "_" + assert render("_", 80) == "_" end test "space doc" do - # Consistency with definitions + # Consistent with definitions assert space("a", "b") == {:doc_cons, "a", {:doc_cons, " ", "b"} } end test "nest doc" do - # Consistence with definitions - assert nest(empty, 1) == {:doc_nest, empty, 1} - assert nest(empty, 0) == :doc_nil + # Consistent with definitions + assert nest(empty(), 1) == {:doc_nest, empty(), 1} + assert nest(empty(), 0) == :doc_nil # Wrong argument type - assert_raise FunctionClauseError, fn -> nest("foo", empty) end + assert_raise FunctionClauseError, fn -> nest("foo", empty()) end - # Consistence of corresponding sdoc - assert factor(nest("a", 1), 80) == ["a"] - assert format(2, 0, [{0, :break, nest(glue("a", "b"), 1)}]) == ["a", "\n ", "b"] + # Consistent with corresponding sdoc + assert sdoc(nest("a", 1)) == ["a"] # Consistent formatting - assert pretty(nest("a", 1), 80) == "a" - assert 
render(format 2, 0, [{0, :break, nest(glue("a", "b"), 1)}]) == "a\n b" + assert render(nest("a", 1), 80) == "a" + assert render(nest(glue("a", "b"), 1), 2) == "a\n b" + end + + test "color doc" do + # Consistent with definitions + opts = %Inspect.Opts{} + assert color(empty(), :atom, opts) == empty() + + opts = %Inspect.Opts{syntax_colors: [regex: :red]} + assert color(empty(), :atom, opts) == empty() + + opts = %Inspect.Opts{syntax_colors: [atom: :red]} + doc1 = {:doc_color, "Hi", :red} + doc2 = {:doc_color, empty(), :reset} + assert color("Hi", :atom, opts) == concat(doc1, doc2) + + opts = %Inspect.Opts{syntax_colors: [reset: :red]} + assert color(empty(), :atom, opts) == empty() + + opts = %Inspect.Opts{syntax_colors: [number: :cyan, reset: :red]} + doc1 = {:doc_color, "123", :cyan} + doc2 = {:doc_color, empty(), :red} + assert color("123", :number, opts) == concat(doc1, doc2) + + # Consistent formatting + opts = %Inspect.Opts{syntax_colors: [atom: :cyan]} + assert render(glue(color("AA", :atom, opts), "BB"), 5) == "\e[36mAA\e[0m BB" + assert render(glue(color("AA", :atom, opts), "BB"), 3) == "\e[36mAA\e[0m\nBB" + assert render(glue("AA", color("BB", :atom, opts)), 6) == "AA \e[36mBB\e[0m" end test "line doc" do - # Consistency with definitions + # Consistent with definitions assert line("a", "b") == {:doc_cons, "a", {:doc_cons, :doc_line, "b"}} - # Consistence of corresponding sdoc - assert factor(line("a", "b"), 1) == ["a", "\n", "b"] - assert factor(line("a", "b"), 9) == ["a", "\n", "b"] + # Consistent with corresponding sdoc + assert sdoc(line("a", "b")) == ["a", "\n", "b"] # Consistent formatting - assert pretty(line(glue("aaa", "bbb"), glue("ccc", "ddd")), 10) == + assert render(line(glue("aaa", "bbb"), glue("ccc", "ddd")), 10) == "aaa bbb\nccc ddd" end test "group doc" do - # Consistency with definitions + # Consistent with definitions assert group(glue("a", "b")) == - {:doc_group, {:doc_cons, "a", concat(break, "b")}} - assert group(empty) == {:doc_group, empty} + {:doc_group, {:doc_cons, "a", concat(break(), "b")}} + assert group(empty()) == {:doc_group, empty()} - # Consistence of corresponding sdoc - assert factor(glue("a", "b"), 1) == ["a", " ", "b"] - assert factor(glue("a", "b"), 9) == ["a", " ", "b"] + # Consistent with corresponding sdoc + assert sdoc(glue("a", "b")) == ["a", " ", "b"] # Consistent formatting - assert pretty(helloabcd, 5) == "hello\na b\ncd" - assert pretty(helloabcd, 80) == "hello a b cd" + assert render(helloabcd(), 5) == "hello\na b\ncd" + assert render(helloabcd(), 80) == "hello a b cd" end test "formatting with infinity" do s = String.duplicate "x", 50 g = ";" - doc = group(glue(s, g, s) |> glue(g, s) |> glue(g, s) |> glue(g, s)) + doc = glue(s, g, s) |> glue(g, s) |> glue(g, s) |> glue(g, s) |> group + + assert render(doc, :infinity) == s <> g <> s <> g <> s <> g <> s <> g <> s + end + + test "formatting surround_many with empty" do + sm = &surround_many("[", &1, "]", %Inspect.Opts{}, fn(d, _) -> d end, ",") + + assert sm.([]) |> render(80) == "[]" + assert sm.([empty()]) |> render(80) == "[]" + assert sm.([empty(), empty()]) |> render(80) == "[]" + assert sm.(["a"]) |> render(80) == "[a]" + assert sm.(["a", empty()]) |> render(80) == "[a]" + assert sm.([empty(), "a"]) |> render(80) == "[a]" + assert sm.(["a", empty(), "b"]) |> render(80) == "[a, b]" + assert sm.([empty(), "a", "b"]) |> render(80) == "[a, b]" + assert sm.(["a", "b", empty()]) |> render(80) == "[a, b]" + assert sm.(["a", "b" | "c"]) |> render(80) == "[a, b | c]" + assert sm.(["a" | 
"b"]) |> render(80) == "[a | b]" + assert sm.(["a" | empty()]) |> render(80) == "[a]" + assert sm.([empty() | "b"]) |> render(80) == "[b]" + end + + test "surround_many with docs as the wrappers and as the separator" do + opts = %Inspect.Opts{} + fun = fn(d, _) -> d end - assert pretty(doc, :infinity) == s <> g <> s <> g <> s <> g <> s <> g <> s + doc = surround_many(break("["), ["a", "b", "c"], break("]"), opts, fun, break(",")) + assert render(doc, 80) == "[a, b, c]" + assert render(doc, 5) == "[a, b\n c]" end end diff --git a/lib/elixir/test/elixir/inspect_test.exs b/lib/elixir/test/elixir/inspect_test.exs index 307801d8efc..e3657a0e055 100644 --- a/lib/elixir/test/elixir/inspect_test.exs +++ b/lib/elixir/test/elixir/inspect_test.exs @@ -3,298 +3,505 @@ Code.require_file "test_helper.exs", __DIR__ defmodule Inspect.AtomTest do use ExUnit.Case, async: true - test :basic do + doctest Inspect + + test "basic" do assert inspect(:foo) == ":foo" end - test :empty do + test "empty" do assert inspect(:"") == ":\"\"" end - test :true_false_nil do + test "true, false, nil" do assert inspect(false) == "false" assert inspect(true) == "true" assert inspect(nil) == "nil" end - test :with_uppercase do + test "with uppercase letters" do assert inspect(:fOO) == ":fOO" assert inspect(:FOO) == ":FOO" end - test :alias_atom do + test "aliases" do assert inspect(Foo) == "Foo" assert inspect(Foo.Bar) == "Foo.Bar" assert inspect(Elixir) == "Elixir" + assert inspect(Elixir.Foo) == "Foo" assert inspect(Elixir.Elixir) == "Elixir.Elixir" + assert inspect(Elixir.Elixir.Foo) == "Elixir.Elixir.Foo" end - test :with_integers do + test "with integers" do assert inspect(User1) == "User1" assert inspect(:user1) == ":user1" end - test :with_punctuation do + test "with trailing ? or !" do assert inspect(:foo?) == ":foo?" assert inspect(:bar!) == ":bar!" + assert inspect(:Foo?) == ":Foo?" end - test :op do - assert inspect(:+) == ":+" + test "operators" do + assert inspect(:+) == ":+" + assert inspect(:<~) == ":<~" + assert inspect(:~>) == ":~>" assert inspect(:&&&) == ":&&&" assert inspect(:~~~) == ":~~~" + assert inspect(:<<~) == ":<<~" + assert inspect(:~>>) == ":~>>" + assert inspect(:<~>) == ":<~>" + assert inspect(:<|>) == ":<|>" end - test :... do - assert inspect(:...) == ":..." - end - - test :@ do + test "with @" do assert inspect(:@) == ":@" assert inspect(:foo@bar) == ":foo@bar" assert inspect(:foo@bar@) == ":foo@bar@" assert inspect(:foo@bar@baz) == ":foo@bar@baz" end - test :others do + test "others" do + assert inspect(:...) == ":..." 
assert inspect(:<<>>) == ":<<>>" - assert inspect(:{}) == ":{}" - assert inspect(:%{}) == ":%{}" - assert inspect(:%) == ":%" + assert inspect(:{}) == ":{}" + assert inspect(:%{}) == ":%{}" + assert inspect(:%) == ":%" + end + + test "escaping" do + assert inspect(:"hy-phen") == ~s(:"hy-phen") + assert inspect(:"@hello") == ~s(:"@hello") + assert inspect(:"Wat!?") == ~s(:"Wat!?") + assert inspect(:"'quotes' and \"double quotes\"") == ~S(:"'quotes' and \"double quotes\"") + end + + test "colors" do + opts = [syntax_colors: [atom: :red]] + assert inspect(:hello, opts) == "\e[31m:hello\e[0m" + opts = [syntax_colors: [reset: :cyan]] + assert inspect(:hello, opts) == ":hello" + end + + # TODO: Remove this check once we depend only on 20 + if :erlang.system_info(:otp_release) >= '20' do + test "unicode" do + assert inspect(:"olá") == ":olá" + assert inspect(:"Olá") == ":Olá" + assert inspect(:"Ólá") == ":Ólá" + + hello_world = String.to_atom("こんにちは世界") + assert inspect(hello_world) == ":こんにちは世界" + + nfd = :unicode.characters_to_nfd_binary("olá") + assert inspect(String.to_atom(nfd)) == ":\"#{nfd}\"" + end end end defmodule Inspect.BitStringTest do use ExUnit.Case, async: true - test :bitstring do - assert inspect(<<1 :: [size(12), integer, signed]>>) == "<<0, 1::size(4)>>" + test "bitstring" do + assert inspect(<<1::12-integer-signed>>) == "<<0, 1::size(4)>>" end - test :binary do + test "binary" do assert inspect("foo") == "\"foo\"" assert inspect(<>) == "\"abc\"" end - test :escape do + test "escape" do assert inspect("f\no") == "\"f\\no\"" assert inspect("f\\o") == "\"f\\\\o\"" assert inspect("f\ao") == "\"f\\ao\"" end - test :utf8 do + test "UTF-8" do assert inspect(" ゆんゆん") == "\" ゆんゆん\"" end - test :all_escapes do + test "all escapes" do assert inspect("\a\b\d\e\f\n\r\s\t\v") == "\"\\a\\b\\d\\e\\f\\n\\r \\t\\v\"" end - test :opt_infer do - assert inspect(<<"eric", 193, "mj">>, binaries: :infer) == ~s(<<101, 114, 105, 99, 193, 109, 106>>) - assert inspect(<<"eric">>, binaries: :infer) == ~s("eric") + test "opt infer" do + assert inspect(<<"john", 193, "doe">>, binaries: :infer) == ~s(<<106, 111, 104, 110, 193, 100, 111, 101>>) + assert inspect(<<"john">>, binaries: :infer) == ~s("john") assert inspect(<<193>>, binaries: :infer) == ~s(<<193>>) end - test :opt_as_strings do - assert inspect(<<"eric", 193, "mj">>, binaries: :as_strings) == ~s("eric\\301mj") - assert inspect(<<"eric">>, binaries: :as_strings) == ~s("eric") - assert inspect(<<193>>, binaries: :as_strings) == ~s("\\301") + test "opt as strings" do + assert inspect(<<"john", 193, "doe">>, binaries: :as_strings) == ~s("john\\xC1doe") + assert inspect(<<"john">>, binaries: :as_strings) == ~s("john") + assert inspect(<<193>>, binaries: :as_strings) == ~s("\\xC1") end - test :opt_as_binaries do - assert inspect(<<"eric", 193, "mj">>, binaries: :as_binaries) == "<<101, 114, 105, 99, 193, 109, 106>>" - assert inspect(<<"eric">>, binaries: :as_binaries) == "<<101, 114, 105, 99>>" + test "opt as binaries" do + assert inspect(<<"john", 193, "doe">>, binaries: :as_binaries) == "<<106, 111, 104, 110, 193, 100, 111, 101>>" + assert inspect(<<"john">>, binaries: :as_binaries) == "<<106, 111, 104, 110>>" assert inspect(<<193>>, binaries: :as_binaries) == "<<193>>" + # base: :hex is recognized + assert inspect("abc", binaries: :as_binary, base: :hex) == "<<0x61, 0x62, 0x63>>" + # any base other than :decimal implies binaries: :as_binaries + assert inspect("abc", base: :hex) == "<<0x61, 0x62, 0x63>>" + assert inspect("abc", base: :octal) == 
"<<0o141, 0o142, 0o143>>" + # size is still represented as decimal + assert inspect(<<10, 11, 12::4>>, base: :hex) == "<<0xA, 0xB, 0xC::size(4)>>" end - test :unprintable_with_opts do + test "unprintable with opts" do assert inspect(<<193, 193, 193, 193>>, limit: 3) == "<<193, 193, 193, ...>>" end + + test "printable limit" do + assert inspect("hello world", printable_limit: 4) == ~s("hell" <> ...) + # non printable characters after the limit don't matter + assert inspect("hello world" <> <<0>>, printable_limit: 4) == ~s("hell" <> ...) + # non printable strings aren't affected by printable limit + assert inspect(<<0,1,2,3,4>>, printable_limit: 3) == ~s(<<0, 1, 2, 3, 4>>) + end end defmodule Inspect.NumberTest do use ExUnit.Case, async: true - test :integer do + test "integer" do assert inspect(100) == "100" end - test :float do + test "decimal" do + assert inspect(100, base: :decimal) == "100" + end + + test "hex" do + assert inspect(100, base: :hex) == "0x64" + end + + test "octal" do + assert inspect(100, base: :octal) == "0o144" + end + + test "binary" do + assert inspect(86, base: :binary) == "0b1010110" + end + + test "float" do assert inspect(1.0) == "1.0" assert inspect(1.0E10) == "1.0e10" assert inspect(1.0e10) == "1.0e10" assert inspect(1.0e-10) == "1.0e-10" end + + test "integer colors" do + opts = [syntax_colors: [number: :red]] + assert inspect(123, opts) == "\e[31m123\e[0m" + opts = [syntax_colors: [reset: :cyan]] + assert inspect(123, opts) == "123" + end + + test "float colors" do + opts = [syntax_colors: [number: :red]] + assert inspect(1.3, opts) == "\e[31m1.3\e[0m" + opts = [syntax_colors: [reset: :cyan]] + assert inspect(1.3, opts) == "1.3" + end end defmodule Inspect.TupleTest do use ExUnit.Case - test :basic do + test "basic" do assert inspect({1, "b", 3}) == "{1, \"b\", 3}" assert inspect({1, "b", 3}, [pretty: true, width: 1]) == "{1,\n \"b\",\n 3}" end - test :empty do + test "empty" do assert inspect({}) == "{}" end - test :with_limit do + test "with limit" do assert inspect({1, 2, 3, 4}, limit: 3) == "{1, 2, 3, ...}" end + + test "colors" do + opts = [syntax_colors: []] + assert inspect({}, opts) == "{}" + + opts = [syntax_colors: [reset: :cyan]] + assert inspect({}, opts) == "{}" + assert inspect({:x, :y}, opts) == "{:x, :y}" + + opts = [syntax_colors: [reset: :cyan, atom: :red]] + assert inspect({}, opts) == "{}" + assert inspect({:x, :y}, opts) == + "{\e[31m:x\e[36m, \e[31m:y\e[36m}" + + opts = [syntax_colors: [tuple: :green, reset: :cyan, atom: :red]] + assert inspect({}, opts) == "\e[32m{\e[36m\e[32m}\e[36m" + assert inspect({:x, :y}, opts) == + "\e[32m{\e[36m" <> + "\e[31m:x\e[36m" <> + "\e[32m,\e[36m " <> + "\e[31m:y\e[36m" <> + "\e[32m}\e[36m" + end end defmodule Inspect.ListTest do use ExUnit.Case, async: true - test :basic do + test "basic" do assert inspect([ 1, "b", 3 ]) == "[1, \"b\", 3]" assert inspect([ 1, "b", 3 ], [pretty: true, width: 1]) == "[1,\n \"b\",\n 3]" end - test :printable do + test "printable" do assert inspect('abc') == "'abc'" end - test :keyword do + test "printable limit" do + assert inspect('hello world', printable_limit: 4) == ~s('hell' ++ ...) + # non printable characters after the limit don't matter + assert inspect('hello world' ++ [0], printable_limit: 4) == ~s('hell' ++ ...) 
+ # non printable strings aren't affected by printable limit + assert inspect([0,1,2,3,4], printable_limit: 3) == ~s([0, 1, 2, 3, 4]) + end + + test "keyword" do assert inspect([a: 1]) == "[a: 1]" assert inspect([a: 1, b: 2]) == "[a: 1, b: 2]" assert inspect([a: 1, a: 2, b: 2]) == "[a: 1, a: 2, b: 2]" assert inspect(["123": 1]) == ~s(["123": 1]) - assert inspect([foo: [1,2,3,:bar], bazzz: :bat], [pretty: true, width: 30]) == + assert inspect([foo: [1, 2, 3, :bar], bazzz: :bat], [pretty: true, width: 30]) == "[foo: [1, 2, 3, :bar],\n bazzz: :bat]" end - test :opt_infer do - assert inspect('eric' ++ [0] ++ 'mj', char_lists: :infer) == "[101, 114, 105, 99, 0, 109, 106]" - assert inspect('eric', char_lists: :infer) == "'eric'" - assert inspect([0], char_lists: :infer) == "[0]" + test "opt infer" do + assert inspect('john' ++ [0] ++ 'doe', charlists: :infer) == "[106, 111, 104, 110, 0, 100, 111, 101]" + assert inspect('john', charlists: :infer) == "'john'" + assert inspect([0], charlists: :infer) == "[0]" end - test :opt_as_strings do - assert inspect('eric' ++ [0] ++ 'mj', char_lists: :as_char_lists) == "'eric\\000mj'" - assert inspect('eric', char_lists: :as_char_lists) == "'eric'" - assert inspect([0], char_lists: :as_char_lists) == "'\\000'" + test "opt as strings" do + assert inspect('john' ++ [0] ++ 'doe', charlists: :as_charlists) == "'john\\0doe'" + assert inspect('john', charlists: :as_charlists) == "'john'" + assert inspect([0], charlists: :as_charlists) == "'\\0'" end - test :opt_as_lists do - assert inspect('eric' ++ [0] ++ 'mj', char_lists: :as_lists) == "[101, 114, 105, 99, 0, 109, 106]" - assert inspect('eric', char_lists: :as_lists) == "[101, 114, 105, 99]" - assert inspect([0], char_lists: :as_lists) == "[0]" + test "opt as lists" do + assert inspect('john' ++ [0] ++ 'doe', charlists: :as_lists) == "[106, 111, 104, 110, 0, 100, 111, 101]" + assert inspect('john', charlists: :as_lists) == "[106, 111, 104, 110]" + assert inspect([0], charlists: :as_lists) == "[0]" end - test :non_printable do + test "non printable" do assert inspect([{:b, 1}, {:a, 1}]) == "[b: 1, a: 1]" end - test :unproper do + test "improper" do assert inspect([:foo | :bar]) == "[:foo | :bar]" - assert inspect([1,2,3,4,5|42], [pretty: true, width: 1]) == "[1,\n 2,\n 3,\n 4,\n 5 |\n 42]" + assert inspect([1, 2, 3, 4, 5 | 42], [pretty: true, width: 1]) == "[1,\n 2,\n 3,\n 4,\n 5 |\n 42]" end - test :codepoints do + test "nested" do + assert inspect(Enum.reduce(1..100, [0], &[&2, Integer.to_string(&1)]), [limit: 5]) == + "[[[[[[...], ...], \"97\"], \"98\"], \"99\"], \"100\"]" + assert inspect(Enum.reduce(1..100, [0], &[&2 | Integer.to_string(&1)]), [limit: 5]) == + "[[[[[[...] 
| \"96\"] | \"97\"] | \"98\"] | \"99\"] | \"100\"]" + end + + test "codepoints" do assert inspect('é') == "[233]" end - test :empty do + test "empty" do assert inspect([]) == "[]" end - test :with_limit do + test "with limit" do assert inspect([ 1, 2, 3, 4 ], limit: 3) == "[1, 2, 3, ...]" end + + test "colors" do + opts = [syntax_colors: []] + assert inspect([], opts) == "[]" + + opts = [syntax_colors: [reset: :cyan]] + assert inspect([], opts) == "[]" + assert inspect([:x, :y], opts) == + "[:x, :y]" + + opts = [syntax_colors: [reset: :cyan, atom: :red]] + assert inspect([], opts) == "[]" + assert inspect([:x, :y], opts) == + "[\e[31m:x\e[36m, \e[31m:y\e[36m]" + + opts = [syntax_colors: [reset: :cyan, atom: :red, list: :green]] + assert inspect([], opts) == "\e[32m[]\e[36m" + assert inspect([:x, :y], opts) == + "\e[32m[\e[36m" <> + "\e[31m:x\e[36m" <> + "\e[32m,\e[36m " <> + "\e[31m:y\e[36m" <> + "\e[32m]\e[36m" + end + + test "keyword with colors" do + opts = [syntax_colors: [reset: :cyan, list: :green, number: :blue]] + assert inspect([], opts) == "\e[32m[]\e[36m" + assert inspect([a: 9999], opts) == + "\e[32m[\e[36m" <> + "a: " <> + "\e[34m9999\e[36m" <> + "\e[32m]\e[36m" + + opts = [syntax_colors: [reset: :cyan, atom: :red, list: :green, number: :blue]] + assert inspect([], opts) == "\e[32m[]\e[36m" + assert inspect([a: 9999], opts) == + "\e[32m[\e[36m" <> + "\e[31ma: \e[36m" <> + "\e[34m9999\e[36m" <> + "\e[32m]\e[36m" + end + + test "limit with colors" do + opts = [limit: 1, syntax_colors: [reset: :cyan, list: :green, atom: :red]] + assert inspect([], opts) == "\e[32m[]\e[36m" + assert inspect([:x, :y], opts) == + "\e[32m[\e[36m" <> + "\e[31m:x\e[36m" <> + "\e[32m,\e[36m " <> + "..." <> + "\e[32m]\e[36m" + end end defmodule Inspect.MapTest do use ExUnit.Case - test :basic do + test "basic" do assert inspect(%{1 => "b"}) == "%{1 => \"b\"}" assert inspect(%{1 => "b", 2 => "c"}, [pretty: true, width: 1]) == "%{1 => \"b\",\n 2 => \"c\"}" end - test :keyword do + test "keyword" do assert inspect(%{a: 1}) == "%{a: 1}" assert inspect(%{a: 1, b: 2}) == "%{a: 1, b: 2}" assert inspect(%{a: 1, b: 2, c: 3}) == "%{a: 1, b: 2, c: 3}" end - test :with_limit do + test "with limit" do assert inspect(%{1 => 1, 2 => 2, 3 => 3, 4 => 4}, limit: 3) == "%{1 => 1, 2 => 2, 3 => 3, ...}" end defmodule Public do - def __struct__ do - %{key: 0, __struct__: Public} - end + defstruct key: 0 end defmodule Private do end - test :public_struct do + test "public struct" do assert inspect(%Public{key: 1}) == "%Inspect.MapTest.Public{key: 1}" end - test :public_modified_struct do + test "public modified struct" do public = %Public{key: 1} assert inspect(Map.put(public, :foo, :bar)) == "%{__struct__: Inspect.MapTest.Public, foo: :bar, key: 1}" end - test :private_struct do + test "private struct" do assert inspect(%{__struct__: Private, key: 1}) == "%{__struct__: Inspect.MapTest.Private, key: 1}" end defmodule Failing do - def __struct__ do - %{key: 0} - end + defstruct key: 0 defimpl Inspect do - def inspect(_, _) do - raise "failing" + def inspect(struct, _) do + struct.unknown end end end - test :bad_implementation do - msg = "Got RuntimeError with message \"failing\" " <> - "while inspecting %{__struct__: Inspect.MapTest.Failing, key: 0}" + test "bad implementation unsafe" do + msg = "got KeyError with message \"key :unknown not found in: " <> + "%{__struct__: Inspect.MapTest.Failing, key: 0}\" while " <> + "inspecting %{__struct__: Inspect.MapTest.Failing, key: 0}" - assert_raise ArgumentError, msg, fn -> - 
inspect(%Failing{}) + assert_raise Inspect.Error, msg, fn -> + inspect(%Failing{}, safe: false) end + + assert [{Inspect.Inspect.MapTest.Failing, :inspect, 2, _} | _] = System.stacktrace end - test :exception do + test "bad implementation safe" do + msg = "got KeyError with message \"key :unknown not found in: " <> + "%{__struct__: Inspect.MapTest.Failing, key: 0}\" while " <> + "inspecting %{__struct__: Inspect.MapTest.Failing, key: 0}" + + assert inspect(%Failing{}) == + inspect(%Inspect.Error{message: "#{msg}"}) + end + + test "exception" do assert inspect(%RuntimeError{message: "runtime error"}) == "%RuntimeError{message: \"runtime error\"}" end + + test "colors" do + opts = [syntax_colors: [reset: :cyan, atom: :red, number: :magenta]] + assert inspect(%{1 => 2}, opts) == + "%{\e[35m1\e[36m => \e[35m2\e[36m}" + + assert inspect(%{a: 1}, opts) == + "%{\e[31ma: \e[36m\e[35m1\e[36m}" + + assert inspect(%Public{key: 1}, opts) == + "%Inspect.MapTest.Public{\e[31mkey: \e[36m\e[35m1\e[36m}" + + opts = [syntax_colors: [reset: :cyan, atom: :red, map: :green, number: :blue]] + assert inspect(%{a: 9999}, opts) == + "\e[32m%{\e[36m" <> + "\e[31ma: \e[36m" <> + "\e[34m9999\e[36m" <> + "\e[32m}\e[36m" + end end defmodule Inspect.OthersTest do use ExUnit.Case, async: true - def f do + def fun() do + fn() -> :ok end + end + + def unquote(:"weirdly named/fun-")() do fn() -> :ok end end - test :external_elixir_funs do + test "external Elixir funs" do bin = inspect(&Enum.map/2) assert bin == "&Enum.map/2" + + assert inspect(&__MODULE__."weirdly named/fun-"/0) == + ~s(&Inspect.OthersTest."weirdly named/fun-"/0) end - test :external_erlang_funs do + test "external Erlang funs" do bin = inspect(&:lists.map/2) assert bin == "&:lists.map/2" end - test :outdated_functions do + test "outdated functions" do defmodule V do def fun do fn -> 1 end @@ -317,27 +524,44 @@ defmodule Inspect.OthersTest do Application.delete_env(:elixir, :named) end - test :other_funs do + test "other funs" do assert "#Function<" <> _ = inspect(fn(x) -> x + 1 end) - assert "#Function<" <> _ = inspect(f) + assert "#Function<" <> _ = inspect(fun()) + opts = [syntax_colors: []] + assert "#Function<" <> _ = inspect(fun(), opts) + opts = [syntax_colors: [reset: :red]] + assert "#Function<" <> rest = inspect(fun(), opts) + assert String.ends_with?(rest, ">") + + inspected = inspect(__MODULE__."weirdly named/fun-"()) + assert inspected =~ ~r(#Function<\d+\.\d+/0 in Inspect\.OthersTest\."weirdly named/fun-"/0>) end - test :hash_dict_set do - assert "#HashDict<" <> _ = inspect(HashDict.new) - assert "#HashSet<" <> _ = inspect(HashSet.new) + test "map set" do + assert "#MapSet<" <> _ = inspect(MapSet.new) end - test :pids do - assert "#PID<" <> _ = inspect(self) + test "PIDs" do + assert "#PID<" <> _ = inspect(self()) + opts = [syntax_colors: []] + assert "#PID<" <> _ = inspect(self(), opts) + opts = [syntax_colors: [reset: :cyan]] + assert "#PID<" <> rest = inspect(self(), opts) + assert String.ends_with?(rest, ">") end - test :references do - assert "#Reference<" <> _ = inspect(make_ref) + test "references" do + assert "#Reference<" <> _ = inspect(make_ref()) end - test :regex do - "~r/foo/m" = inspect(~r(foo)m) - "~r/\\a\\010\\177\\033\\f\\n\\r \\t\\v\\//" = inspect(Regex.compile!("\a\b\d\e\f\n\r\s\t\v/")) - "~r/\\a\\b\\d\\e\\f\\n\\r\\s\\t\\v\\//" = inspect(~r<\a\b\d\e\f\n\r\s\t\v/>) + test "regex" do + assert inspect(~r(foo)m) == "~r/foo/m" + assert inspect(Regex.compile!("\a\b\d\e\f\n\r\s\t\v/")) == + "~r/\\a\\x08\\x7F\\x1B\\f\\n\\r 
\\t\\v\\//" + assert inspect(~r<\a\b\d\e\f\n\r\s\t\v/>) == + "~r/\\a\\b\\d\\e\\f\\n\\r\\s\\t\\v\\//" + opts = [syntax_colors: [regex: :red]] + assert inspect(~r/hi/, opts) == + "\e[31m~r/hi/\e[0m" end end diff --git a/lib/elixir/test/elixir/integer_test.exs b/lib/elixir/test/elixir/integer_test.exs index 547c71567a0..c488bb57640 100644 --- a/lib/elixir/test/elixir/integer_test.exs +++ b/lib/elixir/test/elixir/integer_test.exs @@ -2,30 +2,129 @@ Code.require_file "test_helper.exs", __DIR__ defmodule IntegerTest do use ExUnit.Case, async: true + + doctest Integer + require Integer - test :odd? do - assert Integer.odd?(0) == false - assert Integer.odd?(1) == true - assert Integer.odd?(2) == false - assert Integer.odd?(3) == true - assert Integer.odd?(-1) == true - assert Integer.odd?(-2) == false - assert Integer.odd?(-3) == true + def test_is_odd_in_guards(number) when Integer.is_odd(number), + do: number + def test_is_odd_in_guards(_number), + do: false + + def test_is_even_in_guards(number) when Integer.is_even(number), + do: number + def test_is_even_in_guards(_number), + do: false + + test "is_odd/1" do + assert Integer.is_odd(0) == false + assert Integer.is_odd(1) == true + assert Integer.is_odd(2) == false + assert Integer.is_odd(3) == true + assert Integer.is_odd(-1) == true + assert Integer.is_odd(-2) == false + assert Integer.is_odd(-3) == true + assert test_is_odd_in_guards(10) == false + assert test_is_odd_in_guards(11) == 11 end - test :even? do - assert Integer.even?(0) == true - assert Integer.even?(1) == false - assert Integer.even?(2) == true - assert Integer.even?(3) == false - assert Integer.even?(-1) == false - assert Integer.even?(-2) == true - assert Integer.even?(-3) == false + test "is_even/1" do + assert Integer.is_even(0) == true + assert Integer.is_even(1) == false + assert Integer.is_even(2) == true + assert Integer.is_even(3) == false + assert Integer.is_even(-1) == false + assert Integer.is_even(-2) == true + assert Integer.is_even(-3) == false + assert test_is_even_in_guards(10) == 10 + assert test_is_even_in_guards(11) == false end - test :parse do + test "mod/2" do + assert Integer.mod(3, 2) == 1 + assert Integer.mod(0, 10) == 0 + assert Integer.mod(30000, 2001) == 1986 + assert Integer.mod(-20, 11) == 2 + end + + test "mod/2 raises ArithmeticError when divisor is 0" do + assert_raise ArithmeticError, fn -> Integer.mod(3, 0) end + assert_raise ArithmeticError, fn -> Integer.mod(-50, 0) end + end + + test "mod/2 raises ArithmeticError when non-integers used as arguments" do + assert_raise ArithmeticError, fn -> Integer.mod(3.0, 2) end + assert_raise ArithmeticError, fn -> Integer.mod(20, 1.2) end + end + + test "floor_div/2" do + assert Integer.floor_div(3, 2) == 1 + assert Integer.floor_div(0, 10) == 0 + assert Integer.floor_div(30000, 2001) == 14 + assert Integer.floor_div(-20, 11) == -2 + end + + test "floor_div/2 raises ArithmeticError when divisor is 0" do + assert_raise ArithmeticError, fn -> Integer.floor_div(3, 0) end + assert_raise ArithmeticError, fn -> Integer.floor_div(-50, 0) end + end + + test "floor_div/2 raises ArithmeticError when non-integers used as arguments" do + assert_raise ArithmeticError, fn -> Integer.floor_div(3.0, 2) end + assert_raise ArithmeticError, fn -> Integer.floor_div(20, 1.2) end + end + + test "digits/2" do + assert Integer.digits(0) == [0] + assert Integer.digits(0, 2) == [0] + assert Integer.digits(1) == [1] + assert Integer.digits(-1) == [-1] + assert Integer.digits(123, 123) == [1, 0] + assert Integer.digits(-123, 123) 
== [-1, 0] + assert Integer.digits(456, 1000) == [456] + assert Integer.digits(-456, 1000) == [-456] + assert Integer.digits(123) == [1, 2, 3] + assert Integer.digits(-123) == [-1, -2, -3] + assert Integer.digits(58127, 2) == [1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1] + assert Integer.digits(-58127, 2) == [-1, -1, -1, 0, 0, 0, -1, -1, 0, 0, 0, 0, -1, -1, -1, -1] + + for n <- Enum.to_list(-1..1) do + assert_raise FunctionClauseError, fn -> + Integer.digits(10, n) + Integer.digits(-10, n) + end + end + end + + test "undigits/2" do + assert Integer.undigits([]) == 0 + assert Integer.undigits([0]) == 0 + assert Integer.undigits([1]) == 1 + assert Integer.undigits([1, 0, 1]) == 101 + assert Integer.undigits([1, 4], 16) == 0x14 + assert Integer.undigits([1, 4], 8) == 0o14 + assert Integer.undigits([1, 1], 2) == 0b11 + assert Integer.undigits([1, 2, 3, 4, 5]) == 12345 + assert Integer.undigits([1, 0, -5]) == 95 + assert Integer.undigits([-1, -1, -5]) == -115 + assert Integer.undigits([0, 0, 0, -1, -1, -5]) == -115 + + for n <- Enum.to_list(-1..1) do + assert_raise FunctionClauseError, fn -> + Integer.undigits([1, 0, 1], n) + end + end + + assert_raise ArgumentError, "invalid digit 17 in base 16", fn -> + Integer.undigits([1, 2, 17], 16) + end + end + + test "parse/2" do assert Integer.parse("12") === {12, ""} + assert Integer.parse("012") === {12, ""} + assert Integer.parse("+12") === {12, ""} assert Integer.parse("-12") === {-12, ""} assert Integer.parse("123456789") === {123456789, ""} assert Integer.parse("12.5") === {12, ".5"} @@ -35,5 +134,126 @@ defmodule IntegerTest do assert Integer.parse("--1") === :error assert Integer.parse("+-1") === :error assert Integer.parse("three") === :error + + assert Integer.parse("12", 10) === {12, ""} + assert Integer.parse("-12", 12) === {-14, ""} + assert Integer.parse("12345678", 9) === {6053444, ""} + assert Integer.parse("3.14", 4) === {3, ".14"} + assert Integer.parse("64eb", 16) === {25835, ""} + assert Integer.parse("64eb", 10) === {64, "eb"} + assert Integer.parse("10", 2) === {2, ""} + assert Integer.parse("++4", 10) === :error + + # Base should be in range 2..36 + assert_raise ArgumentError, "invalid base 1", fn -> Integer.parse("2", 1) end + assert_raise ArgumentError, "invalid base 37", fn -> Integer.parse("2", 37) end + + # Base should be an integer + assert_raise ArgumentError, "invalid base 10.2", fn -> Integer.parse("2", 10.2) end + + assert_raise ArgumentError, "invalid base nil", fn -> Integer.parse("2", nil) end + end + + test "to_string/1" do + assert Integer.to_string(42) == "42" + assert Integer.to_string(+42) == "42" + assert Integer.to_string(-42) == "-42" + assert Integer.to_string(-0001) == "-1" + + for n <- [42.0, :forty_two, '42', "42"] do + assert_raise ArgumentError, fn -> + Integer.to_string(n) + end + end + end + + test "to_string/2" do + assert Integer.to_string(42, 2) == "101010" + assert Integer.to_string(42, 10) == "42" + assert Integer.to_string(42, 16) == "2A" + assert Integer.to_string(+42, 16) == "2A" + assert Integer.to_string(-42, 16) == "-2A" + assert Integer.to_string(-042, 16) == "-2A" + + for n <- [42.0, :forty_two, '42', "42"] do + assert_raise ArgumentError, fn -> + Integer.to_string(n, 42) + end + end + + for n <- [-1, 0, 1, 37] do + assert_raise ArgumentError, fn -> + Integer.to_string(42, n) + end + + assert_raise ArgumentError, fn -> + Integer.to_string(n, n) + end + end + end + + test "to_charlist/1" do + assert Integer.to_charlist(42) == '42' + assert Integer.to_charlist(+42) == '42' + assert 
Integer.to_charlist(-42) == '-42' + assert Integer.to_charlist(-0001) == '-1' + + for n <- [42.0, :forty_two, '42', "42"] do + assert_raise ArgumentError, fn -> + Integer.to_charlist(n) + end + end + end + + test "to_char_list/1" do + module = Integer + assert module.to_char_list(42) == '42' + end + + test "to_charlist/2" do + assert Integer.to_charlist(42, 2) == '101010' + assert Integer.to_charlist(42, 10) == '42' + assert Integer.to_charlist(42, 16) == '2A' + assert Integer.to_charlist(+42, 16) == '2A' + assert Integer.to_charlist(-42, 16) == '-2A' + assert Integer.to_charlist(-042, 16) == '-2A' + + for n <- [42.0, :forty_two, '42', "42"] do + assert_raise ArgumentError, fn -> + Integer.to_charlist(n, 42) + end + end + + for n <- [-1, 0, 1, 37] do + assert_raise ArgumentError, fn -> + Integer.to_charlist(42, n) + end + + assert_raise ArgumentError, fn -> + Integer.to_charlist(n, n) + end + end + end + + test "to_char_list/2" do + module = Integer + assert module.to_char_list(42, 2) == '101010' + end + + test "gcd/2" do + assert Integer.gcd(1, 5) == 1 + assert Integer.gcd(2, 3) == 1 + assert Integer.gcd(8, 12) == 4 + assert Integer.gcd(-8, 12) == 4 + assert Integer.gcd(8, -12) == 4 + assert Integer.gcd(-8, -12) == 4 + assert Integer.gcd(27, 27) == 27 + assert Integer.gcd(-27, -27) == 27 + assert Integer.gcd(-27, 27) == 27 + assert Integer.gcd(0, 3) == 3 + assert Integer.gcd(0, -3) == 3 + assert Integer.gcd(3, 0) == 3 + assert Integer.gcd(-3, 0) == 3 + assert Integer.gcd(0, 0) == 0 end end diff --git a/lib/elixir/test/elixir/io/ansi/docs_test.exs b/lib/elixir/test/elixir/io/ansi/docs_test.exs index 66173bc3b5c..7afd5ff25d7 100644 --- a/lib/elixir/test/elixir/io/ansi/docs_test.exs +++ b/lib/elixir/test/elixir/io/ansi/docs_test.exs @@ -5,88 +5,92 @@ defmodule IO.ANSI.DocsTest do import ExUnit.CaptureIO def format_heading(str) do - capture_io(fn -> IO.ANSI.Docs.print_heading(str, []) end) |> String.strip + capture_io(fn -> IO.ANSI.Docs.print_heading(str, []) end) |> String.trim_trailing end def format(str) do - capture_io(fn -> IO.ANSI.Docs.print(str, []) end) |> String.strip + capture_io(fn -> IO.ANSI.Docs.print(str, []) end) |> String.trim_trailing end test "heading is formatted" do result = format_heading("wibble") - assert String.starts_with?(result, "\e[0m\n\e[7m\e[33m\e[1m") + assert String.starts_with?(result, "\e[0m\n\e[7m\e[33m") assert String.ends_with?(result, "\e[0m\n\e[0m") assert String.contains?(result, " wibble ") end test "first level heading is converted" do result = format("# wibble\n\ntext\n") - assert result == "\e[33m\e[1mWIBBLE\e[0m\n\e[0m\ntext\n\e[0m" + assert result == "\e[33m# wibble\e[0m\n\e[0m\ntext\n\e[0m" end test "second level heading is converted" do result = format("## wibble\n\ntext\n") - assert result == "\e[33m\e[1mwibble\e[0m\n\e[0m\ntext\n\e[0m" + assert result == "\e[33m## wibble\e[0m\n\e[0m\ntext\n\e[0m" end test "third level heading is converted" do - result = format("## wibble\n\ntext\n") - assert result == "\e[33m\e[1mwibble\e[0m\n\e[0m\ntext\n\e[0m" + result = format("### wibble\n\ntext\n") + assert result == "\e[33m### wibble\e[0m\n\e[0m\ntext\n\e[0m" end test "code block is converted" do result = format("line\n\n code\n code2\n\nline2\n") - assert result == "line\n\e[0m\n\e[36m\e[1m┃ code\n┃ code2\e[0m\n\e[0m\nline2\n\e[0m" + assert result == "line\n\e[0m\n\e[36m code\n code2\e[0m\n\e[0m\nline2\n\e[0m" + end + + test "fenced code block is converted" do + result = format("line\n```\ncode\ncode2\n```\nline2\n") + assert result == 
"line\n\e[0m\n\e[36m code\n code2\e[0m\n\e[0m\nline2\n\e[0m" + result = format("line\n```elixir\ncode\ncode2\n```\nline2\n") + assert result == "line\n\e[0m\n\e[36m code\n code2\e[0m\n\e[0m\nline2\n\e[0m" + result = format("line\n~~~elixir\ncode\n```\n~~~\nline2\n") + assert result == "line\n\e[0m\n\e[36m code\n ```\e[0m\n\e[0m\nline2\n\e[0m" end test "* list is converted" do result = format("* one\n* two\n* three\n") - assert result == "• one\n• two\n• three\n\e[0m" + assert result == " • one\n • two\n • three\n\e[0m" end test "* list surrounded by text is converted" do result = format("Count:\n\n* one\n* two\n* three\n\nDone") - assert result == "Count:\n\e[0m\n• one\n• two\n• three\n\e[0m\nDone\n\e[0m" + assert result == "Count:\n\e[0m\n • one\n • two\n • three\n\e[0m\nDone\n\e[0m" end test "* list with continuation is converted" do - result = format("* one\n two\n three\n* four") - assert result == "• one two three\n• four" + result = format("* one\ntwo\n\n three\nfour\n* five") + assert result == " • one two\n three four\n\e[0m\n • five\n\e[0m" end test "* nested lists are converted" do result = format("* one\n * one.one\n * one.two\n* two") - assert result == "• one\n • one.one\n • one.two\n• two" + assert result == " • one\n • one.one\n • one.two\n\e[0m\n • two\n\e[0m" end test "* lists with spaces are converted" do result = format(" * one\n * two\n * three") - assert result == "• one\n• two\n• three" - end - - test "- list is converted" do - result = format("- one\n- two\n- three\n") - assert result == "• one\n• two\n• three\n\e[0m" + assert result == " • one\n • two\n • three\n\e[0m" end - test "- list surrounded by text is converted" do - result = format("Count:\n\n- one\n- two\n- three\n\nDone") - assert result == "Count:\n\e[0m\n• one\n• two\n• three\n\e[0m\nDone\n\e[0m" + test "* lists with code" do + result = format(" * one\n two three") + assert result == " • one\n\e[36m two three\e[0m\n\e[0m\n\e[0m" end - test "- list with continuation is converted" do - result = format("- one\n two\n three\n- four") - assert result == "• one two three\n• four" + test "- list is converted" do + result = format("- one\n- two\n- three\n") + assert result == " • one\n • two\n • three\n\e[0m" end test "+ list is converted" do result = format("+ one\n+ two\n+ three\n") - assert result == "• one\n• two\n• three\n\e[0m" + assert result == " • one\n • two\n • three\n\e[0m" end test "+ and - nested lists are converted" do result = format("- one\n + one.one\n + one.two\n- two") - assert result == "• one\n • one.one\n • one.two\n• two" + assert result == " • one\n • one.one\n • one.two\n\e[0m\n • two\n\e[0m" end test "paragraphs are split" do @@ -101,13 +105,16 @@ defmodule IO.ANSI.DocsTest do test "extra whitespace doesn't mess up a following list" do result = format("para1\n \n* one\n* two") - assert result == "para1\n\e[0m\n• one\n• two" + assert result == "para1\n\e[0m\n • one\n • two\n\e[0m" end test "star/underscore/backtick works" do result = format("*world*") assert result == "\e[1mworld\e[0m\n\e[0m" + result = format("*world*.") + assert result == "\e[1mworld\e[0m.\n\e[0m" + result = format("**world**") assert result == "\e[1mworld\e[0m\n\e[0m" @@ -118,7 +125,7 @@ defmodule IO.ANSI.DocsTest do assert result == "\e[36mworld\e[0m\n\e[0m" end - test "star/underscore/backtick works accross words" do + test "star/underscore/backtick works across words" do result = format("*hello world*") assert result == "\e[1mhello world\e[0m\n\e[0m" @@ -132,7 +139,29 @@ defmodule IO.ANSI.DocsTest do assert 
result == "\e[36mhello world\e[0m\n\e[0m" end - test "star/underscore preceeded by space doesn't get interpreted" do + test "multiple stars/underscores/backticks work" do + result = format("*hello world* *hello world*") + assert result == "\e[1mhello world\e[0m \e[1mhello world\e[0m\n\e[0m" + + result = format("_hello world_ _hello world_") + assert result == "\e[4mhello world\e[0m \e[4mhello world\e[0m\n\e[0m" + + result = format("`hello world` `hello world`") + assert result == "\e[36mhello world\e[0m \e[36mhello world\e[0m\n\e[0m" + end + + test "multiple stars/underscores/backticks work when separated by other words" do + result = format("*hello world* unit test *hello world*") + assert result == "\e[1mhello world\e[0m unit test \e[1mhello world\e[0m\n\e[0m" + + result = format("_hello world_ unit test _hello world_") + assert result == "\e[4mhello world\e[0m unit test \e[4mhello world\e[0m\n\e[0m" + + result = format("`hello world` unit test `hello world`") + assert result == "\e[36mhello world\e[0m unit test \e[36mhello world\e[0m\n\e[0m" + end + + test "star/underscore preceded by space doesn't get interpreted" do result = format("_unit _size") assert result == "_unit _size\n\e[0m" @@ -143,7 +172,44 @@ defmodule IO.ANSI.DocsTest do assert result == "*unit *size\n\e[0m" end - test "backtick preceeded by space gets interpreted" do + test "star/underscore/backtick preceded by non-space delimiters gets interpreted" do + result = format("(`hello world`)") + assert result == "(\e[36mhello world\e[0m)\n\e[0m" + result = format("<`hello world`>") + assert result == "<\e[36mhello world\e[0m>\n\e[0m" + + result = format("(*hello world*)") + assert result == "(\e[1mhello world\e[0m)\n\e[0m" + result = format("@*hello world*@") + assert result == "@\e[1mhello world\e[0m@\n\e[0m" + + result = format("(_hello world_)") + assert result == "(\e[4mhello world\e[0m)\n\e[0m" + result = format("'_hello world_'") + assert result == "'\e[4mhello world\e[0m'\n\e[0m" + end + + test "star/underscore/backtick starts/ends within a word doesn't get interpreted" do + result = format("foo_bar, foo_bar_baz!") + assert result == "foo_bar, foo_bar_baz!\n\e[0m" + + result = format("_foo_bar") + assert result == "_foo_bar\n\e[0m" + + result = format("foo_bar_") + assert result == "foo_bar_\n\e[0m" + + result = format("foo*bar, foo*bar*baz!") + assert result == "foo*bar, foo*bar*baz!\n\e[0m" + + result = format("*foo*bar") + assert result == "*foo*bar\n\e[0m" + + result = format("foo*bar*") + assert result == "foo*bar*\n\e[0m" + end + + test "backtick preceded by space gets interpreted" do result = format("`unit `size") assert result == "\e[36munit \e[0msize\n\e[0m" end @@ -205,15 +271,50 @@ defmodule IO.ANSI.DocsTest do assert result == "\e[36m__world__\e[0m\n\e[0m" end - test "backtick works inside parenthesis" do - result = format("(`hello world`)") - assert result == "(\e[36mhello world\e[0m)\n\e[0m" + test "escaping of underlines within links" do + result = format("(https://en.wikipedia.org/wiki/ANSI_escape_code)") + assert result == "(https://en.wikipedia.org/wiki/ANSI_escape_code)\n\e[0m" + result = format("[ANSI escape code](https://en.wikipedia.org/wiki/ANSI_escape_code)") + assert result == "ANSI escape code (https://en.wikipedia.org/wiki/ANSI_escape_code)\n\e[0m" end - test "escaping of underlines within links" do - result = format("(http://en.wikipedia.org/wiki/ANSI_escape_code)") - assert result == "(http://en.wikipedia.org/wiki/ANSI_escape_code)\n\e[0m" - result = format("[ANSI escape 
code](http://en.wikipedia.org/wiki/ANSI_escape_code)") - assert result == "ANSI escape code (http://en.wikipedia.org/wiki/ANSI_escape_code)\n\e[0m" + test "escaping of underlines within links does not escape surrounding text" do + result = format("_emphasis_ (https://en.wikipedia.org/wiki/ANSI_escape_code) more _emphasis_") + assert result == "\e[4memphasis\e[0m (https://en.wikipedia.org/wiki/ANSI_escape_code) more \e[4memphasis\e[0m\n\e[0m" + end + + test "lone thing that looks like a table line isn't" do + assert format("one\n2 | 3\ntwo\n") == + "one 2 | 3 two\n\e[0m" + end + + test "lone table line at end of input isn't" do + assert format("one\n2 | 3") == + "one 2 | 3\n\e[0m" + end + + test "two successive table lines are a table" do + assert format("a | b\none | two\n") == + "a | b \none | two\n\e[0m" # note spacing + end + + test "table with heading" do + assert format("column 1 | and 2\n-- | --\na | b\none | two\n") == + "\e[7mcolumn 1 | and 2\e[0m\na | b \none | two \n\e[0m" + end + + test "table with formatting in cells" do + assert format("`a` | _b_\nc | d") == + "\e[36ma\e[0m | \e[4mb\e[0m\nc | d\n\e[0m" + end + + test "table with variable number of columns" do + assert format("a | b | c\nd | e") == + "a | b | c\nd | e | \n\e[0m" + end + + test "one reference link label per line" do + assert format(" [id]: //example.com\n [Elixir]: http://elixir-lang.org") == + " [id]: //example.com\n [Elixir]: http://elixir-lang.org" end end diff --git a/lib/elixir/test/elixir/io/ansi_test.exs b/lib/elixir/test/elixir/io/ansi_test.exs index f05a0679985..4d3a5da2542 100644 --- a/lib/elixir/test/elixir/io/ansi_test.exs +++ b/lib/elixir/test/elixir/io/ansi_test.exs @@ -3,52 +3,153 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule IO.ANSITest do use ExUnit.Case, async: true - test :escape_single do - assert IO.ANSI.escape("Hello, %{red}world!", true) == - "Hello, #{IO.ANSI.red}world!#{IO.ANSI.reset}" - assert IO.ANSI.escape("Hello, %{red}world!", true) == - "Hello, #{IO.ANSI.red}world!#{IO.ANSI.reset}" + doctest IO.ANSI + + test "format ansicode" do + assert IO.chardata_to_string(IO.ANSI.format(:green, true)) == + "#{IO.ANSI.green}#{IO.ANSI.reset}" + assert IO.chardata_to_string(IO.ANSI.format(:green, false)) == + "" + end + + test "format binary" do + assert IO.chardata_to_string(IO.ANSI.format("Hello, world!", true)) == + "Hello, world!" 
+ assert IO.chardata_to_string(IO.ANSI.format("A map: %{foo: :bar}", false)) == + "A map: %{foo: :bar}" end - test :escape_non_attribute do - assert IO.ANSI.escape("Hello %{clear}world!", true) == - "Hello #{IO.ANSI.clear}world!#{IO.ANSI.reset}" - assert IO.ANSI.escape("Hello %{home}world!", true) == - "Hello #{IO.ANSI.home}world!#{IO.ANSI.reset}" + test "format empty list" do + assert IO.chardata_to_string(IO.ANSI.format([], true)) == + "" + assert IO.chardata_to_string(IO.ANSI.format([], false)) == + "" end - test :escape_multiple do - assert IO.ANSI.escape("Hello, %{red,bright}world!", true) == - "Hello, #{IO.ANSI.red}#{IO.ANSI.bright}world!#{IO.ANSI.reset}" - assert IO.ANSI.escape("Hello, %{red, bright}world!", true) == - "Hello, #{IO.ANSI.red}#{IO.ANSI.bright}world!#{IO.ANSI.reset}" - assert IO.ANSI.escape("Hello, %{red , bright}world!", true) == - "Hello, #{IO.ANSI.red}#{IO.ANSI.bright}world!#{IO.ANSI.reset}" + test "format ansicode list" do + assert IO.chardata_to_string(IO.ANSI.format([:red, :bright], true)) == + "#{IO.ANSI.red}#{IO.ANSI.bright}#{IO.ANSI.reset}" + assert IO.chardata_to_string(IO.ANSI.format([:red, :bright], false)) == + "" end - test :no_emit do - assert IO.ANSI.escape("Hello, %{}world!", false) == + test "format binary list" do + assert IO.chardata_to_string(IO.ANSI.format(["Hello, ", "world!"], true)) == + "Hello, world!" + assert IO.chardata_to_string(IO.ANSI.format(["Hello, ", "world!"], false)) == + "Hello, world!" + end + + test "format charlist" do + assert IO.chardata_to_string(IO.ANSI.format('Hello, world!', true)) == + "Hello, world!" + assert IO.chardata_to_string(IO.ANSI.format('Hello, world!', false)) == "Hello, world!" + end + + test "format mixed list" do + data = ["Hello", ?,, 32, :red, "world!"] - assert IO.ANSI.escape("Hello, %{red,bright}world!", false) == + assert IO.chardata_to_string(IO.ANSI.format(data, true)) == + "Hello, #{IO.ANSI.red}world!#{IO.ANSI.reset}" + assert IO.chardata_to_string(IO.ANSI.format(data, false)) == "Hello, world!" end - test :fragment do - assert IO.ANSI.escape("%{red}", true) == "#{IO.ANSI.red}#{IO.ANSI.reset}" - assert IO.ANSI.escape_fragment("", true) == "" + test "format nested list" do + data = ["Hello, ", ["nested", 32, :red, "world!"]] + + assert IO.chardata_to_string(IO.ANSI.format(data, true)) == + "Hello, nested #{IO.ANSI.red}world!#{IO.ANSI.reset}" + assert IO.chardata_to_string(IO.ANSI.format(data, false)) == + "Hello, nested world!" end - test :noop do - assert IO.ANSI.escape("") == "" + test "format improper list" do + data = ["Hello, ", :red, "world" | "!"] + + assert IO.chardata_to_string(IO.ANSI.format(data, true)) == + "Hello, #{IO.ANSI.red}world!#{IO.ANSI.reset}" + assert IO.chardata_to_string(IO.ANSI.format(data, false)) == + "Hello, world!" + end + + test "format nested improper list" do + data = [["Hello, " | :red], "world!" | :green] + + assert IO.chardata_to_string(IO.ANSI.format(data, true)) == + "Hello, #{IO.ANSI.red}world!#{IO.ANSI.green}#{IO.ANSI.reset}" + assert IO.chardata_to_string(IO.ANSI.format(data, false)) == + "Hello, world!" + end + + test "format fragment" do + assert IO.chardata_to_string(IO.ANSI.format_fragment([:red, "Hello!"], true)) == + "#{IO.ANSI.red}Hello!" 
+ end + + test "format invalid sequence" do + assert_raise ArgumentError, "invalid ANSI sequence specification: :brigh", fn -> + IO.ANSI.format([:brigh, "Hello!"], true) + end + assert_raise ArgumentError, "invalid ANSI sequence specification: nil", fn -> + IO.ANSI.format(["Hello!", nil], true) + end + end + + test "colors" do + assert IO.ANSI.red == "\e[31m" + assert IO.ANSI.light_red == "\e[91m" + + assert IO.ANSI.red_background == "\e[41m" + assert IO.ANSI.light_red_background == "\e[101m" + end + + test "color/1" do + assert IO.ANSI.color(0) == "\e[38;5;0m" + assert IO.ANSI.color(42) == "\e[38;5;42m" + assert IO.ANSI.color(255) == "\e[38;5;255m" + assert_raise FunctionClauseError, fn() -> + IO.ANSI.color(-1) + end + assert_raise FunctionClauseError, fn() -> + IO.ANSI.color(256) + end + end + + test "color/3" do + assert IO.ANSI.color(0, 4, 2) == "\e[38;5;42m" + assert IO.ANSI.color(1, 1, 1) == "\e[38;5;59m" + assert IO.ANSI.color(5, 5, 5) == "\e[38;5;231m" + assert_raise FunctionClauseError, fn() -> + IO.ANSI.color(0, 6, 1) + end + assert_raise FunctionClauseError, fn() -> + IO.ANSI.color(5, -1, 1) + end + end + + test "color_background/1" do + assert IO.ANSI.color_background(0) == "\e[48;5;0m" + assert IO.ANSI.color_background(42) == "\e[48;5;42m" + assert IO.ANSI.color_background(255) == "\e[48;5;255m" + assert_raise FunctionClauseError, fn() -> + IO.ANSI.color_background(-1) + end + assert_raise FunctionClauseError, fn() -> + IO.ANSI.color_background(256) + end end - test :invalid do - assert_raise ArgumentError, "invalid ANSI sequence specification: brigh", fn -> - IO.ANSI.escape("%{brigh}, yes") + test "color_background/3" do + assert IO.ANSI.color_background(0, 4, 2) == "\e[48;5;42m" + assert IO.ANSI.color_background(1, 1, 1) == "\e[48;5;59m" + assert IO.ANSI.color_background(5, 5, 5) == "\e[48;5;231m" + assert_raise FunctionClauseError, fn() -> + IO.ANSI.color_background(0, 6, 1) end - assert_raise ArgumentError, "invalid ANSI sequence specification: brigh", fn -> - IO.ANSI.escape("%{brigh,red}, yes") + assert_raise FunctionClauseError, fn() -> + IO.ANSI.color_background(5, -1, 1) end end end diff --git a/lib/elixir/test/elixir/io_test.exs b/lib/elixir/test/elixir/io_test.exs index 7d46dbbf563..8557116a082 100644 --- a/lib/elixir/test/elixir/io_test.exs +++ b/lib/elixir/test/elixir/io_test.exs @@ -1,28 +1,37 @@ Code.require_file "test_helper.exs", __DIR__ defmodule IOTest do - use ExUnit.Case, async: true + use ExUnit.Case + + doctest IO + import ExUnit.CaptureIO - test :read_with_count do - {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__), [:char_list]) + test "read with count" do + {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__), [:charlist]) assert 'FOO' == IO.read(file, 3) assert File.close(file) == :ok end - test :read_with_utf8_and_binary do + test "read with UTF-8 and binary" do {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8]) assert "Русский" == IO.read(file, 7) assert File.close(file) == :ok end - test :binread do + test "binread" do {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__)) assert "Русский" == IO.binread(file, 14) assert File.close(file) == :ok end - test :getn do + test "binread all" do + {:ok, file} = File.open(Path.expand('fixtures/file.bin', __DIR__)) + assert "LF\nCR\rCRLF\r\nLFCR\n\r" == IO.binread(file, :all) + assert File.close(file) == :ok + end + + test "getn" do {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__)) assert "F" == IO.getn(file, "") assert "O" == 
IO.getn(file, "") @@ -32,78 +41,147 @@ defmodule IOTest do assert File.close(file) == :ok end - test :getn_with_count do - {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__), [:char_list]) - assert 'FOO' == IO.getn(file, "", 3) + test "getn with count" do + {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__), [:charlist]) + assert 'F' == IO.getn(file, "λ") + assert 'OO' == IO.getn(file, "", 2) + assert '\n' == IO.getn(file, "λ", 99) assert File.close(file) == :ok end - test :getn_with_utf8_and_binary do + test "getn with UTF-8 and binary" do {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8]) assert "Русский" == IO.getn(file, "", 7) assert File.close(file) == :ok end - test :gets do - {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__), [:char_list]) + test "gets" do + {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__), [:charlist]) assert 'FOO\n' == IO.gets(file, "") assert :eof == IO.gets(file, "") assert File.close(file) == :ok end - test :gets_with_utf8_and_binary do + test "gets with UTF-8 and binary" do {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8]) assert "Русский\n" == IO.gets(file, "") assert "日\n" == IO.gets(file, "") assert File.close(file) == :ok end - test :readline do + test "readall" do + {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__)) + assert "FOO\n" == IO.read(file, :all) + assert "" == IO.read(file, :all) + assert File.close(file) == :ok + end + + test "readall with UTF-8 and binary" do + {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8]) + assert "Русский\n日\n" == IO.read(file, :all) + assert "" == IO.read(file, :all) + assert File.close(file) == :ok + end + + test "readline" do {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__)) assert "FOO\n" == IO.read(file, :line) assert :eof == IO.read(file, :line) assert File.close(file) == :ok end - test :readline_with_utf8_and_binary do + test "readline with UTF-8 and binary" do {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8]) assert "Русский\n" == IO.read(file, :line) assert "日\n" == IO.read(file, :line) assert File.close(file) == :ok end - test :binreadline do + test "binreadall" do + {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__)) + assert "Русский\n日\n" == IO.binread(file, :all) + assert "" == IO.binread(file, :all) + assert File.close(file) == :ok + end + + test "binreadline" do {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__)) assert "Русский\n" == IO.binread(file, :line) assert "日\n" == IO.binread(file, :line) assert File.close(file) == :ok end - test :puts_with_chardata do + test "puts with chardata" do assert capture_io(fn -> IO.puts("hello") end) == "hello\n" assert capture_io(fn -> IO.puts('hello') end) == "hello\n" assert capture_io(fn -> IO.puts(:hello) end) == "hello\n" assert capture_io(fn -> IO.puts(13) end) == "13\n" end - test :write_with_chardata do + test "warn with chardata" do + assert capture_io(:stderr, fn -> IO.warn("hello") end) =~ "hello\n (ex_unit) lib/ex_unit" + assert capture_io(:stderr, fn -> IO.warn('hello') end) =~ "hello\n (ex_unit) lib/ex_unit" + assert capture_io(:stderr, fn -> IO.warn(:hello) end) =~ "hello\n (ex_unit) lib/ex_unit" + assert capture_io(:stderr, fn -> IO.warn(13) end) =~ "13\n (ex_unit) lib/ex_unit" + assert capture_io(:stderr, fn -> IO.warn("hello", []) end) =~ "hello\n" + stacktrace = [{IEx.Evaluator, :eval, 4, [file: 'lib/iex/evaluator.ex', line: 
108]}] + assert capture_io(:stderr, fn -> IO.warn("hello", stacktrace) end) =~ """ + hello + lib/iex/evaluator.ex:108: IEx.Evaluator.eval/4 + """ + end + + test "write with chardata" do assert capture_io(fn -> IO.write("hello") end) == "hello" assert capture_io(fn -> IO.write('hello') end) == "hello" assert capture_io(fn -> IO.write(:hello) end) == "hello" assert capture_io(fn -> IO.write(13) end) == "13" end - test :gets_with_chardata do + test "gets with chardata" do assert capture_io("foo\n", fn -> IO.gets("hello") end) == "hello" assert capture_io("foo\n", fn -> IO.gets('hello') end) == "hello" assert capture_io("foo\n", fn -> IO.gets(:hello) end) == "hello" assert capture_io("foo\n", fn -> IO.gets(13) end) == "13" end - test :getn_with_chardata do + test "getn with chardata" do assert capture_io("foo\n", fn -> IO.getn("hello", 3) end) == "hello" assert capture_io("foo\n", fn -> IO.getn('hello', 3) end) == "hello" assert capture_io("foo\n", fn -> IO.getn(:hello, 3) end) == "hello" assert capture_io("foo\n", fn -> IO.getn(13, 3) end) == "13" end + + test "getn with different arities" do + assert capture_io("hello", fn -> + input = IO.getn(">") + IO.write input + end) == ">h" + + assert capture_io("hello", fn -> + input = IO.getn(">", 3) + IO.write input + end) == ">hel" + + assert capture_io("hello", fn -> + input = IO.getn(Process.group_leader, ">") + IO.write input + end) == ">h" + + assert capture_io("hello", fn -> + input = IO.getn(Process.group_leader, ">") + IO.write input + end) == ">h" + + assert capture_io("hello", fn -> + input = IO.getn(Process.group_leader, ">", 99) + IO.write input + end) == ">hello" + end + + test "inspect" do + assert capture_io(fn -> IO.inspect(1) end) == "1\n" + assert capture_io(fn -> IO.inspect(1, label: "foo") end) == "foo: 1\n" + assert capture_io(fn -> IO.inspect(1, label: :foo) end) == "foo: 1\n" + end end diff --git a/lib/elixir/test/elixir/kernel/alias_test.exs b/lib/elixir/test/elixir/kernel/alias_test.exs index be552061d10..b9503df33e8 100644 --- a/lib/elixir/test/elixir/kernel/alias_test.exs +++ b/lib/elixir/test/elixir/kernel/alias_test.exs @@ -9,25 +9,25 @@ end defmodule Kernel.AliasTest do use ExUnit.Case, async: true - test :alias_erlang do + test "alias Erlang" do alias :lists, as: MyList assert MyList.flatten([1, [2], 3]) == [1, 2, 3] assert Elixir.MyList.Bar == :"Elixir.MyList.Bar" assert MyList.Bar == :"Elixir.lists.Bar" end - test :double_alias do + test "double alias" do alias Kernel.AliasTest.Nested, as: Nested2 - assert Nested.value == 1 + assert Nested.value == 1 assert Nested2.value == 1 end - test :overwriten_alias do - alias List, as: Nested + test "overwriten alias" do + assert alias(List, as: Nested) == List assert Nested.flatten([[13]]) == [13] end - test :lexical do + test "lexical" do if true do alias OMG, as: List, warn: false else @@ -41,9 +41,26 @@ defmodule Kernel.AliasTest do def sample, do: 1 end - test :nested_elixir_alias do + test "nested elixir alias" do assert Kernel.AliasTest.Elixir.sample == 1 end + + test "multi-call" do + result = alias unquote(Inspect).{ + Opts, Algebra, + } + assert result == [Inspect.Opts, Inspect.Algebra] + assert %Opts{} == %Inspect.Opts{} + assert Algebra.empty == :doc_nil + end + + test "alias removal" do + alias __MODULE__.Foo + assert Foo == __MODULE__.Foo + alias Elixir.Foo + assert Foo == Elixir.Foo + alias Elixir.Bar + end end defmodule Kernel.AliasNestingGenerator do @@ -66,7 +83,7 @@ defmodule Kernel.AliasNestingTest do require Kernel.AliasNestingGenerator 
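[Editor's note] For context on the "multi-call" alias test above: a single `alias` invocation can expand several modules under a shared prefix, and the call itself returns the list of expanded names. A brief sketch, with the module name `MultiAliasSketch` invented for illustration:

```elixir
defmodule MultiAliasSketch do
  # One call sets up both aliases and returns [Inspect.Opts, Inspect.Algebra].
  alias Inspect.{Opts, Algebra}

  def empty_doc do
    # Both short names now resolve to the fully qualified modules.
    {%Opts{}, Algebra.empty()}
  end
end
```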
Kernel.AliasNestingGenerator.create - test :aliases_nesting do + test "aliases nesting" do assert Parent.a == :a assert Parent.Child.b == :a end @@ -75,7 +92,7 @@ defmodule Kernel.AliasNestingTest do def value, do: 2 end - test :aliases_nesting_with_previous_alias do + test "aliases nesting with previous alias" do assert Nested.value == 2 end end diff --git a/lib/elixir/test/elixir/kernel/binary_test.exs b/lib/elixir/test/elixir/kernel/binary_test.exs index e6e7934dd16..ad16cd6e141 100644 --- a/lib/elixir/test/elixir/kernel/binary_test.exs +++ b/lib/elixir/test/elixir/kernel/binary_test.exs @@ -3,7 +3,7 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Kernel.BinaryTest do use ExUnit.Case, async: true - test :heredoc do + test "heredoc" do assert 7 == __ENV__.line assert "foo\nbar\n" == """ foo @@ -13,18 +13,18 @@ bar assert 13 == __ENV__.line assert "foo\nbar \"\"\"\n" == """ foo -bar """ +bar \""" """ end - test :aligned_heredoc do + test "aligned heredoc" do assert "foo\nbar\n" == """ foo bar """ end - test :heredoc_with_interpolation do + test "heredoc with interpolation" do assert "29\n" == """ #{__ENV__.line} """ @@ -35,22 +35,22 @@ bar """ """ end - test :heredoc_in_call do + test "heredoc in call" do assert "foo\nbar" == Kernel.<>(""" foo """, "bar") end - test :utf8 do + test "UTF-8" do assert byte_size(" ゆんゆん") == 13 end - test :utf8_char do + test "UTF-8 char" do assert ?ゆ == 12422 assert ?\ゆ == 12422 end - test :string_concatenation_as_match do + test "string concatenation as match" do "foo" <> x = "foobar" assert x == "bar" @@ -60,142 +60,155 @@ bar """ <<"f", "oo">> <> x = "foobar" assert x == "bar" - <> <> _ = "foobar" + <> <> _ = "foobar" assert x == "foo" size = 3 - <> <> _ = "foobar" + <> <> _ = "foobar" assert x == "foo" - <> <> _ = "foobar" + size = 16 + <> <> _ = "foobar" + assert x == 26223 + + <> <> _ = "foobar" assert x == "foo" - assert_raise ErlangError, fn -> - Code.eval_string(~s{<> <> _ = "foobar"}) - end + <> <> _ = "foobar" + assert x == "foo" - assert_raise ErlangError, fn -> - Code.eval_string(~s{<> <> _ = "foobar"}) + <> <> _ = "foobar" + assert x == "foo" + + assert_raise MatchError, fn -> + Code.eval_string(~s{<> <> _ = "foobar"}) end - end - test :octals do - assert "\1" == <<1>> - assert "\12" == "\n" - assert "\123" == "S" - assert "\123" == "S" - assert "\377" == "ÿ" - assert "\128" == "\n8" - assert "\18" == <<1, ?8>> + assert_raise MatchError, fn -> + Code.eval_string(~s{<> <> _ = "foobar"}) + end end - test :hex do - assert "\xa" == "\n" - assert "\xE9" == "é" - assert "\xFF" == "ÿ" - assert "\x{A}"== "\n" - assert "\x{E9}"== "é" - assert "\x{10F}" == <<196, 143>> - assert "\x{10FF}" == <<225, 131, 191>> - assert "\x{10FFF}" == <<240, 144, 191, 191>> - assert "\x{10FFFF}" == <<244, 143, 191, 191>> + test "hex" do + assert "\x76" == "v" + assert "\u00FF" == "ÿ" + assert "\u{A}"== "\n" + assert "\u{E9}"== "é" + assert "\u{10F}" == <<196, 143>> + assert "\u{10FF}" == <<225, 131, 191>> + assert "\u{10FFF}" == <<240, 144, 191, 191>> + assert "\u{10FFFF}" == <<244, 143, 191, 191>> end - test :match do - assert match?(<< ?a, _ :: binary >>, "ab") - refute match?(<< ?a, _ :: binary >>, "cd") - assert match?(<< _ :: utf8 >> <> _, "éf") + test "match" do + assert match?(<>, "ab") + refute match?(<>, "cd") + assert match?(<<_::utf8>> <> _, "éf") end - test :interpolation do + test "interpolation" do res = "hello \\abc" assert "hello #{"\\abc"}" == res assert "hello #{"\\abc" <> ""}" == res end - test :pattern_match do + test "pattern match" do s = 
16 assert <<_a, _b :: size(s)>> = "foo" end - test :pattern_match_with_splice do - assert << 1, <<2, 3, 4>>, 5 >> = <<1, 2, 3, 4, 5>> + test "pattern match with splice" do + assert <<1, <<2, 3, 4>>, 5>> = <<1, 2, 3, 4, 5>> end - test :partial_application do - assert (&<< &1, 2 >>).(1) == << 1, 2 >> - assert (&<< &1, &2 >>).(1, 2) == << 1, 2 >> - assert (&<< &2, &1 >>).(2, 1) == << 1, 2 >> + test "partial application" do + assert (&<<&1, 2>>).(1) == <<1, 2>> + assert (&<<&1, &2>>).(1, 2) == <<1, 2>> + assert (&<<&2, &1>>).(2, 1) == <<1, 2>> end - test :literal do - assert <<106,111,115,195,169>> == << "josé" :: binary >> - assert <<106,111,115,195,169>> == << "josé" :: bits >> - assert <<106,111,115,195,169>> == << "josé" :: bitstring >> - assert <<106,111,115,195,169>> == << "josé" :: bytes >> + test "literal" do + assert <<106, 111, 115, 195, 169>> == <<"josé">> + assert <<106, 111, 115, 195, 169>> == <<"#{:"josé"}">> + assert <<106, 111, 115, 195, 169>> == <<"josé"::binary>> + assert <<106, 111, 115, 195, 169>> == <<"josé"::bits>> + assert <<106, 111, 115, 195, 169>> == <<"josé"::bitstring>> + assert <<106, 111, 115, 195, 169>> == <<"josé"::bytes>> - assert <<106,111,115,195,169>> == << "josé" :: utf8 >> - assert <<0,106,0,111,0,115,0,233>> == << "josé" :: utf16 >> - assert <<106,0,111,0,115,0,233,0>> == << "josé" :: [utf16, little] >> - assert <<0,0,0,106,0,0,0,111,0,0,0,115,0,0,0,233>> == << "josé" :: utf32 >> + assert <<106, 111, 115, 195, 169>> == <<"josé"::utf8>> + assert <<0, 106, 0, 111, 0, 115, 0, 233>> == <<"josé"::utf16>> + assert <<106, 0, 111, 0, 115, 0, 233, 0>> == <<"josé"::little-utf16>> + assert <<0, 0, 0, 106, 0, 0, 0, 111, 0, 0, 0, 115, 0, 0, 0, 233>> == <<"josé"::utf32>> end - test :literal_errors do + test "literal errors" do assert_raise CompileError, fn -> - Code.eval_string(~s[<< "foo" :: integer >>]) + Code.eval_string(~s[<<"foo"::integer>>]) end assert_raise CompileError, fn -> - Code.eval_string(~s[<< "foo" :: float >>]) + Code.eval_string(~s[<<"foo"::float>>]) end assert_raise CompileError, fn -> - Code.eval_string(~s[<< 'foo' :: binary >>]) + Code.eval_string(~s[<<'foo'::binary>>]) end assert_raise ArgumentError, fn -> - Code.eval_string(~s[<<1::size(4)>> <> "foo"]) + Code.eval_string(~s[<<1::4>> <> "foo"]) end end + @bitstring <<"foo", 16::4>> + + test "bitstring attribute" do + assert @bitstring == <<"foo", 16::4>> + end + @binary "new " - test :bitsyntax_with_expansion do + test "bitsyntax expansion" do assert <<@binary, "world">> == "new world" end - test :bitsyntax_translation do + test "bitsyntax translation" do refb = "sample" sec_data = "another" - << byte_size(refb) :: [size(1), big, signed, integer, unit(8)], - refb :: binary, - byte_size(sec_data) :: [size(1), big, signed, integer, unit(16)], - sec_data :: binary >> + <> + end + + test "bitsyntax size shortcut" do + assert <<1::3>> == <<1::size(3)>> + assert <<1::3*8>> == <<1::size(3)-unit(8)>> end - test :bitsyntax_size_shorcut do - assert << 1 :: 3 >> == << 1 :: size(3) >> - assert << 1 :: [unit(8), 3] >> == << 1 :: [unit(8), size(3)] >> + test "bitsyntax variable size" do + x = 8 + assert <<_, _::size(x)>> = <> + assert (fn <<_, _::size(x)>> -> true end).(<>) end defmacrop signed_16 do quote do - [big, signed, integer, unit(16)] + big-signed-integer-unit(16) end end defmacrop refb_spec do quote do - [size(1), big, signed, integer, unit(8)] + 1*8-big-signed-integer end end - test :bitsyntax_macro do + test "bitsyntax macro" do refb = "sample" sec_data = "another" - << byte_size(refb) :: refb_spec, 
- refb :: binary, - byte_size(sec_data) :: [size(1), signed_16], - sec_data :: binary >> + <> end end diff --git a/lib/elixir/test/elixir/kernel/case_test.exs b/lib/elixir/test/elixir/kernel/case_test.exs deleted file mode 100644 index 9c9755c5564..00000000000 --- a/lib/elixir/test/elixir/kernel/case_test.exs +++ /dev/null @@ -1,66 +0,0 @@ -Code.require_file "../test_helper.exs", __DIR__ - -defmodule Kernel.CaseTest do - use ExUnit.Case, async: true - - test :inline_case do - assert (case 1, do: (1 -> :ok; 2 -> :wrong)) == :ok - end - - test :nested_variables do - assert vars_case(400, 1) == {400, 1} - assert vars_case(401, 1) == {400, -1} - assert vars_case(0, -1) == {0, -1} - assert vars_case(-1, -1) == {0, 1} - end - - test :nested_vars_match do - x = {:error, {:ok, :done}} - assert (case x do - {:ok, right} -> - right - {_left, right} -> - case right do - {:ok, right} -> right - end - end) == :done - end - - test :in_operator_outside_case do - x = 1 - y = 4 - assert x in [1, 2, 3], "in assertion" - assert not y in [1, 2, 3], "not in assertion" - end - - test :in_with_match do - refute 1.0 in [1, 2, 3], "not in assertion" - end - - test :in_cond_clause do - assert (cond do - format() && (f = format()) -> - f - true -> - :text - end) == :html - end - - defp format, do: :html - - defp vars_case(x, vx) do - case x > 400 do - true -> - x = 400 - vx = -vx - _ -> - case x < 0 do - true -> - x = 0 - vx = -vx - _ -> nil - end - end - {x, vx} - end -end diff --git a/lib/elixir/test/elixir/kernel/char_list_test.exs b/lib/elixir/test/elixir/kernel/char_list_test.exs deleted file mode 100644 index 7bbc381f2b2..00000000000 --- a/lib/elixir/test/elixir/kernel/char_list_test.exs +++ /dev/null @@ -1,45 +0,0 @@ -Code.require_file "../test_helper.exs", __DIR__ - -defmodule CharListTest do - use ExUnit.Case, async: true - - test :heredoc do - assert __ENV__.line == 7 - assert 'foo\nbar\n' == ''' -foo -bar -''' - - assert __ENV__.line == 13 - assert 'foo\nbar \'\'\'\n' == ''' -foo -bar ''' -''' - end - - test :utf8 do - assert length(' ゆんゆん') == 5 - end - - test :octals do - assert '\1' == [1] - assert '\12' == '\n' - assert '\123' == 'S' - assert '\123' == 'S' - assert '\377' == 'ÿ' - assert '\128' == '\n8' - assert '\18' == [1, ?8] - end - - test :hex do - assert '\xa' == '\n' - assert '\xE9' == 'é' - assert '\xfF' == 'ÿ' - assert '\x{A}' == '\n' - assert '\x{e9}' == 'é' - assert '\x{10F}' == [271] - assert '\x{10FF}' == [4351] - assert '\x{10FFF}' == [69631] - assert '\x{10FFFF}' == [1114111] - end -end diff --git a/lib/elixir/test/elixir/kernel/charlist_test.exs b/lib/elixir/test/elixir/kernel/charlist_test.exs new file mode 100644 index 00000000000..cc501820132 --- /dev/null +++ b/lib/elixir/test/elixir/kernel/charlist_test.exs @@ -0,0 +1,34 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule CharlistTest do + use ExUnit.Case, async: true + + test "heredoc" do + assert __ENV__.line == 7 + assert 'foo\nbar\n' == ''' + foo + bar + ''' + + assert __ENV__.line == 13 + assert 'foo\nbar \'\'\'\n' == ''' + foo + bar \'\'\' + ''' + end + + test "UTF-8" do + assert length(' ゆんゆん') == 5 + end + + test "hex" do + assert '\x76' == 'v' + assert '\u00fF' == 'ÿ' + assert '\u{A}' == '\n' + assert '\u{e9}' == 'é' + assert '\u{10F}' == [271] + assert '\u{10FF}' == [4351] + assert '\u{10FFF}' == [69631] + assert '\u{10FFFF}' == [1114111] + end +end diff --git a/lib/elixir/test/elixir/kernel/cli_test.exs b/lib/elixir/test/elixir/kernel/cli_test.exs index 65369f4d78e..80d79363b7d 100644 --- 
a/lib/elixir/test/elixir/kernel/cli_test.exs +++ b/lib/elixir/test/elixir/kernel/cli_test.exs @@ -40,17 +40,17 @@ defmodule Kernel.CLI.OptionParsingTest do use ExUnit.Case, async: true test "properly parses paths" do - root = fixture_path("../../..") |> to_char_list + root = fixture_path("../../..") |> to_charlist list = elixir('-pa "#{root}/*" -pz "#{root}/lib/*" -e "IO.inspect(:code.get_path, limit: :infinity)"') {path, _} = Code.eval_string list, [] # pa - assert to_char_list(Path.expand('ebin', root)) in path - assert to_char_list(Path.expand('lib', root)) in path - assert to_char_list(Path.expand('src', root)) in path + assert to_charlist(Path.expand('ebin', root)) in path + assert to_charlist(Path.expand('lib', root)) in path + assert to_charlist(Path.expand('src', root)) in path # pz - assert to_char_list(Path.expand('lib/list', root)) in path + assert to_charlist(Path.expand('lib/list', root)) in path end end @@ -58,8 +58,8 @@ defmodule Kernel.CLI.AtExitTest do use ExUnit.Case, async: true test "invokes at_exit callbacks" do - assert elixir(fixture_path("at_exit.exs") |> to_char_list) == - 'goodbye cruel world with status 0\n' + assert elixir(fixture_path("at_exit.exs") |> to_charlist) == + 'goodbye cruel world with status 1\n' end end @@ -68,99 +68,69 @@ defmodule Kernel.CLI.ErrorTest do test "properly format errors" do assert :string.str('** (throw) 1', elixir('-e "throw 1"')) == 0 - assert :string.str('** (ErlangError) erlang error: 1', elixir('-e "error 1"')) == 0 + assert :string.str('** (ErlangError) Erlang error: 1', elixir('-e "error 1"')) == 0 + assert elixir('-e "IO.puts(Process.flag(:trap_exit, false)); exit({:shutdown, 1})"') == 'false\n' + end - # It does not catch exits with integers nor strings... - assert elixir('-e "exit 1"') == '' + # TODO: Remove this check once we depend only on 20 + if :erlang.system_info(:otp_release) >= '20' do + test "blames exceptions" do + error = to_string elixir('-e "Access.fetch :foo, :bar"') + assert error =~ "** (FunctionClauseError) no function clause matching in Access.fetch/2" + assert error =~ "The following arguments were given to Access.fetch/2" + assert error =~ ":foo" + assert error =~ "def fetch(-%struct{} = container-, +key+)" + assert error =~ ~r"\(elixir\) lib/access\.ex:\d+: Access\.fetch/2" + end end end defmodule Kernel.CLI.CompileTest do use ExUnit.Case, async: true - test "compiles code" do - fixture = fixture_path "compile_sample.ex" - assert elixirc('#{fixture} -o #{tmp_path}') == '' - assert File.regular?(tmp_path "Elixir.CompileSample.beam") - after - File.rm(tmp_path("Elixir.CompileSample.beam")) - end - - test "compiles code with verbose mode" do + setup context do + # Set up a per-test temporary directory, so we can run these with async: true. + # We use the test's line number as the directory name, so they won't conflict. 
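[Editor's note] The comment above describes the pattern the new setup relies on: each test gets its own scratch directory derived from the test's line number, so the file-writing cases can safely run with `async: true`. Below is a stand-alone sketch of that pattern in plain ExUnit terms; the module name, directory layout, and the `on_exit` cleanup are illustrative additions, not taken from the diff, whose actual setup follows right after:

```elixir
defmodule PerTestTmpDirSketch do
  use ExUnit.Case, async: true

  setup context do
    # context[:line] is unique per test within a file, so concurrently
    # running tests never share a directory.
    dir = Path.join(System.tmp_dir!(), "sketch_#{context[:line]}")
    File.mkdir_p!(dir)
    on_exit(fn -> File.rm_rf!(dir) end)
    {:ok, tmp_dir: dir}
  end

  test "writes into its own directory", %{tmp_dir: dir} do
    path = Path.join(dir, "sample.txt")
    File.write!(path, "hello")
    assert File.read!(path) == "hello"
  end
end
```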
+ tmp_dir_path = tmp_path("beams/#{context[:line]}") + beam_file_path = Path.join([tmp_dir_path, "Elixir.CompileSample.beam"]) fixture = fixture_path "compile_sample.ex" - assert elixirc('#{fixture} -o #{tmp_path} --verbose') == - 'Compiled #{fixture}\n' - assert File.regular?(tmp_path "Elixir.CompileSample.beam") - after - File.rm(tmp_path("Elixir.CompileSample.beam")) - end - - test "fails on missing patterns" do - fixture = fixture_path "compile_sample.ex" - output = elixirc('#{fixture} non_existing.ex -o #{tmp_path}') - assert :string.str(output, 'non_existing.ex') > 0, "expected non_existing.ex to be mentionned" - assert :string.str(output, 'compile_sample.ex') == 0, "expected compile_sample.ex to not be mentionned" - refute File.exists?(tmp_path("Elixir.CompileSample.beam")) , "expected the sample to not be compiled" - end -end - -defmodule Kernel.CLI.ParallelCompilerTest do - use ExUnit.Case - import ExUnit.CaptureIO - - test "compiles files solving dependencies" do - fixtures = [fixture_path("parallel_compiler/bar.ex"), fixture_path("parallel_compiler/foo.ex")] - assert capture_io(fn -> - assert [Bar, Foo] = Kernel.ParallelCompiler.files fixtures - end) =~ "message_from_foo" - after - Enum.map [Foo, Bar], fn mod -> - :code.purge(mod) - :code.delete(mod) - end + File.mkdir_p!(tmp_dir_path) + {:ok, [tmp_dir_path: tmp_dir_path, beam_file_path: beam_file_path, fixture: fixture]} end - test "compiles files with structs solving dependencies" do - fixtures = [fixture_path("parallel_struct/bar.ex"), fixture_path("parallel_struct/foo.ex")] - assert [Bar, Foo] = Kernel.ParallelCompiler.files(fixtures) |> Enum.sort + test "compiles code", context do + assert elixirc('#{context[:fixture]} -o #{context[:tmp_dir_path]}') == '' + assert File.regular?(context[:beam_file_path]) + # Assert that the module is loaded into memory with the proper destination for the BEAM file. 
+ Code.append_path context[:tmp_dir_path] + assert :code.which(CompileSample) |> List.to_string == Path.expand(context[:beam_file_path]) after - Enum.map [Foo, Bar], fn mod -> - :code.purge(mod) - :code.delete(mod) - end + Code.delete_path context[:tmp_dir_path] end - test "does not hang on missing dependencies" do - fixtures = [fixture_path("parallel_compiler/bat.ex")] - assert capture_io(fn -> - assert catch_exit(Kernel.ParallelCompiler.files(fixtures)) == 1 - end) =~ "Compilation error" + test "fails on missing patterns", context do + output = elixirc('#{context[:fixture]} non_existing.ex -o #{context[:tmp_dir_path]}') + assert :string.str(output, 'non_existing.ex') > 0, "expected non_existing.ex to be mentioned" + assert :string.str(output, 'compile_sample.ex') == 0, "expected compile_sample.ex to not be mentioned" + refute File.exists?(context[:beam_file_path]), "expected the sample to not be compiled" end - test "handles possible deadlocks" do - fixtures = [fixture_path("parallel_deadlock/foo.ex"), - fixture_path("parallel_deadlock/bar.ex")] + test "fails on missing write access to .beam file", context do + compilation_args = '#{context[:fixture]} -o #{context[:tmp_dir_path]}' - msg = capture_io(fn -> - assert catch_exit(Kernel.ParallelCompiler.files fixtures) == 1 - end) - - assert msg =~ ~r"== Compilation error on file .+parallel_deadlock/foo\.ex ==" - assert msg =~ ~r"== Compilation error on file .+parallel_deadlock/bar\.ex ==" - end + assert elixirc(compilation_args) == '' + assert File.regular?(context[:beam_file_path]) - test "warnings as errors" do - warnings_as_errors = Code.compiler_options[:warnings_as_errors] - fixtures = [fixture_path("warnings_sample.ex")] + # Set the .beam file to read-only + File.chmod!(context[:beam_file_path], 4) - try do - Code.compiler_options(warnings_as_errors: true) + {:ok, %{access: access}} = File.stat(context[:beam_file_path]) - capture_io :stderr, fn -> - assert catch_exit(Kernel.ParallelCompiler.files fixtures) == 1 - end - after - Code.compiler_options(warnings_as_errors: warnings_as_errors) + # Can only assert when read-only applies to the user + if access != :read_write do + output = elixirc(compilation_args) + expected = '(File.Error) could not write to "' ++ String.to_charlist(context[:beam_file_path]) ++ '": permission denied' + assert :string.str(output, expected) > 0, "expected compilation error message due to not having write access" end end end diff --git a/lib/elixir/test/elixir/kernel/comprehension_test.exs b/lib/elixir/test/elixir/kernel/comprehension_test.exs index 1a731460407..f88f07deb38 100644 --- a/lib/elixir/test/elixir/kernel/comprehension_test.exs +++ b/lib/elixir/test/elixir/kernel/comprehension_test.exs @@ -6,8 +6,23 @@ defmodule Kernel.ComprehensionTest do import ExUnit.CaptureIO require Integer + defmodule Pdict do + defstruct [] + + defimpl Collectable do + def into(struct) do + {struct, + fn + _, {:cont, x} -> Process.put(:into_cont, [x | Process.get(:into_cont)]) + _, :done -> Process.put(:into_done, true) + _, :halt -> Process.put(:into_halt, true) + end} + end + end + end + defp to_bin(x) do - << x >> + <> end defp nilly, do: nil @@ -20,7 +35,32 @@ defmodule Kernel.ComprehensionTest do end test "for comprehensions with matching" do - assert for({_,x} <- 1..3, do: x * 2) == [] + assert for({_, x} <- 1..3, do: x * 2) == [] + end + + test "for comprehensions with pin matching" do + maps = [x: 1, y: 2, x: 3] + assert for({:x, v} <- maps, do: v * 2) == [2, 6] + x = :x + assert for({^x, v} <- maps, do: v * 2) == [2, 
6] + end + + test "for comprehensions with guards" do + assert for(x when x < 4 <- 1..10, do: x) == [1, 2, 3] + assert for(x when x == 3 when x == 7 <- 1..10, do: x) == [3, 7] + end + + test "for comprehensions with guards and filters" do + assert for({var, _} when is_atom(var) <- [{:foo, 1}, {2, :bar}], + var = Atom.to_string(var), + do: var) == ["foo"] + end + + test "for comprehensions with map key matching" do + maps = [%{x: 1}, %{y: 2}, %{x: 3}] + assert for(%{x: v} <- maps, do: v * 2) == [2, 6] + x = :x + assert for(%{^x => v} <- maps, do: v * 2) == [2, 6] end test "for comprehensions with filters" do @@ -28,7 +68,7 @@ defmodule Kernel.ComprehensionTest do end test "for comprehensions with nilly filters" do - assert for(x <- 1..3, nilly, do: x * 2) == [] + assert for(x <- 1..3, nilly(), do: x * 2) == [] end test "for comprehensions with errors on filters" do @@ -58,7 +98,7 @@ defmodule Kernel.ComprehensionTest do end test "for comprehensions with binary, enum generators and filters" do - assert (for x <- [1, 2, 3], << y <- <<4, 5, 6>> >>, y / 2 == x, do: x * y) == + assert (for x <- [1, 2, 3], <>)>>, y / 2 == x, do: x * y) == [8, 18] end @@ -86,7 +126,7 @@ defmodule Kernel.ComprehensionTest do Process.put(:into_done, false) Process.put(:into_halt, false) - for x <- 1..3, into: collectable_pdict do + for x <- 1..3, into: %Pdict{} do x * 2 end @@ -101,7 +141,7 @@ defmodule Kernel.ComprehensionTest do Process.put(:into_halt, false) catch_error( - for x <- 1..3, into: collectable_pdict do + for x <- 1..3, into: %Pdict{} do if x > 2, do: raise("oops"), else: x end ) @@ -114,21 +154,13 @@ defmodule Kernel.ComprehensionTest do test "for comprehension with into, generators and filters" do Process.put(:into_cont, []) - for x <- 1..3, Integer.odd?(x), << y <- "hello" >>, into: collectable_pdict do + for x <- 1..3, Integer.is_odd(x), <>, into: %Pdict{} do x + y end assert IO.iodata_to_binary(Process.get(:into_cont)) == "roohkpmmfi" end - defp collectable_pdict do - fn - _, {:cont, x} -> Process.put(:into_cont, [x|Process.get(:into_cont)]) - _, :done -> Process.put(:into_done, true) - _, :halt -> Process.put(:into_halt, true) - end - end - ## List generators (inlined by the compiler) test "list for comprehensions" do @@ -137,7 +169,13 @@ defmodule Kernel.ComprehensionTest do end test "list for comprehensions with matching" do - assert for({_,x} <- [1, 2, a: 3, b: 4, c: 5], do: x * 2) == [6, 8, 10] + assert for({_, x} <- [1, 2, a: 3, b: 4, c: 5], do: x * 2) == [6, 8, 10] + end + + test "list for comprehension matched to '_' on last line of block" do + assert (if true do + _ = for x <- [1, 2, 3], do: x * 2 + end) == [2, 4, 6] end test "list for comprehensions with filters" do @@ -145,7 +183,7 @@ defmodule Kernel.ComprehensionTest do end test "list for comprehensions with nilly filters" do - assert for(x <- [1, 2, 3], nilly, do: x * 2) == [] + assert for(x <- [1, 2, 3], nilly(), do: x * 2) == [] end test "list for comprehensions with errors on filters" do @@ -169,8 +207,13 @@ defmodule Kernel.ComprehensionTest do assert for(x <- enum, into: "", do: to_bin(x * 2)) == <<2, 4, 6>> end + test "map for comprehensions into map" do + enum = %{a: 2, b: 3} + assert for({k, v} <- enum, into: %{}, do: {k, v * v}) == %{a: 4, b: 9} + end + test "list for comprehensions where value is not used" do - enum = [1,2,3] + enum = [1, 2, 3] assert capture_io(fn -> for(x <- enum, do: IO.puts x) @@ -182,27 +225,33 @@ defmodule Kernel.ComprehensionTest do test "binary for comprehensions" do bin = <<1, 2, 3>> - assert 
for(<< x <- bin >>, do: x * 2) == [2, 4, 6] + assert for(<>, do: x * 2) == [2, 4, 6] end test "binary for comprehensions with inner binary" do bin = <<1, 2, 3>> - assert for(<< <> <- bin >>, do: x * 2) == [2, 4, 6] + assert for(<<(<>) <- bin>>, do: x * 2) == [2, 4, 6] end test "binary for comprehensions with two generators" do - assert (for << x <- <<1, 2, 3>> >>, << y <- <<4, 5, 6>> >>, y / 2 == x, do: x * y) == + assert (for <>)>>, <>)>>, y / 2 == x, do: x * y) == [8, 18] end test "binary for comprehensions into list" do bin = <<1, 2, 3>> - assert for(<< x <- bin >>, into: [], do: x * 2) == [2, 4, 6] + assert for(<>, into: [], do: x * 2) == [2, 4, 6] end test "binary for comprehensions into binaries" do bin = <<1, 2, 3>> - assert for(<< x <- bin >>, into: "", do: to_bin(x * 2)) == <<2, 4, 6>> + assert for(<>, into: "", do: to_bin(x * 2)) == <<2, 4, 6>> + end + + test "binary for comprehensions with variable size" do + s = 16 + bin = <<1, 2, 3, 4, 5, 6>> + assert for(<>, into: "", do: to_bin(div(x, 2))) == <<129, 130, 131>> end test "binary for comprehensions where value is not used" do diff --git a/lib/elixir/test/elixir/kernel/dialyzer_test.exs b/lib/elixir/test/elixir/kernel/dialyzer_test.exs new file mode 100644 index 00000000000..92f1cec392c --- /dev/null +++ b/lib/elixir/test/elixir/kernel/dialyzer_test.exs @@ -0,0 +1,123 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule Kernel.DialyzerTest do + use ExUnit.Case, async: true + + @moduletag :dialyzer + import PathHelpers + + setup_all do + dir = tmp_path("dialyzer") + File.rm_rf!(dir) + File.mkdir_p!(dir) + + plt = + dir + |> Path.join("base_plt") + |> String.to_charlist() + + # Some OSs (like Windows) do not provide the HOME environment variable. + unless System.get_env("HOME") do + System.put_env("HOME", System.user_home()) + end + + # Add a few key Elixir modules for types and macro functions + mods = [Kernel, String, Atom, Enum, Keyword, Exception, Macro, Macro.Env, :elixir_env] + files = Enum.map(mods, &:code.which/1) + dialyzer_run([analysis_type: :plt_build, output_plt: plt, + apps: [:erts], files: files]) + + # Compile Dialyzer fixtures + assert '' = elixirc("#{fixture_path("dialyzer")} -o #{dir}") + + {:ok, [base_dir: dir, base_plt: plt]} + end + + setup context do + # Set up a per-test temporary directory, so we can run these with async: true. + # We use the test's line number as the directory name, so they won't conflict. + dir = + context[:base_dir] + |> Path.join("line#{context[:line]}") + |> String.to_charlist() + File.mkdir_p!(dir) + + plt = + dir + |> Path.join("plt") + |> String.to_charlist() + File.cp!(context[:base_plt], plt) + + dialyzer = [analysis_type: :succ_typings, check_plt: false, + files_rec: [dir], plts: [plt]] + + {:ok, [outdir: dir, dialyzer: dialyzer]} + end + + test "no warnings on valid remote calls", context do + copy_beam! context, Dialyzer.RemoteCall + assert_dialyze_no_warnings! context + end + + test "no warnings on rewrites", context do + copy_beam! context, Dialyzer.Rewrite + assert_dialyze_no_warnings! context + end + + test "no warnings on raise", context do + copy_beam! context, Dialyzer.Raise + assert_dialyze_no_warnings! context + end + + test "no warnings on macrocallback", context do + copy_beam! context, Dialyzer.Macrocallback + copy_beam! context, Dialyzer.Macrocallback.Impl + assert_dialyze_no_warnings! context + end + + test "no warnings on struct update", context do + copy_beam! context, Dialyzer.StructUpdate + assert_dialyze_no_warnings! 
context + end + + test "no warnings on protocol calls with opaque types", context do + copy_beam! context, Dialyzer.ProtocolOpaque + copy_beam! context, Dialyzer.ProtocolOpaque.Entity + copy_beam! context, Dialyzer.ProtocolOpaque.Duck + assert_dialyze_no_warnings! context + end + + test "no warnings on and/2 and or/2", context do + copy_beam! context, Dialyzer.BooleanCheck + assert_dialyze_no_warnings! context + end + + test "no warnings on for falsey check that always boolean", context do + copy_beam! context, Dialyzer.ForBooleanCheck + assert_dialyze_no_warnings! context + end + + defp copy_beam!(context, module) do + name = "#{module}.beam" + File.cp! Path.join(context[:base_dir], name), + Path.join(context[:outdir], name) + end + + defp assert_dialyze_no_warnings!(context) do + case dialyzer_run(context[:dialyzer]) do + [] -> + :ok + warnings -> + flunk IO.chardata_to_string(for warn <- warnings, do: [:dialyzer.format_warning(warn), ?\n]) + end + end + + defp dialyzer_run(opts) do + try do + :dialyzer.run(opts) + catch + :throw, {:dialyzer_error, chardata} -> + raise "dialyzer error: " <> IO.chardata_to_string(chardata) + end + end +end diff --git a/lib/elixir/test/elixir/kernel/docs_test.exs b/lib/elixir/test/elixir/kernel/docs_test.exs index 1b0d55a4986..e7b1cf8b5cc 100644 --- a/lib/elixir/test/elixir/kernel/docs_test.exs +++ b/lib/elixir/test/elixir/kernel/docs_test.exs @@ -3,57 +3,125 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Kernel.DocsTest do use ExUnit.Case + test "attributes format" do + defmodule DocAttributes do + @moduledoc "Module doc" + assert @moduledoc == "Module doc" + assert Module.get_attribute(__MODULE__, :moduledoc) == {8, "Module doc"} + + @typedoc "Type doc" + assert @typedoc == "Type doc" + assert Module.get_attribute(__MODULE__, :typedoc) == {12, "Type doc"} + @type foobar :: any + + @doc "Function doc" + assert @doc == "Function doc" + assert Module.get_attribute(__MODULE__, :doc) == {17, "Function doc"} + def foobar() do + :ok + end + end + end + test "compiled with docs" do deftestmodule(SampleDocs) + docs = Code.get_docs(SampleDocs, :all) + assert Code.get_docs(SampleDocs, :docs) == docs[:docs] + assert Code.get_docs(SampleDocs, :moduledoc) == docs[:moduledoc] + assert Code.get_docs(SampleDocs, :type_docs) == docs[:type_docs] + assert Code.get_docs(SampleDocs, :callback_docs) == docs[:callback_docs] + + assert [{{:arg_names, 5}, _, :def, + [{:list1, [], Elixir}, + {:list2, [], Elixir}, + {:map1, [], Elixir}, + {:list3, [], Elixir}, + {:map2, [], Elixir}], nil}, + {{:foo, 1}, _, :def, [{:arg, [], nil}], "Function doc"}, + {{:foobar, 0}, _, :def, [], nil}, + {{:qux, 1}, _, :def, [{:bool, [], Elixir}], false}, + {{:with_defaults, 4}, _, :def, + [{:int, [], Elixir}, + {:\\, [], [{:arg, [], nil}, 0]}, + {:\\, [], [{:year, [], nil}, 2015]}, + {:\\, [], [{:fun, [], nil}, {:&, _, [{:/, _, [{:>=, _, nil}, 2]}]}]}], nil}] = docs[:docs] + + assert {_, "Module doc"} = docs[:moduledoc] + + assert [{{:bar, 1}, _, :opaque, "Opaque type doc"}, + {{:foo, 1}, _, :type, "Type doc"}] = docs[:type_docs] - assert [{{:fun, 2}, _, :def, [{:x, [], nil}, {:y, [], nil}], "This is fun!\n"}, - {{:nofun, 0}, _, :def, [], nil}, - {{:sneaky, 1}, _, :def, [{:bool1, [], Elixir}], false}] = docs[:docs] - assert {_, "Hello, I am a module"} = docs[:moduledoc] + assert [{{:bar, 0}, _, :callback, false}, + {{:baz, 2}, _, :callback, nil}, + {{:foo, 1}, _, :callback, "Callback doc"}, + {{:qux, 1}, _, :macrocallback, "Macrocallback doc"}] = docs[:callback_docs] end test "compiled 
without docs" do Code.compiler_options(docs: false) - deftestmodule(SampleNoDocs) + deftestmodule(WithoutSampleDocs) - assert Code.get_docs(SampleNoDocs, :docs) == nil - assert Code.get_docs(SampleNoDocs, :moduledoc) == nil + assert Code.get_docs(WithoutSampleDocs, :docs) == nil + assert Code.get_docs(WithoutSampleDocs, :moduledoc) == nil + assert Code.get_docs(WithoutSampleDocs, :type_docs) == nil + assert Code.get_docs(WithoutSampleDocs, :callback_docs) == nil after Code.compiler_options(docs: true) end test "compiled in memory does not have accessible docs" do - defmodule NoDocs do - @moduledoc "moduledoc" + defmodule WithoutDocs do + @moduledoc "Module doc" - @doc "Some example" - def example(var), do: var + @doc "Some doc" + def foobar(arg), do: arg end assert Code.get_docs(NoDocs, :docs) == nil assert Code.get_docs(NoDocs, :moduledoc) == nil + assert Code.get_docs(NoDocs, :callback_docs) == nil end defp deftestmodule(name) do import PathHelpers write_beam(defmodule name do - @moduledoc "Hello, I am a module" + @moduledoc "Module doc" - @doc """ - This is fun! - """ - def fun(x, y) do - {x, y} + @typedoc "Type doc" + @type foo(any) :: any + + @typedoc "Opaque type doc" + @opaque bar(any) :: any + + @doc "Callback doc" + @callback foo(any) :: any + + @doc false + @callback bar() :: term + + @callback baz(any, term) :: any + + @doc "Macrocallback doc" + @macrocallback qux(any) :: any + + @doc "Function doc" + def foo(arg) do + arg + 1 end @doc false - def sneaky(true), do: false + def qux(true), do: false + + def foobar(), do: nil + + def arg_names([], [], %{}, [], %{}), do: false - def nofun() do - 'not fun at all' + @year 2015 + def with_defaults(@year, arg \\ 0, year \\ @year, fun \\ &>=/2) do + {fun, arg + year} end end) end diff --git a/lib/elixir/test/elixir/kernel/errors_test.exs b/lib/elixir/test/elixir/kernel/errors_test.exs index 5e5fa194a7a..5092dc5b09d 100644 --- a/lib/elixir/test/elixir/kernel/errors_test.exs +++ b/lib/elixir/test/elixir/kernel/errors_test.exs @@ -2,24 +2,28 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Kernel.ErrorsTest do use ExUnit.Case, async: true - import CompileAssertion - defmodule UnproperMacro do - defmacro unproper(args), do: args - defmacro exit(args), do: args + defmacro hello do + quote location: :keep do + def hello, do: :world + end end - test :invalid_token do + test "invalid token" do assert_compile_fail SyntaxError, - "nofile:1: invalid token: \end", - '\end\nlol\nbarbecue' + "nofile:1: unexpected token: \"\u200B\" (column 7, codepoint U+200B)", + '[foo: \u200B]\noops' end - test :invalid_quoted_token do + test "invalid quoted token" do assert_compile_fail SyntaxError, "nofile:1: syntax error before: \"world\"", '"hello" "world"' + assert_compile_fail SyntaxError, + "nofile:1: syntax error before: Foobar", + '1 Foobar' + assert_compile_fail SyntaxError, "nofile:1: syntax error before: foo", 'Foo.:foo' @@ -33,13 +37,58 @@ defmodule Kernel.ErrorsTest do 'Foo.:"\#{:bar}"' end - test :invalid_or_reserved_codepoint do - assert_compile_fail ArgumentError, - "invalid or reserved unicode codepoint 55296", - '?\\x{D800}' + test "invalid identifier" do + message = fn name -> "nofile:1: invalid character \"@\" (codepoint U+0040) in identifier: #{name}" end + assert_compile_fail SyntaxError, message.("foo@"), 'foo@' + assert_compile_fail SyntaxError, message.("foo@"), 'foo@ ' + assert_compile_fail SyntaxError, message.("foo@bar"), 'foo@bar' + + message = fn name -> "nofile:1: invalid character \"@\" (codepoint U+0040) in alias: 
#{name}" end + assert_compile_fail SyntaxError, message.("Foo@"), 'Foo@' + assert_compile_fail SyntaxError, message.("Foo@bar"), 'Foo@bar' + + message = "nofile:1: invalid character \"!\" (codepoint U+0021) in alias: Foo!" + assert_compile_fail SyntaxError, message, 'Foo!' + + message = "nofile:1: invalid character \"?\" (codepoint U+003F) in alias: Foo?" + assert_compile_fail SyntaxError, message, 'Foo?' + + # TODO: Remove this check once we depend on OTP 20+ + if :erlang.system_info(:otp_release) >= '20' do + message = "invalid character \"ó\" (codepoint U+00F3) in alias (only ascii characters are allowed): Foó" + assert_compile_fail SyntaxError, message, 'Foó' + + message = """ + Elixir expects unquoted Unicode atoms and variables to be in NFC form. + + Got: + + "foó" (codepoints 0066 006F 006F 0301) + + Expected: + + "foó" (codepoints 0066 006F 00F3) + + """ + assert_compile_fail SyntaxError, message, :unicode.characters_to_nfd_list("foó") + end + end + + test "invalid fn" do + assert_compile_fail SyntaxError, + "nofile:1: expected clauses to be defined with -> inside: 'fn'", + 'fn 1 end' end - test :sigil_terminator do + test "kw missing space" do + msg = "nofile:1: keyword argument must be followed by space after: foo:" + + assert_compile_fail SyntaxError, msg, "foo:bar" + assert_compile_fail SyntaxError, msg, "foo:+" + assert_compile_fail SyntaxError, msg, "foo:+1" + end + + test "sigil terminator" do assert_compile_fail TokenMissingError, "nofile:3: missing terminator: \" (for sigil ~r\" starting at line 1)", '~r"foo\n\n' @@ -49,111 +98,143 @@ defmodule Kernel.ErrorsTest do '~r{foo\n\n' end - test :dot_terminator do + test "dot terminator" do assert_compile_fail TokenMissingError, "nofile:1: missing terminator: \" (for function name starting at line 1)", 'foo."bar' end - test :string_terminator do + test "string terminator" do assert_compile_fail TokenMissingError, "nofile:1: missing terminator: \" (for string starting at line 1)", '"bar' end - test :heredoc_start do + test "heredoc start" do assert_compile_fail SyntaxError, "nofile:1: heredoc start must be followed by a new line after \"\"\"", '"""bar\n"""' end - test :heredoc_terminator do + test "heredoc terminator" do assert_compile_fail TokenMissingError, "nofile:2: missing terminator: \"\"\" (for heredoc starting at line 1)", '"""\nbar' + assert_compile_fail SyntaxError, + "nofile:2: invalid location for heredoc terminator, please escape token or move it to its own line: \"\"\"", + '"""\nbar"""' end - test :unexpected_end do + test "unexpected end" do assert_compile_fail SyntaxError, "nofile:1: unexpected token: end", '1 end' end - test :syntax_error do + test "syntax error" do assert_compile_fail SyntaxError, "nofile:1: syntax error before: '.'", '+.foo' end - test :compile_error_on_op_ambiguity do - msg = "nofile:1: \"a -1\" looks like a function call but there is a variable named \"a\", " <> - "please use explicit parenthesis or even spaces" - assert_compile_fail CompileError, msg, 'a = 1; a -1' + test "syntax error before sigil" do + msg = fn x -> "nofile:1: syntax error before: sigil ~s starting with content '#{x}'" end + + assert_compile_fail SyntaxError, msg.("bar baz"), '~s(foo) ~s(bar baz)' + assert_compile_fail SyntaxError, msg.(""), '~s(foo) ~s()' + assert_compile_fail SyntaxError, msg.("bar "), '~s(foo) ~s(bar \#{:baz})' + assert_compile_fail SyntaxError, msg.(""), '~s(foo) ~s(\#{:bar} baz)' + end + test "op ambiguity" do max = 1 assert max == 1 assert (max 1, 2) == 2 end - test :syntax_error_on_parens_call do - 
msg = "nofile:1: unexpected parenthesis. If you are making a function call, do not " <> - "insert spaces in between the function name and the opening parentheses. " <> + test "syntax error with do" do + assert_compile_fail SyntaxError, + ~r/nofile:1: unexpected token "do"./, + 'if true, do\n' + + assert_compile_fail SyntaxError, + ~r/nofile:1: unexpected keyword "do:"./, + 'if true do:\n' + end + + test "syntax error on parens call" do + msg = "nofile:1: unexpected parentheses. If you are making a function call, do not " <> + "insert spaces between the function name and the opening parentheses. " <> "Syntax error before: '('" assert_compile_fail SyntaxError, msg, 'foo (hello, world)' - assert_compile_fail SyntaxError, msg, 'foo ()' - assert_compile_fail SyntaxError, msg, 'foo (), 1' end - test :syntax_error_on_nested_no_parens_call do - msg = "nofile:1: unexpected comma. Parentheses are required to solve ambiguity in " <> - "nested calls. Syntax error before: ','" + test "syntax error on nested no parens call" do + msg = "nofile:1: unexpected comma. Parentheses are required to solve ambiguity" assert_compile_fail SyntaxError, msg, '[foo 1, 2]' + assert_compile_fail SyntaxError, msg, '[foo bar 1, 2]' assert_compile_fail SyntaxError, msg, '[do: foo 1, 2]' assert_compile_fail SyntaxError, msg, 'foo(do: bar 1, 2)' assert_compile_fail SyntaxError, msg, '{foo 1, 2}' + assert_compile_fail SyntaxError, msg, '{foo bar 1, 2}' assert_compile_fail SyntaxError, msg, 'foo 1, foo 2, 3' + assert_compile_fail SyntaxError, msg, 'foo 1, @bar 3, 4' + assert_compile_fail SyntaxError, msg, 'foo 1, 2 + bar 3, 4' assert_compile_fail SyntaxError, msg, 'foo(1, foo 2, 3)' assert is_list List.flatten [1] assert is_list Enum.reverse [3, 2, 1], [4, 5, 6] assert is_list(Enum.reverse [3, 2, 1], [4, 5, 6]) + assert false || is_list Enum.reverse [3, 2, 1], [4, 5, 6] + assert [List.flatten List.flatten [1]] == [[1]] + + interpret = fn x -> Macro.to_string Code.string_to_quoted! x end + assert interpret.("f 1 + g h 2, 3") == "f(1 + g(h(2, 3)))" + assert interpret.("assert [] = TestRepo.all from p in Post, where: p.title in ^[]") == + "assert([] = TestRepo.all(from(p in Post, where: p.title() in ^[])))" end - test :syntax_error_with_no_token do + test "syntax error on atom dot alias" do + msg = "nofile:1: atom cannot be followed by an alias. If the '.' was meant to be " <> + "part of the atom's name, the atom name must be quoted. 
Syntax error before: '.'" + + assert_compile_fail SyntaxError, msg, ':foo.Bar' + assert_compile_fail SyntaxError, msg, ':"foo".Bar' + end + + test "syntax error with no token" do assert_compile_fail TokenMissingError, "nofile:1: missing terminator: ) (for \"(\" starting at line 1)", 'case 1 (' end - test :clause_with_defaults do + test "clause with defaults" do assert_compile_fail CompileError, - "nofile:3: def hello/1 has default values and multiple clauses, " <> - "define a function head with the defaults", + "nofile:3: definitions with multiple clauses and default values require a header", ~C''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.ClauseWithDefaults1 do def hello(arg \\ 0), do: nil def hello(arg \\ 1), do: nil end ''' - end - test :invalid_match_pattern do assert_compile_fail CompileError, - "nofile:2: invalid expression in match", - ''' - case true do - true && true -> true - end - ''' + "nofile:2: undefined function foo/0", + ~C''' + defmodule Kernel.ErrorsTest.ClauseWithDefaults3 do + def hello(foo, bar \\ foo()) + def hello(foo, bar), do: foo + bar + end + ''' end - test :different_defs_with_defaults do + test "different defs with defaults" do assert_compile_fail CompileError, "nofile:3: def hello/3 defaults conflicts with def hello/2", ~C''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.DifferentDefsWithDefaults1 do def hello(a, b \\ nil), do: a + b def hello(a, b \\ nil, c \\ nil), do: a + b + c end @@ -162,42 +243,24 @@ defmodule Kernel.ErrorsTest do assert_compile_fail CompileError, "nofile:3: def hello/2 conflicts with defaults from def hello/3", ~C''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.DifferentDefsWithDefaults2 do def hello(a, b \\ nil, c \\ nil), do: a + b + c def hello(a, b \\ nil), do: a + b end ''' end - test :bad_form do + test "bad form" do assert_compile_fail CompileError, - "nofile:2: function bar/0 undefined", + "nofile:2: undefined function bar/0", ''' - defmodule ErrorsTest do - def foo, do: bar + defmodule Kernel.ErrorsTest.BadForm do + def foo, do: bar() end ''' end - test :unbound_var do - assert_compile_fail CompileError, - "nofile:1: unbound variable ^x", - '^x = 1' - end - - test :unbound_not_match do - assert_compile_fail CompileError, - "nofile:1: cannot use ^x outside of match clauses", - '^x' - end - - test :unbound_expr do - assert_compile_fail CompileError, - "nofile:1: invalid argument for unary operator ^, expected an existing variable, got: ^x(1)", - '^x(1) = 1' - end - - test :literal_on_map_and_struct do + test "literal on map and struct" do assert_compile_fail SyntaxError, "nofile:1: syntax error before: '}'", '%{{:a, :b}}' @@ -205,44 +268,43 @@ defmodule Kernel.ErrorsTest do assert_compile_fail SyntaxError, "nofile:1: syntax error before: '{'", '%{:a, :b}{a: :b}' + + assert_compile_fail CompileError, + "nofile:1: expected key-value pairs in a map, got: put_in(foo.bar().baz(), nil)", + 'foo = 1; %{put_in(foo.bar.baz, nil), :bar}' end - test :struct_fields_on_defstruct do + test "struct fields on defstruct" do assert_compile_fail ArgumentError, "struct field names must be atoms, got: 1", ''' - defmodule TZ do + defmodule Kernel.ErrorsTest.StructFieldsOnDefstruct do defstruct [1, 2, 3] end ''' end - test :struct_access_on_body do + test "struct access on body" do assert_compile_fail CompileError, - "nofile:3: cannot access struct TZ in body of the module that defines it " <> - "as the struct fields are not yet accessible", + "nofile:3: cannot access struct Kernel.ErrorsTest.StructAccessOnBody, " <> 
+ "the struct was not yet defined or the struct " <> + "is being accessed in the same context that defines it", ''' - defmodule TZ do + defmodule Kernel.ErrorsTest.StructAccessOnBody do defstruct %{name: "Brasilia"} - %TZ{} + %Kernel.ErrorsTest.StructAccessOnBody{} end ''' end - test :unbound_map_key_var do - assert_compile_fail CompileError, - "nofile:1: illegal use of variable x in map key", - '%{x => 1} = %{}' - + test "struct errors" do assert_compile_fail CompileError, - "nofile:1: illegal use of variable x in map key", - '%{x = 1 => 1}' - end + "nofile:1: BadStruct.__struct__/1 is undefined, cannot expand struct BadStruct", + '%BadStruct{}' - test :struct_errors do assert_compile_fail CompileError, "nofile:1: BadStruct.__struct__/0 is undefined, cannot expand struct BadStruct", - '%BadStruct{}' + '%BadStruct{} = %{}' defmodule BadStruct do def __struct__ do @@ -252,88 +314,113 @@ defmodule Kernel.ErrorsTest do assert_compile_fail CompileError, "nofile:1: expected Kernel.ErrorsTest.BadStruct.__struct__/0 to return a map, got: []", - '%#{BadStruct}{}' + '%#{BadStruct}{} = %{}' defmodule GoodStruct do - def __struct__ do - %{name: "josé"} - end + defstruct name: "john" end + assert_compile_fail KeyError, + "key :age not found in: %Kernel.ErrorsTest.GoodStruct{name: \"john\"}", + '%#{GoodStruct}{age: 27}' + assert_compile_fail CompileError, "nofile:1: unknown key :age for struct Kernel.ErrorsTest.GoodStruct", - '%#{GoodStruct}{age: 27}' + '%#{GoodStruct}{age: 27} = %{}' end - test :name_for_defmodule do + test "name for defmodule" do assert_compile_fail CompileError, "nofile:1: invalid module name: 3", 'defmodule 1 + 2, do: 3' end - test :invalid_unquote do + test "invalid unquote" do assert_compile_fail CompileError, "nofile:1: unquote called outside quote", 'unquote 1' end - test :invalid_quote_args do - assert_compile_fail CompileError, - "nofile:1: invalid arguments for quote", - 'quote 1' + test "invalid unquote splicing in oneliners" do + assert_compile_fail ArgumentError, + "unquote_splicing only works inside arguments and block contexts, " <> + "wrap it in parens if you want it to work with one-liners", + ''' + defmodule Kernel.ErrorsTest.InvalidUnquoteSplicingInOneliners do + defmacro oneliner2 do + quote do: unquote_splicing 1 + end + + def callme do + oneliner2 + end + end + ''' end - test :invalid_calls do + test "undefined non-local function" do assert_compile_fail CompileError, - "nofile:1: invalid call foo(1)(2)", - 'foo(1)(2)' + "nofile:1: undefined function call/2", + 'call foo, do: :foo' + end - assert_compile_fail CompileError, - "nofile:1: invalid call 1.foo()", - '1.foo' + test "invalid attribute" do + msg = ~r"cannot inject attribute @foo into function/macro because cannot escape " + assert_raise ArgumentError, msg, fn -> + defmodule InvalidAttribute do + @foo fn -> nil end + def bar, do: @foo + end + end end - test :unhandled_stab do - assert_compile_fail CompileError, - "nofile:3: unhandled operator ->", - ''' - defmodule Mod do - def fun do - casea foo, do: (bar -> baz) - end + test "invalid struct field value" do + msg = ~r"invalid value for struct field baz, cannot escape " + assert_raise ArgumentError, msg, fn -> + defmodule InvaliadStructFieldValue do + defstruct baz: fn -> nil end end - ''' + end end - test :undefined_non_local_function do - assert_compile_fail CompileError, - "nofile:1: undefined function casea/2", - 'casea foo, do: 1' + test "match attribute in module" do + msg = "invalid write attribute syntax, you probably meant to use: @foo expression" 
+ assert_raise ArgumentError, msg, fn -> + defmodule MatchAttributeInModule do + @foo = 42 + end + end end - test :invalid_fn_args do + test "invalid fn args" do assert_compile_fail TokenMissingError, "nofile:1: missing terminator: end (for \"fn\" starting at line 1)", 'fn 1' end - test :function_local_conflict do + test "invalid escape" do + assert_compile_fail TokenMissingError, + "nofile:1: invalid escape \\ at end of file", + '1 \\' + end + + test "function local conflict" do assert_compile_fail CompileError, - "nofile:1: imported Kernel.&&/2 conflicts with local function", + "nofile:3: imported Kernel.&&/2 conflicts with local function", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.FunctionLocalConflict do def other, do: 1 && 2 def _ && _, do: :error end ''' end - test :macro_local_conflict do + test "macro local conflict" do assert_compile_fail CompileError, "nofile:6: call to local macro &&/2 conflicts with imported Kernel.&&/2, " <> "please rename the local macro or remove the conflicting import", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.MacroLocalConflict do def hello, do: 1 || 2 defmacro _ || _, do: :ok @@ -343,34 +430,35 @@ defmodule Kernel.ErrorsTest do ''' end - test :macro_with_undefined_local do + test "macro with undefined local" do assert_compile_fail UndefinedFunctionError, - "undefined function: ErrorsTest.unknown/1", + "function Kernel.ErrorsTest.MacroWithUndefinedLocal.unknown/1" <> + " is undefined (function unknown/1 is not available)", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.MacroWithUndefinedLocal do defmacrop bar, do: unknown(1) def baz, do: bar() end ''' end - test :private_macro do + test "private macro" do assert_compile_fail UndefinedFunctionError, - "undefined function: ErrorsTest.foo/0", + "function Kernel.ErrorsTest.PrivateMacro.foo/0 is undefined (function foo/0 is not available)", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.PrivateMacro do defmacrop foo, do: 1 defmacro bar, do: __MODULE__.foo - defmacro baz, do: bar + defmacro baz, do: bar() end ''' end - test :function_definition_with_alias do + test "function definition with alias" do assert_compile_fail CompileError, "nofile:2: function names should start with lowercase characters or underscore, invalid name Bar", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.FunctionDefinitionWithAlias do def Bar do :baz end @@ -378,324 +466,356 @@ defmodule Kernel.ErrorsTest do ''' end - test :function_import_conflict do + test "function import conflict" do assert_compile_fail CompileError, "nofile:3: function exit/1 imported from both :erlang and Kernel, call is ambiguous", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.FunctionImportConflict do import :erlang, warn: false def foo, do: exit(:test) end ''' end - test :import_invalid_macro do + test "unrequired macro" do assert_compile_fail CompileError, - "nofile:1: cannot import Kernel.invalid/1 because it doesn't exist", - 'import Kernel, only: [invalid: 1]' - end - - test :unrequired_macro do - assert_compile_fail SyntaxError, - "nofile:2: you must require Kernel.ErrorsTest.UnproperMacro before invoking " <> - "the macro Kernel.ErrorsTest.UnproperMacro.unproper/1 " + "nofile:2: you must require Kernel.ErrorsTest before invoking " <> + "the macro Kernel.ErrorsTest.hello/0", ''' - defmodule ErrorsTest do - Kernel.ErrorsTest.UnproperMacro.unproper([]) + defmodule Kernel.ErrorsTest.UnrequiredMacro do + Kernel.ErrorsTest.hello() end ''' end - test 
:def_defmacro_clause_change do + test "def defmacro clause change" do assert_compile_fail CompileError, "nofile:3: defmacro foo/1 already defined as def", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.DefDefmacroClauseChange do def foo(1), do: 1 defmacro foo(x), do: x end ''' end - test :internal_function_overridden do + test "def defp clause change from another file" do assert_compile_fail CompileError, - "nofile:1: function __info__/1 is internal and should not be overridden", + ~r"nofile:4: def hello/0 already defined as defp", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.DefDefmacroClauseChange do + require Kernel.ErrorsTest + defp hello, do: :world + Kernel.ErrorsTest.hello() + end + ''' + end + + test "internal function overridden" do + assert_compile_fail CompileError, + "nofile:2: cannot define def __info__/1 as it is automatically defined by Elixir", + ''' + defmodule Kernel.ErrorsTest.InternalFunctionOverridden do def __info__(_), do: [] end ''' end - test :no_macros do + test "no macros" do assert_compile_fail CompileError, "nofile:2: could not load macros from module :lists", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.NoMacros do import :lists, only: :macros end ''' end - test :invalid_macro do + test "invalid macro" do assert_compile_fail CompileError, "nofile: invalid quoted expression: {:foo, :bar, :baz, :bat}", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.InvalidMacro do defmacrop oops do {:foo, :bar, :baz, :bat} end - def test, do: oops + def test, do: oops() end ''' end - test :unloaded_module do + test "unloaded module" do assert_compile_fail CompileError, "nofile:1: module Certainly.Doesnt.Exist is not loaded and could not be found", 'import Certainly.Doesnt.Exist' end - test :scheduled_module do + test "module imported from the context it was defined in" do + assert_compile_fail CompileError, + ~r"nofile:4: module Kernel.ErrorsTest.ScheduledModule.Hygiene is not loaded but was defined.", + ''' + defmodule Kernel.ErrorsTest.ScheduledModule do + defmodule Hygiene do + end + import Kernel.ErrorsTest.ScheduledModule.Hygiene + end + ''' + end + + test "module imported from the same module" do assert_compile_fail CompileError, - "nofile:4: module ErrorsTest.Hygiene is not loaded but was defined. " <> - "This happens because you are trying to use a module in the same context it is defined. 
" <> - "Try defining the module outside the context that requires it.", + ~r"nofile:3: you are trying to use the module Kernel.ErrorsTest.ScheduledModule.Hygiene which is currently being defined", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.ScheduledModule do defmodule Hygiene do + import Kernel.ErrorsTest.ScheduledModule.Hygiene end - import ErrorsTest.Hygiene end ''' end - test :already_compiled_module do + test "already compiled module" do assert_compile_fail ArgumentError, - "could not call eval_quoted on module Record " <> - "because it was already compiled", + "could not call eval_quoted with argument Record " <> + "because the module is already compiled", 'Module.eval_quoted Record, quote(do: 1), [], file: __ENV__.file' end - test :interpolation_error do + test "doc attributes format" do + message = + "expected the moduledoc attribute to be {line, doc} (where \"doc\" is " <> + "a binary, a boolean, or nil), got: \"Other\"" + assert_raise ArgumentError, message, fn -> + defmodule DocAttributesFormat do + Module.put_attribute(__MODULE__, :moduledoc, "Other") + end + end + + message = "expected the moduledoc attribute to contain a binary, a boolean, or nil, got: :not_a_binary" + assert_raise ArgumentError, message, fn -> + defmodule AtSyntaxDocAttributesFormat do + @moduledoc :not_a_binary + end + end + end + + test "@on_load attribute format" do + message = "expected the @on_load attribute to be an atom or a {atom, 0} tuple, got: \"not an atom\"" + assert_raise ArgumentError, message, fn -> + defmodule BadOnLoadAttribute do + Module.put_attribute(__MODULE__, :on_load, "not an atom") + end + end + end + + test "interpolation error" do assert_compile_fail SyntaxError, - "nofile:1: \"do\" starting at line 1 is missing terminator \"end\". Unexpected token: )", + "nofile:1: \"do\" is missing terminator \"end\". 
unexpected token: \")\" at line 1", '"foo\#{case 1 do )}bar"' end - test :in_definition_module do + test "in definition module" do assert_compile_fail CompileError, - "nofile:1: cannot define module ErrorsTest because it is currently being defined in nofile:1", - 'defmodule ErrorsTest, do: (defmodule Elixir.ErrorsTest, do: true)' + "nofile:2: cannot define module Kernel.ErrorsTest.InDefinitionModule " <> + "because it is currently being defined in nofile:1", + ''' + defmodule Kernel.ErrorsTest.InDefinitionModule do + defmodule Elixir.Kernel.ErrorsTest.InDefinitionModule, do: true + end + ''' end - test :invalid_definition do + test "invalid definition" do assert_compile_fail CompileError, "nofile:1: invalid syntax in def 1.(hello)", - 'defmodule ErrorsTest, do: (def 1.(hello), do: true)' + 'defmodule Kernel.ErrorsTest.InvalidDefinition, do: (def 1.(hello), do: true)' end - test :duplicated_bitstring_size do + test "invalid pin in definition" do assert_compile_fail CompileError, - "nofile:1: duplicated size definition in bitstring", - '<<1 :: [size(12), size(13)]>>' + "nofile:1: cannot use ^hello on function/macro definition as there are no previous variables", + 'defmodule Kernel.ErrorsTest.InvalidDefinition, do: (def foo(^hello), do: :ok)' end - test :invalid_bitstring_specified do + test "invalid size in bitstrings" do assert_compile_fail CompileError, - "nofile:1: unknown bitstring specifier :atom", - '<<1 :: :atom>>' - - assert_compile_fail CompileError, - "nofile:1: unknown bitstring specifier unknown()", - '<<1 :: unknown>>' - - assert_compile_fail CompileError, - "nofile:1: unknown bitstring specifier another(12)", - '<<1 :: another(12)>>' - - assert_compile_fail CompileError, - "nofile:1: size in bitstring expects an integer or a variable as argument, got: :a", - '<<1 :: size(:a)>>' - - assert_compile_fail CompileError, - "nofile:1: unit in bitstring expects an integer as argument, got: :x", - '<<1 :: unit(:x)>>' + "nofile:1: cannot use ^x outside of match clauses", + 'x = 8; <> = <>' end - test :invalid_var! do - assert_compile_fail CompileError, - "nofile:1: expected var x to expand to an existing variable or be a part of a match", - 'var!(x)' - end + test "end of expression" do + # All valid examples + Code.eval_quoted ''' + 1; + 2; + 3 - test :invalid_alias do - assert_compile_fail CompileError, - "nofile:1: invalid value for keyword :as, expected an alias, got nested alias: Sample.Lists", - 'alias :lists, as: Sample.Lists' - end + (;) + (;1) + (1;) + (1; 2) - test :invalid_import_option do - assert_compile_fail CompileError, - "nofile:1: unsupported option :ops given to import", - 'import :lists, [ops: 1]' - end + fn -> 1; 2 end + fn -> ; end - test :invalid_rescue_clause do - assert_compile_fail CompileError, - "nofile:4: invalid rescue clause. 
The clause should match on an alias, a variable or be in the `var in [alias]` format", - 'try do\n1\nrescue\n%UndefinedFunctionError{arity: 1} -> false\nend' - end + if true do + ; + end - test :invalid_for_without_generators do - assert_compile_fail CompileError, - "nofile:1: for comprehensions must start with a generator", - 'for x, do: x' - end + try do + ; + catch + _, _ -> ; + after + ; + end + ''' - test :invalid_for_bit_generator do - assert_compile_fail CompileError, - "nofile:1: bitstring fields without size are not allowed in bitstring generators", - 'for << x :: binary <- "123" >>, do: x' - end + # All invalid examples + assert_compile_fail SyntaxError, + "nofile:1: syntax error before: ';'", + '1+;\n2' - test :unbound_cond do - assert_compile_fail CompileError, - "nofile:1: unbound variable _ inside cond. If you want the last clause to always match, " <> - "you probably meant to use: true ->", - 'cond do _ -> true end' + assert_compile_fail SyntaxError, + "nofile:1: syntax error before: ';'", + 'max(1, ;2)' end - test :fun_different_arities do - assert_compile_fail CompileError, - "nofile:1: cannot mix clauses with different arities in function definition", - 'fn x -> x; x, y -> x + y end' + test "new line error" do + assert_compile_fail SyntaxError, + "nofile:3: unexpectedly reached end of line. The current expression is invalid or incomplete", + 'if true do\n foo = [],\n baz\nend' end - test :new_line_error do + # As reported and discussed in + # https://github.com/elixir-lang/elixir/issues/4419. + test "characters literal are printed correctly in syntax errors" do assert_compile_fail SyntaxError, - "nofile:3: syntax error before: newline", - 'if true do\n foo = [],\n baz\nend' + "nofile:1: syntax error before: ?a", + ':ok ?a' + assert_compile_fail SyntaxError, + "nofile:1: syntax error before: ?\\s", + ':ok ?\\s' + assert_compile_fail SyntaxError, + "nofile:1: syntax error before: ?す", + ':ok ?す' end - test :invalid_var_or_function_on_guard do - assert_compile_fail CompileError, - "nofile:2: unknown variable something_that_does_not_exist or " <> - "cannot invoke function something_that_does_not_exist/0 inside guard", - ''' - case [] do - [] when something_that_does_not_exist == [] -> :ok - end - ''' + test "invalid \"fn do expr end\"" do + assert_compile_fail SyntaxError, + "nofile:1: unexpected token \"do\". 
Anonymous functions are written as:\n\n" <> + " fn pattern -> expression end\n\n" <> + "Syntax error before: do", + 'fn do :ok end' end - test :bodyless_function_with_guard do + test "bodyless function with guard" do assert_compile_fail CompileError, - "nofile:2: missing do keyword in def", + "nofile:2: missing :do option in \"def\"", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.BodyessFunctionWithGuard do def foo(n) when is_number(n) end ''' end - test :invalid_args_for_bodyless_clause do + test "invalid args for bodyless clause" do assert_compile_fail CompileError, - "nofile:2: can use only variables and \\\\ as arguments of bodyless clause", + "nofile:2: only variables and \\\\ are allowed as arguments in definition header.", ''' - defmodule ErrorsTest do - def foo(arg // nil) + defmodule Kernel.ErrorsTest.InvalidArgsForBodylessClause do + def foo(nil) def foo(_), do: :ok end ''' end - test :invalid_function_on_match do - assert_compile_fail CompileError, - "nofile:1: cannot invoke function something_that_does_not_exist/0 inside match", - 'case [] do; something_that_does_not_exist() -> :ok; end' - end - - test :invalid_remote_on_match do - assert_compile_fail CompileError, - "nofile:1: cannot invoke remote function Hello.something_that_does_not_exist/0 inside match", - 'case [] do; Hello.something_that_does_not_exist() -> :ok; end' - end - - test :invalid_remote_on_guard do - assert_compile_fail CompileError, - "nofile:1: cannot invoke remote function Hello.something_that_does_not_exist/0 inside guard", - 'case [] do; [] when Hello.something_that_does_not_exist == [] -> :ok; end' - end - - test :typespec_errors do + test "typespec errors" do assert_compile_fail CompileError, "nofile:2: type foo() undefined", ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.TypespecErrors1 do @type omg :: foo end ''' - assert_compile_fail CompileError, - "nofile:2: spec for undefined function ErrorsTest.omg/0", + message = "nofile:2: spec for undefined function omg/0" + assert_compile_fail CompileError, message, ''' - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.TypespecErrors2 do @spec omg :: atom end ''' end - test :bad_unquoting do + test "bad multi-call" do assert_compile_fail CompileError, - "nofile: invalid quoted expression: {:foo, 0, 1}", - ''' - defmodule ErrorsTest do - def range(unquote({:foo, 0, 1})), do: :ok - end - ''' + "nofile:1: invalid argument for alias, expected a compile time atom or alias, got: 42", + 'alias IO.{ANSI, 42}' + + assert_compile_fail CompileError, + "nofile:1: :as option is not supported by multi-alias call", + 'alias Elixir.{Map}, as: Dict' + + assert_compile_fail UndefinedFunctionError, + "function List.\"{}\"/1 is undefined or private", + '[List.{Chars}, "one"]' end - test :macros_error_stacktrace do - assert [{:erlang, :+, [1, :foo], _}, {ErrorsTest, :sample, 1, _}|_] = + test "macros error stacktrace" do + assert [{:erlang, :+, [1, :foo], _}, + {Kernel.ErrorsTest.MacrosErrorStacktrace, :sample, 1, _} | _] = rescue_stacktrace(""" - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.MacrosErrorStacktrace do defmacro sample(num), do: num + :foo def other, do: sample(1) end """) end - test :macros_function_clause_stacktrace do - assert [{__MODULE__, :sample, 1, _}|_] = + test "macros function clause stacktrace" do + assert [{__MODULE__, :sample, 1, _} | _] = rescue_stacktrace(""" - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.MacrosFunctionClauseStacktrace do import Kernel.ErrorsTest sample(1) end """) end - test 
:macros_interpreted_function_clause_stacktrace do - assert [{ErrorsTest, :sample, 1, _}|_] = + test "macros interpreted function clause stacktrace" do + assert [{Kernel.ErrorsTest.MacrosInterpretedFunctionClauseStacktrace, :sample, 1, _} | _] = rescue_stacktrace(""" - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.MacrosInterpretedFunctionClauseStacktrace do defmacro sample(0), do: 0 def other, do: sample(1) end """) end - test :macros_compiled_callback do - assert [{Kernel.ErrorsTest, :__before_compile__, [%Macro.Env{module: ErrorsTest}], _}|_] = + test "macros compiled callback" do + assert [{Kernel.ErrorsTest, :__before_compile__, [%Macro.Env{module: Kernel.ErrorsTest.MacrosCompiledCallback}], _} | _] = rescue_stacktrace(""" - defmodule ErrorsTest do + defmodule Kernel.ErrorsTest.MacrosCompiledCallback do Module.put_attribute(__MODULE__, :before_compile, Kernel.ErrorsTest) end """) end + test "failed remote call stacktrace includes file/line info" do + try do + bad_remote_call(1) + rescue + ArgumentError -> + stack = System.stacktrace + assert [{:erlang, :apply, [1, :foo, []], []}, + {__MODULE__, :bad_remote_call, 1, [file: _, line: _]} | _] = stack + end + end + + defp bad_remote_call(x), do: x.foo + defmacro sample(0), do: 0 defmacro before_compile(_) do @@ -704,14 +824,40 @@ defmodule Kernel.ErrorsTest do ## Helpers + defp assert_compile_fail(given_exception, string) do + case format_rescue(string) do + {^given_exception, _, _} -> :ok + {exception, _, _} -> + raise ExUnit.AssertionError, + left: inspect(exception), + right: inspect(given_exception), + message: "Expected match" + end + end + + defp assert_compile_fail(given_exception, given_message, string) do + {exception, message, _} = format_rescue(string) + + unless exception == given_exception and message =~ given_message do + raise ExUnit.AssertionError, + left: "#{inspect exception}[message: #{inspect message}]", + right: "#{inspect given_exception}[message: #{inspect given_message}]", + message: "Expected match" + end + end + defp rescue_stacktrace(expr) do + expr |> format_rescue() |> elem(2) + end + + defp format_rescue(expr) do result = try do - :elixir.eval(to_char_list(expr), []) + :elixir.eval(to_charlist(expr), []) nil rescue - _ -> System.stacktrace + error -> {error.__struct__, Exception.message(error), System.stacktrace} end - result || raise(ExUnit.AssertionError, message: "Expected function given to rescue_stacktrace to fail") + result || flunk("Expected expression to fail") end end diff --git a/lib/elixir/test/elixir/kernel/expansion_test.exs b/lib/elixir/test/elixir/kernel/expansion_test.exs index ca6048bd386..0441f083bbc 100644 --- a/lib/elixir/test/elixir/kernel/expansion_test.exs +++ b/lib/elixir/test/elixir/kernel/expansion_test.exs @@ -2,443 +2,1173 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Kernel.ExpansionTarget do defmacro seventeen, do: 17 + defmacro bar, do: "bar" end defmodule Kernel.ExpansionTest do - use ExUnit.Case, async: true + use ExUnit.Case, async: false - ## __block__ + describe "__block__" do + test "expands to nil when empty" do + assert expand(quote do: __block__()) == nil + end - test "__block__: expands to nil when empty" do - assert expand(quote do: __block__()) == nil - end + test "expands to argument when arity is 1" do + assert expand(quote do: __block__(1)) == 1 + end - test "__block__: expands to argument when arity is 1" do - assert expand(quote do: __block__(1)) == 1 - end + test "is recursive to argument when arity is 1" do + assert expand(quote do: 
__block__(_ = 1, __block__(2))) == quote do: __block__(_ = 1, 2) + end - test "__block__: is recursive to argument when arity is 1" do - assert expand(quote do: __block__(1, __block__(2))) == quote do: __block__(1, 2) + test "accumulates vars" do + assert expand(quote(do: (a = 1; a))) == quote do: (a = 1; a) + end end - test "__block__: accumulates vars" do - assert expand(quote(do: (a = 1; a))) == quote do: (a = 1; a) - end + describe "alias" do + test "expand args, defines alias and returns itself" do + alias true, as: True - ## alias + input = quote do: (alias :hello, as: World, warn: True) + {output, env} = expand_env(input, __ENV__) - test "alias: expand args, defines alias and returns itself" do - alias true, as: True + assert output == :hello + assert env.aliases == [{:"Elixir.True", true}, {:"Elixir.World", :hello}] + end - input = quote do: (alias :hello, as: World, warn: True) - {output, env} = expand_env(input, __ENV__) + test "invalid alias" do + assert_raise CompileError, ~r"invalid value for option :as, expected a simple alias, got nested alias: Sample.Lists", fn -> + expand(quote do: (alias :lists, as: Sample.Lists)) + end - assert output == quote do: (alias :hello, as: :"Elixir.World", warn: true) - assert env.aliases == [{:"Elixir.True", true}, {:"Elixir.World", :hello}] - end + assert_raise CompileError, ~r"invalid argument for alias, expected a compile time atom or alias, got: 1 \+ 2", fn -> + expand(quote do: (alias 1 + 2)) + end - ## __aliases__ + assert_raise CompileError, ~r"invalid value for option :as, expected an alias, got: :foobar", fn -> + expand(quote do: (alias :lists, as: :foobar)) + end + end - test "__aliases__: expands even if no alias" do - assert expand(quote do: World) == :"Elixir.World" - assert expand(quote do: Elixir.World) == :"Elixir.World" - end + test "invalid expansion" do + assert_raise CompileError, ~r"invalid alias: \"foo\.Foo\"", fn -> + expand(quote do: (foo = :foo; foo.Foo)) + end + end - test "__aliases__: expands with alias" do - alias Hello, as: World - assert expand_env(quote(do: World), __ENV__) |> elem(0) == :"Elixir.Hello" - end + test "raises if :as is passed to multi-alias aliases" do + assert_raise CompileError, ~r":as option is not supported by multi-alias call", fn -> + expand(quote do: (alias Foo.{Bar, Baz}, as: BarBaz)) + end + end - test "__aliases__: expands with alias is recursive" do - alias Source, as: Hello - alias Hello, as: World - assert expand_env(quote(do: World), __ENV__) |> elem(0) == :"Elixir.Source" + test "invalid options" do + assert_raise CompileError, ~r"unsupported option :ops given to alias", fn -> + expand(quote do: (alias Foo, ops: 1)) + end + end end - test "__aliases__: expands to elixir_aliases on runtime" do - assert expand(quote do: hello.World) == - quote do: :elixir_aliases.concat([hello(), :World]) - end + describe "__aliases__" do + test "expands even if no alias" do + assert expand(quote do: World) == :"Elixir.World" + assert expand(quote do: Elixir.World) == :"Elixir.World" + end - ## = + test "expands with alias" do + alias Hello, as: World + assert expand_env(quote(do: World), __ENV__) |> elem(0) == :"Elixir.Hello" + end - test "=: sets context to match" do - assert expand(quote do: __ENV__.context = :match) == quote do: :match = :match + test "expands with alias is recursive" do + alias Source, as: Hello + alias Hello, as: World + assert expand_env(quote(do: World), __ENV__) |> elem(0) == :"Elixir.Source" + end end - test "=: defines vars" do - {output, env} = expand_env(quote(do: a = 1), 
__ENV__) - assert output == quote(do: a = 1) - assert {:a, __MODULE__} in env.vars - end + describe "import" do + test "raises on invalid macro" do + assert_raise CompileError, + ~r"cannot import Kernel.invalid/1 because it is undefined or private", + fn -> expand(quote do: (import Kernel, only: [invalid: 1])) end + end - test "=: does not carry rhs imports" do - assert expand(quote(do: flatten([1,2,3]) = import List)) == - quote(do: flatten([1,2,3]) = import :"Elixir.List", []) - end + test "raises on invalid options" do + assert_raise CompileError, + ~r"invalid :only option for import, expected a keyword list with integer values", + fn -> expand(quote do: (import Kernel, only: [invalid: nil])) end - test "=: does not define _" do - {output, env} = expand_env(quote(do: _ = 1), __ENV__) - assert output == quote(do: _ = 1) - assert env.vars == [] - end + assert_raise CompileError, + ~r"invalid :except option for import, expected a keyword list with integer values", + fn -> expand(quote do: (import Kernel, except: [invalid: nil])) end - ## Pseudo vars + assert_raise CompileError, + ~r/invalid options for import, expected a keyword list, got: "invalid_options"/, + fn -> expand(quote do: (import Kernel, "invalid_options")) end + end - test "__MODULE__" do - assert expand(quote do: __MODULE__) == __MODULE__ - end + test "raises on conflicting options" do + assert_raise CompileError, + ~r":only and :except can only be given together to import when :only is either :functions or :macros", + fn -> expand(quote do: (import Kernel, only: [], except: [])) end + end - test "__DIR__" do - assert expand(quote do: __DIR__) == __DIR__ - end + test "invalid import option" do + assert_raise CompileError, + ~r"unsupported option :ops given to import", + fn -> expand(quote do: (import :lists, [ops: 1])) end + end - test "__CALLER__" do - assert expand(quote do: __CALLER__) == quote do: __CALLER__ + test "raises for non-compile-time module" do + assert_raise CompileError, ~r"invalid argument for import, .*, got: {:a, :tuple}", fn -> + expand(quote do: (import {:a, :tuple})) + end + end end - test "__ENV__" do - env = %{__ENV__ | line: 0} - assert expand_env(quote(do: __ENV__), env) == - {{:%{}, [], Map.to_list(env)}, env} - end + describe "require" do + test "raises for non-compile-time module" do + assert_raise CompileError, ~r"invalid argument for require, .*, got: {:a, :tuple}", fn -> + expand(quote do: (require {:a, :tuple})) + end + end - test "__ENV__.accessor" do - env = %{__ENV__ | line: 0} - assert expand_env(quote(do: __ENV__.file), env) == {__ENV__.file, env} - assert expand_env(quote(do: __ENV__.unknown), env) == - {quote(do: unquote({:%{}, [], Map.to_list(env)}).unknown), env} + test "invalid options" do + assert_raise CompileError, ~r"unsupported option :ops given to require", fn -> + expand(quote do: (require Foo, ops: 1)) + end + end end - ## Super + describe "=" do + test "sets context to match" do + assert expand(quote do: __ENV__.context = :match) == quote do: :match = :match + end + + test "defines vars" do + {output, env} = expand_env(quote(do: a = 1), __ENV__) + assert output == quote(do: a = 1) + assert {:a, __MODULE__} in env.vars + end - test "super: expand args" do - assert expand(quote do: super(a, b)) == quote do: super(a(), b()) + test "does not define _" do + {output, env} = expand_env(quote(do: _ = 1), __ENV__) + assert output == quote(do: _ = 1) + assert env.vars == [] + end end - ## Vars + describe "environment macros" do + test "__MODULE__" do + assert expand(quote do: 
__MODULE__) == __MODULE__ + end - test "vars: expand to local call" do - {output, env} = expand_env(quote(do: a), __ENV__) - assert output == quote(do: a()) - assert env.vars == [] - end + test "__DIR__" do + assert expand(quote do: __DIR__) == __DIR__ + end - test "vars: forces variable to exist" do - assert expand(quote do: (var!(a) = 1; var!(a))) + test "__CALLER__" do + assert expand(quote do: __CALLER__) == quote do: __CALLER__ + end - message = ~r"expected var a to expand to an existing variable or be a part of a match" - assert_raise CompileError, message, fn -> expand(quote do: var!(a)) end + test "__ENV__" do + env = %{__ENV__ | line: 0} + assert expand_env(quote(do: __ENV__), env) == + {{:%{}, [], Map.to_list(env)}, env} + end - message = ~r"expected var a \(context Unknown\) to expand to an existing variable or be a part of a match" - assert_raise CompileError, message, fn -> expand(quote do: var!(a, Unknown)) end + test "__ENV__.accessor" do + env = %{__ENV__ | line: 0} + assert expand_env(quote(do: __ENV__.file), env) == {__ENV__.file, env} + assert expand_env(quote(do: __ENV__.unknown), env) == + {quote(do: unquote({:%{}, [], Map.to_list(env)}).unknown), env} + end end - test "^: expands args" do - assert expand(quote do: ^a = 1) == quote do: ^a = 1 - end + describe "vars" do + test "expand to local call" do + {output, env} = expand_env(quote(do: a), __ENV__) + assert output == quote(do: a()) + assert env.vars == [] + end + + test "forces variable to exist" do + assert expand(quote do: (var!(a) = 1; var!(a))) - test "^: raises outside match" do - assert_raise CompileError, ~r"cannot use \^a outside of match clauses", fn -> - expand(quote do: ^a) + message = ~r"expected variable \"a\" to expand to an existing variable or be part of a match" + assert_raise CompileError, message, fn -> expand(quote do: var!(a)) end + + message = ~r"expected variable \"a\" \(context Unknown\) to expand to an existing variable or be part of a match" + assert_raise CompileError, message, fn -> expand(quote do: var!(a, Unknown)) end + end + + test "raises for _ used outside of a match" do + assert_raise CompileError, ~r"unbound variable _", fn -> + expand(quote do: {1, 2, _}) + end end end - test "^: raises without var" do - assert_raise CompileError, ~r"invalid argument for unary operator \^, expected an existing variable, got: \^1", fn -> - expand(quote do: ^1 = 1) + describe "^" do + test "expands args" do + assert expand(quote do: (a = 1; ^a = 1)) == quote do: (a = 1; ^a = 1) + end + + test "raises outside match" do + assert_raise CompileError, ~r"cannot use \^a outside of match clauses", fn -> + expand(quote do: ^a) + end + end + + test "raises without var" do + assert_raise CompileError, ~r"invalid argument for unary operator \^, expected an existing variable, got: \^1", fn -> + expand(quote do: ^1 = 1) + end + end + + test "raises when the var is undefined" do + assert_raise CompileError, ~r"unbound variable \^foo", fn -> + expand(quote do: ^foo = :foo) + end end end - ## Locals + describe "locals" do + test "expands to remote calls" do + assert {{:., _, [Kernel, :=~]}, _, [{:a, _, []}, {:b, _, []}]} = + expand(quote do: a =~ b) + end + + test "in matches" do + assert_raise CompileError, ~r"cannot invoke local foo/1 inside match, called as: foo\(:bar\)", fn -> + expand(quote do: foo(:bar) = :bar) + end + end + + test "in guards" do + assert expand(quote(do: fn pid when :erlang.==(pid, self) -> pid end)) |> clean_meta([:import, :context]) == + quote(do: fn pid when :erlang.==(pid, 
:erlang.self()) -> pid end) + + assert_raise CompileError, ~r"cannot invoke local foo/1 inside guard, called as: foo\(arg\)", fn -> + expand(quote do: fn arg when foo(arg) -> arg end) + end + end - test "locals: expands to remote calls" do - assert {{:., _, [Kernel, :=~]}, _, [{:a, _, []}, {:b, _, []}]} = - expand(quote do: a =~ b) + test "custom imports" do + assert expand(quote do: (import Kernel.ExpansionTarget; seventeen)) == + quote do: (:"Elixir.Kernel.ExpansionTarget"; 17) + end end - test "locals: expands to configured local" do - assert expand_env(quote(do: a), %{__ENV__ | local: Hello}) |> elem(0) == - quote(do: :"Elixir.Hello".a()) + describe "tuples" do + test "expanded as arguments" do + assert expand(quote(do: {a = 1, a})) == quote do: {a = 1, a()} + assert expand(quote(do: {b, a = 1, a})) == quote do: {b(), a = 1, a()} + end end - test "locals: in guards" do - assert expand(quote(do: fn pid when :erlang.==(pid, self) -> pid end)) == - quote(do: fn pid when :erlang.==(pid, :erlang.self()) -> pid end) + describe "maps" do + test "expanded as arguments" do + assert expand(quote(do: %{a: a = 1, b: a})) == quote do: %{a: a = 1, b: a()} + end + + test "with variables on keys" do + assert expand(quote(do: %{x = 1 => 1})) == + quote(do: %{x = 1 => 1}) + + assert_raise CompileError, + ~r"illegal use of variable x inside map key match,", + fn -> expand(quote do: (%{x => 1} = %{})) end + + assert_raise CompileError, + ~r"unbound variable \^x", + fn -> expand(quote do: ({x, %{^x => 1}} = %{})) end + end + + test "expects key-value pairs" do + assert_raise CompileError, ~r"expected key-value pairs in a map, got: :foo", fn -> + expand(quote do: unquote({:%{}, [], [:foo]})) + end + end end - test "locals: custom imports" do - assert expand(quote do: (import Kernel.ExpansionTarget; seventeen)) == - quote do: (import :"Elixir.Kernel.ExpansionTarget", []; 17) + defmodule User do + defstruct name: "", age: 0 end - ## Tuples + describe "structs" do + test "expanded as arguments" do + assert expand(quote(do: %User{})) == + quote do: %:"Elixir.Kernel.ExpansionTest.User"{age: 0, name: ""} - test "tuples: expanded as arguments" do - assert expand(quote(do: {a = 1, a})) == quote do: {a = 1, a()} - assert expand(quote(do: {b, a = 1, a})) == quote do: {b(), a = 1, a()} - end + assert expand(quote(do: %User{name: "john doe"})) == + quote do: %:"Elixir.Kernel.ExpansionTest.User"{age: 0, name: "john doe"} + end - ## Maps & structs + test "expects atoms" do + expand(quote do: %unknown{a: 1} = x) - test "maps: expanded as arguments" do - assert expand(quote(do: %{a: a = 1, b: a})) == quote do: %{a: a = 1, b: a()} - end + assert_raise CompileError, ~r"expected struct name to be a compile time atom or alias", fn -> + expand(quote do: %unknown{a: 1}) + end - test "structs: expanded as arguments" do - assert expand(quote(do: %:elixir{a: a = 1, b: a})) == - quote do: %:elixir{a: a = 1, b: a()} + assert_raise CompileError, ~r"expected struct name to be a compile time atom or alias", fn -> + expand(quote do: %unquote(1){a: 1}) + end - assert expand(quote(do: %:"Elixir.Kernel"{a: a = 1, b: a})) == - quote do: %:"Elixir.Kernel"{a: a = 1, b: a()} - end + assert_raise CompileError, ~r"expected struct name in a match to be a compile time atom, alias or a variable", fn -> + expand(quote do: %unquote(1){a: 1} = x) + end + end + + test "update syntax" do + expand(quote do: %{%{a: 0} | a: 1}) - test "structs: expects atoms" do - assert_raise CompileError, ~r"expected struct name to be a compile time atom or alias", fn -> 
- expand(quote do: %unknown{a: 1}) + assert_raise CompileError, ~r"cannot use map/struct update syntax in match", fn -> + expand(quote do: %{%{a: 0} | a: 1} = %{}) + end end - end - ## quote + test "dynamic syntax expands to itself" do + assert expand(quote do: (%x{} = 1)) == quote do: (%x{} = 1) + end - test "quote: expanded to raw forms" do - assert expand(quote do: (quote do: hello)) == {:{}, [], [:hello, [], __MODULE__]} + test "unknown ^keys in structs" do + message = ~r"unknown key \^my_key for struct Kernel\.ExpansionTest\.User" + assert_raise CompileError, message, fn -> + expand(quote do + my_key = :my_key + %User{^my_key => :my_value} = %{} + end) + end + end end - ## Anonymous calls + describe "quote" do + test "expanded to raw forms" do + assert expand(quote do: (quote do: hello)) == {:{}, [], [:hello, [], __MODULE__]} + end - test "anonymous calls: expands base and args" do - assert expand(quote do: a.(b)) == quote do: a().(b()) - end + test "raises if the :context option is nil or not a compile-time module" do + assert_raise CompileError, ~r"invalid :context for quote, .*, got: :erlang\.self\(\)", fn -> + expand(quote do: (quote context: self(), do: :ok)) + end - test "anonymous calls: raises on atom base" do - assert_raise CompileError, ~r"invalid function call :foo.()", fn -> - expand(quote do: :foo.(a)) + assert_raise CompileError, ~r"invalid :context for quote, .*, got: nil", fn -> + expand(quote do: (quote context: nil, do: :ok)) + end end - end - ## Remote calls + test "raises for missing do" do + assert_raise CompileError, ~r"missing :do option in \"quote\"", fn -> + expand(quote do: (quote context: Foo)) + end + end - test "remote calls: expands to erlang" do - assert expand(quote do: Kernel.is_atom(a)) == quote do: :erlang.is_atom(a()) - end + test "raises for invalid arguments" do + assert_raise CompileError, ~r"invalid arguments for \"quote\"", fn -> + expand(quote do: (quote 1 + 1)) + end + end - test "remote calls: expands macros" do - assert expand(quote do: Kernel.ExpansionTest.thirteen) == 13 + test "raises unless its options are a keyword list" do + assert_raise CompileError, ~r"invalid options for quote, expected a keyword list", fn -> + expand(quote do: (quote :foo, do: :foo)) + end + end end - test "remote calls: expands receiver and args" do - assert expand(quote do: a.is_atom(b)) == quote do: a().is_atom(b()) - assert expand(quote do: (a = :foo).is_atom(a)) == quote do: (a = :foo).is_atom(a()) - end + describe "anonymous calls" do + test "expands base and args" do + assert expand(quote do: a.(b)) == quote do: a().(b()) + end - test "remote calls: modules must be required for macros" do - assert expand(quote do: (require Kernel.ExpansionTarget; Kernel.ExpansionTarget.seventeen)) == - quote do: (require :"Elixir.Kernel.ExpansionTarget", []; 17) + test "raises on atom base" do + assert_raise CompileError, ~r"invalid function call :foo.()", fn -> + expand(quote do: :foo.(a)) + end + end end - test "remote calls: raises when not required" do - msg = ~r"you must require Kernel\.ExpansionTarget before invoking the macro Kernel\.ExpansionTarget\.seventeen/0" - assert_raise CompileError, msg, fn -> - expand(quote do: Kernel.ExpansionTarget.seventeen) + describe "remotes" do + test "expands to Erlang" do + assert expand(quote do: Kernel.is_atom(a)) == quote do: :erlang.is_atom(a()) end - end - ## Comprehensions + test "expands macros" do + assert expand(quote do: Kernel.ExpansionTest.thirteen) == 13 + end - test "variables inside comprehensions do not leak with 
enums" do - assert expand(quote do: (for(a <- b, do: c = 1); c)) == - quote do: (for(a <- b(), do: c = 1); c()) - end + test "expands receiver and args" do + assert expand(quote do: a.is_atom(b)) == quote do: a().is_atom(b()) + assert expand(quote do: (a = :foo).is_atom(a)) == quote do: (a = :foo).is_atom(a()) + end - test "variables inside comprehensions do not leak with binaries" do - assert expand(quote do: (for(<>, do: c = 1); c)) == - quote do: (for(<< <> <- b() >>, do: c = 1); c()) - end + test "modules must be required for macros" do + assert expand(quote do: (require Kernel.ExpansionTarget; Kernel.ExpansionTarget.seventeen)) == + quote do: (:"Elixir.Kernel.ExpansionTarget"; 17) + end - test "variables inside filters are available in blocks" do - assert expand(quote do: for(a <- b, c = a, do: c)) == - quote do: (for(a <- b(), c = a, do: c)) - end + test "raises when not required" do + msg = ~r"you must require Kernel\.ExpansionTarget before invoking the macro Kernel\.ExpansionTarget\.seventeen/0" + assert_raise CompileError, msg, fn -> + expand(quote do: Kernel.ExpansionTarget.seventeen) + end + end - test "variables inside comprehensions options do not leak" do - assert expand(quote do: (for(a <- c = b, into: [], do: 1); c)) == - quote do: (for(a <- c = b(), do: 1, into: []); c()) + test "in matches" do + assert_raise CompileError, + ~r"cannot invoke remote function Hello.something_that_does_not_exist/0 inside match", + fn -> expand(quote(do: Hello.something_that_does_not_exist() = :foo)) end - assert expand(quote do: (for(a <- b, into: c = [], do: 1); c)) == - quote do: (for(a <- b(), do: 1, into: c = []); c()) - end + assert_raise CompileError, + ~r"cannot invoke remote function :erlang.make_ref/0 inside match", + fn -> expand(quote(do: make_ref() = :foo)) end + end - ## Capture + test "in guards" do + assert_raise CompileError, + ~r"cannot invoke remote function Hello.something_that_does_not_exist/1 inside guard", + fn -> expand(quote do: fn arg when Hello.something_that_does_not_exist(arg) -> arg end) end - test "&: keeps locals" do - assert expand(quote do: &unknown/2) == - {:&, [], [{:/, [], [{:unknown,[],nil}, 2]}]} - assert expand(quote do: &unknown(&1, &2)) == - {:&, [], [{:/, [], [{:unknown,[],nil}, 2]}]} + assert_raise CompileError, + ~r"cannot invoke remote function :erlang.make_ref/0 inside guard", + fn -> expand(quote do: fn arg when make_ref() -> arg end) end + end end - test "&: expands remotes" do - assert expand(quote do: &List.flatten/2) == - quote do: :erlang.make_fun(:"Elixir.List", :flatten, 2) + describe "comprehensions" do + test "variables do not leak with enums" do + assert expand(quote do: (for(a <- b, do: c = 1); c)) == + quote do: (for(a <- b(), do: c = 1); c()) + end - assert expand(quote do: &Kernel.is_atom/1) == - quote do: :erlang.make_fun(:erlang, :is_atom, 1) - end + test "variables do not leak with binaries" do + assert expand(quote do: (for(<>, do: c = 1); c)) == + quote do: (for(<< <> <- b() >>, do: c = 1); c()) + end - test "&: expands macros" do + test "variables inside filters are available in blocks" do + assert expand(quote do: for(a <- b, c = a, do: c)) == + quote do: (for(a <- b(), c = a, do: c)) + end - assert expand(quote do: (require Kernel.ExpansionTarget; &Kernel.ExpansionTarget.seventeen/0)) == - quote do: (require :"Elixir.Kernel.ExpansionTarget", []; fn -> 17 end) - end + test "variables inside options do not leak" do + assert expand(quote do: (for(a <- c = b, into: [], do: 1); c)) == + quote do: (for(a <- c = b(), do: 1, into: 
[]); c()) - ## fn + assert expand(quote do: (for(a <- b, into: c = [], do: 1); c)) == + quote do: (for(a <- b(), do: 1, into: c = []); c()) + end - test "fn: expands each clause" do - assert expand(quote do: fn x -> x; _ -> x end) == - quote do: fn x -> x; _ -> x() end - end + test "must start with generators" do + assert_raise CompileError, ~r"for comprehensions must start with a generator", fn -> + expand(quote do: (for is_atom(:foo), do: :foo)) + end - test "fn: does not share lexical in between clauses" do - assert expand(quote do: fn 1 -> import List; 2 -> flatten([1,2,3]) end) == - quote do: fn 1 -> import :"Elixir.List", []; 2 -> flatten([1,2,3]) end - end + assert_raise CompileError, ~r"for comprehensions must start with a generator", fn -> + expand(quote do: (for do: :foo)) + end + end - test "fn: expands guards" do - assert expand(quote do: fn x when x when __ENV__.context -> true end) == - quote do: fn x when x when :guard -> true end - end + test "requires size on binary generators" do + assert_raise CompileError, + ~r"a binary field without size is only allowed at the end of a binary pattern", + fn -> expand(quote do: (for <>, do: x)) end + end - test "fn: does not leak vars" do - assert expand(quote do: (fn x -> x end; x)) == - quote do: (fn x -> x end; x()) - end + test "require do option" do + assert_raise CompileError, + ~r"missing :do option in \"for\"", + fn -> expand(quote do: for _ <- 1..2) end + end - ## Cond + test "raise error for unknown options" do + assert_raise CompileError, + ~r"unsupported option :else given to for", + fn -> expand(quote do: for _ <- 1..2, do: 1, else: 1) end - test "cond: expands each clause" do - assert expand_and_clean(quote do: (cond do x = 1 -> x; _ -> x end)) == - quote do: (cond do x = 1 -> x; _ -> x() end) + assert_raise CompileError, + ~r"unsupported option :other given to for", + fn -> expand(quote do: for _ <- 1..2, do: 1, other: 1) end + end end - test "cond: does not share lexical in between clauses" do - assert expand_and_clean(quote do: (cond do 1 -> import List; 2 -> flatten([1,2,3]) end)) == - quote do: (cond do 1 -> import :"Elixir.List", []; 2 -> flatten([1,2,3]) end) - end + describe "with" do + test "variables do not leak" do + input = quote(do: (with({foo} <- {bar}, do: baz = :ok); baz)) + other = Macro.var(:other, :elixir_with) + result = quote do + case {bar()} do + {foo} -> baz = :ok + unquote(other) -> unquote(other) + end + baz() + end - test "cond: does not leaks vars on head" do - assert expand_and_clean(quote do: (cond do x = 1 -> x; y = 2 -> y end; :erlang.+(x, y))) == - quote do: (cond do x = 1 -> x; y = 2 -> y end; :erlang.+(x(), y())) - end + assert input |> expand() |> clean_meta([:export_vars, :generated]) == result + end - test "cond: leaks vars" do - assert expand_and_clean(quote do: (cond do 1 -> x = 1; 2 -> y = 2 end; :erlang.+(x, y))) == - quote do: (cond do 1 -> x = 1; 2 -> y = 2 end; :erlang.+(x, y)) - end + test "variables are available in do option" do + input = quote(do: (with({foo} <- {bar}, do: baz = foo); baz)) + other = Macro.var(:other, :elixir_with) + result = quote do + case {bar()} do + {foo} -> baz = foo + unquote(other) -> unquote(other) + end + baz() + end + + assert input |> expand() |> clean_meta([:export_vars, :generated]) == result + end - ## Case + test "variables inside else do not leak" do + input = quote(do: (with({foo} <- {bar}, do: :ok, else: (baz -> baz)); baz)) + other = Macro.var(:other, :elixir_with) + return = Macro.var(:return, :elixir_with) + result = quote do + 
case(case {bar()} do + {foo} -> {:ok, :ok} + unquote(other) -> {:error, unquote(other)} + end) do + {:ok, unquote(return)} -> unquote(return) + {:error, baz} -> baz + {:error, unquote(other)} -> :erlang.error({:with_clause, unquote(other)}) + end + baz() + end + + assert input |> expand() |> clean_meta([:export_vars, :generated]) == result + end - test "case: expands each clause" do - assert expand_and_clean(quote do: (case w do x -> x; _ -> x end)) == - quote do: (case w() do x -> x; _ -> x() end) - end + test "fails if \"do\" is missing" do + assert_raise CompileError, ~r"missing :do option in \"with\"", fn -> + expand(quote do: with(_ <- true, [])) + end + end - test "case: does not share lexical in between clauses" do - assert expand_and_clean(quote do: (case w do 1 -> import List; 2 -> flatten([1,2,3]) end)) == - quote do: (case w() do 1 -> import :"Elixir.List", []; 2 -> flatten([1,2,3]) end) - end + test "fails on invalid else option" do + assert_raise CompileError, ~r"expected -> clauses for :else in \"with\"", fn -> + expand(quote(do: with(_ <- true, do: :ok, else: [:error]))) + end - test "case: expands guards" do - assert expand_and_clean(quote do: (case w do x when x when __ENV__.context -> true end)) == - quote do: (case w() do x when x when :guard -> true end) - end + assert_raise CompileError, ~r"expected -> clauses for :else in \"with\"", fn -> + expand(quote(do: with(_ <- true, do: :ok, else: :error))) + end + end - test "case: does not leaks vars on head" do - assert expand_and_clean(quote do: (case w do x -> x; y -> y end; :erlang.+(x, y))) == - quote do: (case w() do x -> x; y -> y end; :erlang.+(x(), y())) - end + test "fails for invalid options" do + # Only the required "do" is present alongside the unexpected option. + assert_raise CompileError, ~r"unexpected option :foo in \"with\"", fn -> + expand(quote do: with(_ <- true, foo: :bar, do: :ok)) + end - test "case: leaks vars" do - assert expand_and_clean(quote do: (case w do x -> x = x; y -> y = y end; :erlang.+(x, y))) == - quote do: (case w() do x -> x = x; y -> y = y end; :erlang.+(x, y)) + # More options are present alongside the unexpected option. 
+ assert_raise CompileError, ~r"unexpected option :foo in \"with\"", fn -> + expand(quote do: with(_ <- true, do: :ok, else: (_ -> :ok), foo: :bar)) + end + end end - ## Receive + describe "&" do + test "keeps locals" do + assert expand(quote do: &unknown/2) == + {:&, [], [{:/, [], [{:unknown, [], nil}, 2]}]} + assert expand(quote do: &unknown(&1, &2)) == + {:&, [], [{:/, [], [{:unknown, [], nil}, 2]}]} + end - test "receive: expands each clause" do - assert expand_and_clean(quote do: (receive do x -> x; _ -> x end)) == - quote do: (receive do x -> x; _ -> x() end) - end + test "expands remotes" do + assert expand(quote do: &List.flatten/2) == + quote(do: &:"Elixir.List".flatten/2) |> clean_meta([:import, :context]) - test "receive: does not share lexical in between clauses" do - assert expand_and_clean(quote do: (receive do 1 -> import List; 2 -> flatten([1,2,3]) end)) == - quote do: (receive do 1 -> import :"Elixir.List", []; 2 -> flatten([1,2,3]) end) - end + assert expand(quote do: &Kernel.is_atom/1) == + quote(do: &:erlang.is_atom/1) |> clean_meta([:import, :context]) + end - test "receive: expands guards" do - assert expand_and_clean(quote do: (receive do x when x when __ENV__.context -> true end)) == - quote do: (receive do x when x when :guard -> true end) - end + test "expands macros" do + assert expand(quote do: (require Kernel.ExpansionTarget; &Kernel.ExpansionTarget.seventeen/0)) == + quote do: (:"Elixir.Kernel.ExpansionTarget"; fn -> 17 end) + end - test "receive: does not leaks clause vars" do - assert expand_and_clean(quote do: (receive do x -> x; y -> y end; :erlang.+(x, y))) == - quote do: (receive do x -> x; y -> y end; :erlang.+(x(), y())) - end + test "fails on non-continuous" do + assert_raise CompileError, + ~r"capture &0 is not allowed", + fn -> expand(quote do: &foo(&0)) end + assert_raise CompileError, + ~r"capture &2 cannot be defined without &1", + fn -> expand(quote do: &(&2)) end + assert_raise CompileError, + ~r"capture &255 cannot be defined without &1", + fn -> expand(quote do: &(&255)) end + end - test "receive: leaks vars" do - assert expand_and_clean(quote do: (receive do x -> x = x; y -> y = y end; :erlang.+(x, y))) == - quote do: (receive do x -> x = x; y -> y = y end; :erlang.+(x, y)) - end + test "fails on block" do + assert_raise CompileError, + ~r"invalid args for &, block expressions are not allowed, got: \(\n 1\n 2\n\)", + fn -> expand(quote do: &(1;2)) end + end - test "receive: leaks vars on after" do - assert expand_and_clean(quote do: (receive do x -> x = x after y -> y; w = y end; :erlang.+(x, w))) == - quote do: (receive do x -> x = x after y() -> y(); w = y() end; :erlang.+(x, w)) - end + test "fails on other types" do + assert_raise CompileError, + ~r"invalid args for &, expected an expression in the format of &Mod.fun/arity, &local/arity or a capture containing at least one argument as &1, got: :foo", + fn -> expand(quote do: &:foo) end + end - ## Try + test "fails on invalid arity" do + assert_raise CompileError, + ~r"invalid arity for &, expected a number between 0 and 255, got: 256", + fn -> expand(quote do: &Mod.fun/256) end + end - test "try: expands do" do - assert expand(quote do: (try do x = y end; x)) == - quote do: (try do x = y() end; x()) + test "fails when no captures" do + assert_raise CompileError, + ~r"invalid args for &, expected an expression in the format of &Mod.fun/arity, &local/arity or a capture containing at least one argument as &1, got: foo()", + fn -> expand(quote do: &foo()) end + end + + test "fails on nested 
capture" do + assert_raise CompileError, + ~r"nested captures via & are not allowed: &\(&1\)", + fn -> expand(quote do: &(& &1)) end + end + + test "fails on integers" do + assert_raise CompileError, + ~r"unhandled &1 outside of a capture", + fn -> expand(quote do: &1) end + end end - test "try: expands catch" do - assert expand(quote do: (try do x catch x, y -> z = :erlang.+(x, y) end; z)) == - quote do: (try do x() catch x, y -> z = :erlang.+(x, y) end; z()) + describe "fn" do + test "expands each clause" do + assert expand(quote do: fn x -> x; _ -> x end) == + quote do: fn x -> x; _ -> x() end + end + + test "does not share lexical scope between clauses" do + assert expand(quote do: fn 1 -> import List; 2 -> flatten([1, 2, 3]) end) == + quote do: fn 1 -> :"Elixir.List"; 2 -> flatten([1, 2, 3]) end + end + + test "expands guards" do + assert expand(quote do: fn x when x when __ENV__.context -> true end) == + quote do: fn x when x when :guard -> true end + end + + test "does not leak vars" do + assert expand(quote do: (fn x -> x end; x)) == + quote do: (fn x -> x end; x()) + end + + test "raises on mixed arities" do + assert_raise CompileError, ~r"cannot mix clauses with different arities in anonymous functions", fn -> + expand(quote do: (fn x -> x; x, y -> x + y end)) + end + end end - test "try: expands after" do - assert expand(quote do: (try do x after z = y end; z)) == - quote do: (try do x() after z = y() end; z()) + describe "cond" do + test "expands each clause" do + assert expand(quote do: (cond do x = 1 -> x; true -> x end)) == + quote do: (cond do x = 1 -> x; true -> x() end) + end + + test "does not share lexical scope between clauses" do + assert expand(quote do: (cond do 1 -> import List; 2 -> flatten([1, 2, 3]) end)) == + quote do: (cond do 1 -> :"Elixir.List"; 2 -> flatten([1, 2, 3]) end) + end + + test "does not leaks vars on head" do + assert expand(quote do: (cond do x = 1 -> x; y = 2 -> y end; :erlang.+(x, y))) == + quote do: (cond do x = 1 -> x; y = 2 -> y end; :erlang.+(x(), y())) + end + + test "leaks vars" do + assert expand(quote do: (cond do 1 -> x = 1; 2 -> y = 2 end; :erlang.+(x, y))) == + quote do: (cond do 1 -> x = 1; 2 -> y = 2 end; :erlang.+(x, y)) + end + + test "expects exactly one do" do + assert_raise CompileError, ~r"missing :do option in \"cond\"", fn -> + expand(quote do: (cond [])) + end + + assert_raise CompileError, ~r"duplicated :do clauses given for \"cond\"", fn -> + expand(quote(do: (cond do: (x -> x), do: (y -> y)))) + end + end + + test "expects clauses" do + assert_raise CompileError, ~r"expected -> clauses for :do in \"cond\"", fn -> + expand(quote do: (cond do: :ok)) + end + + assert_raise CompileError, ~r"expected -> clauses for :do in \"cond\"", fn -> + expand(quote do: (cond do: [:not, :clauses])) + end + end + + test "expects one argument in clauses" do + assert_raise CompileError, ~r"expected one arg for :do clauses \(->\) in \"cond\"", fn -> + expand(quote do: (cond do _, _ -> :ok end)) + end + end + + test "raises for invalid arguments" do + assert_raise CompileError, ~r"invalid arguments for \"cond\"", fn -> + expand(quote do: (cond :foo)) + end + end + + test "raises with invalid options" do + assert_raise CompileError, ~r"unexpected option :foo in \"cond\"", fn -> + expand(quote do: (cond do: (1 -> 1), foo: :bar)) + end + end + + test "raises for _ in clauses" do + assert_raise CompileError, ~r"unbound variable _ inside \"cond\"\. 
If you want the last clause", fn -> + expand(quote(do: (cond do x -> x; _ -> :raise end))) + end + end end - test "try: expands else" do - assert expand(quote do: (try do x else z -> z end; z)) == - quote do: (try do x() else z -> z end; z()) + describe "case" do + test "expands each clause" do + assert expand(quote do: (case w do x -> x; _ -> x end)) == + quote do: (case w() do x -> x; _ -> x() end) + end + + test "does not share lexical scope between clauses" do + assert expand(quote do: (case w do 1 -> import List; 2 -> flatten([1, 2, 3]) end)) == + quote do: (case w() do 1 -> :"Elixir.List"; 2 -> flatten([1, 2, 3]) end) + end + + test "expands guards" do + assert expand(quote do: (case w do x when x when __ENV__.context -> true end)) == + quote do: (case w() do x when x when :guard -> true end) + end + + test "does not leaks vars on head" do + assert expand(quote do: (case w do x -> x; y -> y end; :erlang.+(x, y))) == + quote do: (case w() do x -> x; y -> y end; :erlang.+(x(), y())) + end + + test "leaks vars" do + assert expand(quote do: (case w do x -> x = x; y -> y = y end; :erlang.+(x, y))) == + quote do: (case w() do x -> x = x; y -> y = y end; :erlang.+(x, y)) + end + + test "expects exactly one do" do + assert_raise CompileError, ~r"missing :do option in \"case\"", fn -> + expand(quote(do: (case e, []))) + end + + assert_raise CompileError, ~r"duplicated :do clauses given for \"case\"", fn -> + expand(quote(do: (case e, do: (x -> x), do: (y -> y)))) + end + end + + test "expects clauses" do + assert_raise CompileError, ~r"expected -> clauses for :do in \"case\"", fn -> + expand(quote do: (case e do x end)) + end + + assert_raise CompileError, ~r"expected -> clauses for :do in \"case\"", fn -> + expand(quote do: (case e do [:not, :clauses] end)) + end + end + + test "expects exactly one argument in clauses" do + assert_raise CompileError, ~r"expected one arg for :do clauses \(->\) in \"case\"", fn -> + expand(quote do: (case e do _, _ -> :ok end)) + end + end + + test "fails with invalid arguments" do + assert_raise CompileError, ~r"invalid arguments for \"case\"", fn -> + expand(quote do: (case :foo, :bar)) + end + end + + test "fails for invalid options" do + assert_raise CompileError, ~r"unexpected option :foo in \"case\"", fn -> + expand(quote do: (case e, do: (x -> x), foo: :bar)) + end + end end - test "try: expands rescue" do - assert expand(quote do: (try do x rescue x -> x; Error -> x end; x)) == - quote do: (try do x() rescue unquote(:in)(x, _) -> x; unquote(:in)(_, [:"Elixir.Error"]) -> x() end; x()) + describe "receive" do + test "expands each clause" do + assert expand(quote do: (receive do x -> x; _ -> x end)) == + quote do: (receive do x -> x; _ -> x() end) + end + + test "does not share lexical scope between clauses" do + assert expand(quote do: (receive do 1 -> import List; 2 -> flatten([1, 2, 3]) end)) == + quote do: (receive do 1 -> :"Elixir.List"; 2 -> flatten([1, 2, 3]) end) + end + + test "expands guards" do + assert expand(quote do: (receive do x when x when __ENV__.context -> true end)) == + quote do: (receive do x when x when :guard -> true end) + end + + test "does not leaks clause vars" do + assert expand(quote do: (receive do x -> x; y -> y end; :erlang.+(x, y))) == + quote do: (receive do x -> x; y -> y end; :erlang.+(x(), y())) + end + + test "leaks vars" do + assert expand(quote do: (receive do x -> x = x; y -> y = y end; :erlang.+(x, y))) == + quote do: (receive do x -> x = x; y -> y = y end; :erlang.+(x, y)) + end + + test "leaks vars on after" do 
+ assert expand(quote do: (receive do x -> x = x after y -> y; w = y end; :erlang.+(x, w))) == + quote do: (receive do x -> x = x after y() -> y(); w = y() end; :erlang.+(x, w)) + end + + test "expects exactly one do or after" do + assert_raise CompileError, ~r"missing :do/:after option in \"receive\"", fn -> + expand(quote do: (receive [])) + end + + assert_raise CompileError, ~r"duplicated :do clauses given for \"receive\"", fn -> + expand(quote(do: (receive do: (x -> x), do: (y -> y)))) + end + + assert_raise CompileError, ~r"duplicated :after clauses given for \"receive\"", fn -> + expand(quote(do: (receive do x -> x after y -> y after z -> z end))) + end + end + + test "expects clauses" do + assert_raise CompileError, ~r"expected -> clauses for :do in \"receive\"", fn -> + expand(quote do: (receive do x end)) + end + + assert_raise CompileError, ~r"expected -> clauses for :do in \"receive\"", fn -> + expand(quote do: (receive do [:not, :clauses] end)) + end + end + + test "expects on argument for do/after clauses" do + assert_raise CompileError, ~r"expected one arg for :do clauses \(->\) in \"receive\"", fn -> + expand(quote do: (receive do _, _ -> :ok end)) + end + + assert_raise CompileError, ~r"expected one arg for :after clauses \(->\) in \"receive\"", fn -> + expand(quote do: (receive do x -> x after _, _ -> :ok end)) + end + end + + test "expects a single clause for \"after\"" do + assert_raise CompileError, ~r"expected a single -> clause for :after in \"receive\"", fn -> + expand(quote do: (receive do x -> x after 1 -> y; 2 -> z end)) + end + end + + test "raises for invalid arguments" do + assert_raise CompileError, ~r"invalid arguments for \"receive\"", fn -> + expand(quote do: (receive :foo)) + end + end + + test "raises with invalid options" do + assert_raise CompileError, ~r"unexpected option :foo in \"receive\"", fn -> + expand(quote do: (receive do: (x -> x), foo: :bar)) + end + end end - ## Binaries + describe "try" do + test "expands catch" do + assert expand(quote do: (try do x catch x, y -> z = :erlang.+(x, y) end; z)) == + quote do: (try do x() catch x, y -> z = :erlang.+(x, y) end; z()) + end + + test "expands after" do + assert expand(quote do: (try do x after z = y end; z)) == + quote do: (try do x() after z = y() end; z()) + end + + test "expands else" do + assert expand(quote do: (try do x else z -> z end; z)) == + quote do: (try do x() else z -> z end; z()) + end + + test "expands rescue" do + assert expand(quote do: (try do x rescue x -> x; Error -> x end; x)) == + quote do: (try do x() rescue x -> x; unquote(:in)(_, [:"Elixir.Error"]) -> x() end; x()) + end + + test "expects more than do" do + assert_raise CompileError, ~r"missing :catch/:rescue/:after/:else option in \"try\"", fn -> + expand(quote do: (try do x = y end; x)) + end + end + + test "raises if do is missing" do + assert_raise CompileError, ~r"missing :do option in \"try\"", fn -> + expand(quote do: (try [])) + end + end + + test "expects at most one clause" do + assert_raise CompileError, ~r"duplicated :do clauses given for \"try\"", fn -> + expand(quote(do: (try do: e, do: f))) + end + + assert_raise CompileError, ~r"duplicated :rescue clauses given for \"try\"", fn -> + expand(quote(do: (try do e rescue x -> x rescue y -> y end))) + end + + assert_raise CompileError, ~r"duplicated :after clauses given for \"try\"", fn -> + expand(quote(do: (try do e after x = y after x = y end))) + end + + assert_raise CompileError, ~r"duplicated :else clauses given for \"try\"", fn -> + expand(quote(do: (try 
do e else x -> x else y -> y end))) + end + + assert_raise CompileError, ~r"duplicated :catch clauses given for \"try\"", fn -> + expand(quote(do: (try do e catch x -> x catch y -> y end))) + end + end + + test "raises with invalid arguments" do + assert_raise CompileError, ~r"invalid arguments for \"try\"", fn -> + expand(quote do: (try :foo)) + end + end + + test "raises with invalid options" do + assert_raise CompileError, ~r"unexpected option :foo in \"try\"", fn -> + expand(quote do: (try do: x, foo: :bar)) + end + end + + test "expects exactly one argument in rescue clauses" do + assert_raise CompileError, ~r"expected one arg for :rescue clauses \(->\) in \"try\"", fn -> + expand(quote do: (try do x rescue _, _ -> :ok end)) + end + end + + test "expects an alias, a variable, or \"var in [alias]\" as the argument of rescue clauses" do + assert_raise CompileError, ~r"invalid \"rescue\" clause\. The clause should match", fn -> + expand(quote do: (try do x rescue function(:call) -> :ok end)) + end + end + + test "expects one or two args for catch clauses" do + assert_raise CompileError, ~r"expected one or two args for :catch clauses \(->\) in \"try\"", fn -> + expand(quote do: (try do x catch _, _, _ -> :ok end)) + end + end + + test "expects clauses for rescue, else, catch" do + assert_raise CompileError, ~r"expected -> clauses for :rescue in \"try\"", fn -> + expand(quote do: (try do e rescue x end)) + end + + assert_raise CompileError, ~r"expected -> clauses for :rescue in \"try\"", fn -> + expand(quote do: (try do e rescue [:not, :clauses] end)) + end + + assert_raise CompileError, ~r"expected -> clauses for :catch in \"try\"", fn -> + expand(quote do: (try do e catch x end)) + end + + assert_raise CompileError, ~r"expected -> clauses for :catch in \"try\"", fn -> + expand(quote do: (try do e catch [:not, :clauses] end)) + end - test "bitstrings: expands modifiers" do - assert expand(quote do: (import Kernel.ExpansionTarget; << x :: seventeen >>)) == - quote do: (import :"Elixir.Kernel.ExpansionTarget", []; << x() :: [unquote(:size)(17)] >>) + assert_raise CompileError, ~r"expected -> clauses for :else in \"try\"", fn -> + expand(quote do: (try do e else x end)) + end - assert expand(quote do: (import Kernel.ExpansionTarget; << seventeen :: seventeen, x :: size(seventeen) >> = 1)) == - quote do: (import :"Elixir.Kernel.ExpansionTarget", []; - << seventeen :: [unquote(:size)(17)], x :: [unquote(:size)(seventeen)] >> = 1) + assert_raise CompileError, ~r"expected -> clauses for :else in \"try\"", fn -> + expand(quote do: (try do e else [:not, :clauses] end)) + end + end end - test "bitstrings: expands modifiers args" do - assert expand(quote do: (require Kernel.ExpansionTarget; << x :: size(Kernel.ExpansionTarget.seventeen) >>)) == - quote do: (require :"Elixir.Kernel.ExpansionTarget", []; << x() :: [unquote(:size)(17)] >>) + describe "bitstrings" do + test "inlines binaries inside interpolation" do + import Kernel.ExpansionTarget + assert expand(quote do: "foo#{bar()}" = "foobar") == + quote do: (<<"foo"::binary(), "bar"::binary()>> = "foobar") + end + + test "expands size * unit" do + import Kernel, except: [-: 2] + + assert expand(quote do: <>) == + quote do: <> + + assert expand(quote do: <>) == + quote do: <> + + assert expand(quote do: <>) == + quote do: <> + + assert expand(quote do: <>) == + quote do: <> + + assert expand(quote do: <>) == + quote do: <> + end + + test "expands binary/bitstring specifiers" do + import Kernel, except: [-: 2] + + assert expand(quote do: <>) == 
quote do: <> + assert expand(quote do: <>) == quote do: <> + assert expand(quote do: <>) == quote do: <> + assert expand(quote do: <>) == quote do: <> + assert expand(quote do: <>) == quote do: <> + + assert_raise CompileError, ~r"signed and unsigned specifiers are supported only on integer and float type", fn -> + expand(quote do: <>) + end + end + + test "expands utf* specifiers" do + import Kernel, except: [-: 2] + + assert expand(quote do: <>) == quote do: <> + assert expand(quote do: <>) == quote do: <> + assert expand(quote do: <>) == quote do: <> + + assert_raise CompileError, ~r"signed and unsigned specifiers are supported only on integer and float type", fn -> + expand(quote do: <>) + end + + assert_raise CompileError, ~r"size and unit are not supported on utf types", fn -> + expand(quote do: <>) + end + end + + test "expands numbers specifiers" do + import Kernel, except: [-: 2] + + assert expand(quote do: <>) == quote do: <> + assert expand(quote do: <>) == quote do: <> + assert expand(quote do: <>) == quote do: <> + assert expand(quote do: <>) == quote do: <> + assert expand(quote do: <>) == quote do: <> + + assert_raise CompileError, ~r"integer and float types require a size specifier if the unit specifier is given", fn -> + expand(quote do: <>) + end + end + + test "expands macro specifiers" do + import Kernel, except: [-: 2] + import Kernel.ExpansionTarget + + assert expand(quote do: <>) == + quote do: (<>) + + assert expand(quote do: (<> = 1)) == + quote do: (<> = 1) + end + + test "expands macro in args" do + import Kernel, except: [-: 2] + + assert expand(quote do: (require Kernel.ExpansionTarget; <>)) == + quote do: (:"Elixir.Kernel.ExpansionTarget"; <>) + end + + test "supports dynamic size" do + import Kernel, except: [-: 2] + assert expand(quote do: (var = 1; <>)) == + quote do: (var = 1; <>) + end + + test "raises on size or unit for literal bitstrings" do + assert_raise CompileError, ~r"literal <<>> in bitstring supports only type specifiers", fn -> + expand(quote do: << <<"foo">>::32 >>) + end + end + + test "raises on size or unit for literal strings" do + assert_raise CompileError, ~r"literal string in bitstring supports only endianess and type specifiers", fn -> + expand(quote do: <<"foo"::32>>) + end + end + + test "raises for invalid size * unit for floats" do + assert_raise CompileError, ~r"float requires size\*unit to be 32 or 64 \(default\), got: 128", fn -> + expand(quote do: <<12.3::32*4>>) + end + + assert_raise CompileError, ~r"float requires size\*unit to be 32 or 64 \(default\), got: 256", fn -> + expand(quote do: <<12.3::256>>) + end + end + + test "raises for invalid size" do + assert_raise CompileError, ~r"size in bitstring expects an integer or a variable as argument, got: :oops", fn -> + expand(quote do: <<"foo"::size(:oops)>>) + end + end + + test "raises for invalid unit" do + assert_raise CompileError, ~r"unit in bitstring expects an integer as argument, got: :oops", fn -> + expand(quote do: <<"foo"::size(8)-unit(:oops)>>) + end + end + + test "raises for unknown specifier" do + assert_raise CompileError, ~r"unknown bitstring specifier: unknown()", fn -> + expand(quote do: <<1::unknown>>) + end + end + + test "raises for conflicting specifiers" do + assert_raise CompileError, ~r"conflicting endianess specification for bit field", fn -> + expand(quote do: <<1::little-big>>) + end + + assert_raise CompileError, ~r"conflicting unit specification for bit field", fn -> + expand(quote do: <>) + end + end + + test "raises for invalid literals" do + 
assert_raise CompileError, ~r"invalid literal :foo in <<>>", fn -> + expand(quote do: <<:foo>>) + end + + assert_raise CompileError, ~r"invalid literal \[\] in <<>>", fn -> + expand(quote do: <<[]::size(8)>>) + end + end + + test "raises on binary fields with size in matches" do + assert expand(quote do: (<> = "foobar")) + + assert_raise CompileError, + ~r"a binary field without size is only allowed at the end of a binary pattern", + fn -> expand(quote do: (<> = "foobar")) end + end end - ## Invalid + describe "op ambiguity" do + test "raises when a call is ambiguous" do + message = ~r["a -1" looks like a function call but there is a variable named "a"] + assert_raise CompileError, message, fn -> + expand(quote do: (a = 1; a -1)) + end + end + end test "handles invalid expressions" do assert_raise CompileError, ~r"invalid quoted expression: {1, 2, 3}", fn -> @@ -446,7 +1176,27 @@ defmodule Kernel.ExpansionTest do end assert_raise CompileError, ~r"invalid quoted expression: #Function<", fn -> - expand(quote do: unquote({:sample, fn -> end})) + expand(quote do: unquote({:sample, fn -> nil end})) + end + + assert_raise CompileError, ~r"invalid pattern in match", fn -> + expand(quote do + case true do + true && true -> true + end + end) + end + + assert_raise CompileError, ~r"invalid call foo\(1\)\(2\)", fn -> + expand(quote do: foo(1)(2)) + end + + assert_raise CompileError, ~r"invalid call 1\.foo\(\)", fn -> + expand(quote do: 1.foo) + end + + assert_raise CompileError, ~r"unhandled operator ->", fn -> + expand(quote do: (foo -> bar)) end end @@ -456,12 +1206,9 @@ defmodule Kernel.ExpansionTest do 13 end - defp expand_and_clean(expr) do - cleaner = &Keyword.drop(&1, [:export]) - expr - |> expand_env(__ENV__) - |> elem(0) - |> Macro.prewalk(&Macro.update_meta(&1, cleaner)) + defp clean_meta(expr, vars) do + cleaner = &Keyword.drop(&1, vars) + Macro.prewalk(expr, &Macro.update_meta(&1, cleaner)) end defp expand(expr) do @@ -469,6 +1216,11 @@ defmodule Kernel.ExpansionTest do end defp expand_env(expr, env) do - :elixir_exp.expand(expr, env) + ExUnit.CaptureIO.capture_io(:stderr, fn -> + send self(), {:expand_env, :elixir_expand.expand(expr, env)} + end) + receive do + {:expand_env, result} -> result + end end end diff --git a/lib/elixir/test/elixir/kernel/fn_test.exs b/lib/elixir/test/elixir/kernel/fn_test.exs index 5e934d4ca9d..69defde212b 100644 --- a/lib/elixir/test/elixir/kernel/fn_test.exs +++ b/lib/elixir/test/elixir/kernel/fn_test.exs @@ -2,13 +2,36 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Kernel.FnTest do use ExUnit.Case, async: true - import CompileAssertion test "arithmetic constants on match" do - assert (fn 1 + 2 -> :ok end).(3) == :ok - assert (fn 1 - 2 -> :ok end).(-1) == :ok - assert (fn -1 -> :ok end).(-1) == :ok - assert (fn +1 -> :ok end).(1) == :ok + assert (fn -1 -> true end).(-1) + assert (fn +1 -> true end).(1) + end + + test "pin operator on match" do + x = 1 + refute (fn ^x -> true; _ -> false end).(0) + assert (fn ^x -> true; _ -> false end).(1) + refute (fn ^x -> true; _ -> false end).(1.0) + end + + test "guards with no args" do + fun = fn() when node() == :nonode@nohost -> true end + assert is_function(fun, 0) + end + + test "case function hoisting does not affect anonymous fns" do + result = + if atom?(0) do + user = :defined + user + else + (fn() -> + user = :undefined + user + end).() + end + assert result == :undefined end test "capture with access" do @@ -17,7 +40,7 @@ defmodule Kernel.FnTest do test "capture remote" do assert 
(&:erlang.atom_to_list/1).(:a) == 'a' - assert (&Atom.to_char_list/1).(:a) == 'a' + assert (&Atom.to_charlist/1).(:a) == 'a' assert (&List.flatten/1).([[0]]) == [0] assert (&(List.flatten/1)).([[0]]) == [0] @@ -32,9 +55,9 @@ defmodule Kernel.FnTest do end test "capture local with question mark" do - assert (&is_a?/2).(:atom, :a) - assert (&(is_a?/2)).(:atom, :a) - assert (&is_a?(&1, &2)).(:atom, :a) + assert (&atom?/1).(:a) + assert (&(atom?/1)).(:a) + assert (&atom?(&1)).(:a) end test "capture imported" do @@ -55,6 +78,7 @@ defmodule Kernel.FnTest do assert is_function &+/2 assert is_function &(&&/2) assert is_function & &1 + &2, 2 + assert is_function &and/2 end test "capture with variable module" do @@ -71,7 +95,7 @@ defmodule Kernel.FnTest do test "imported partial application" do import Record - assert (&record?(&1, :sample)).({:sample, 1}) + assert (&is_record(&1, :sample)).({:sample, 1}) end test "remote partial application" do @@ -94,7 +118,7 @@ defmodule Kernel.FnTest do assert (&[ 1, &1 ]).(2) == [ 1, 2 ] assert (&[ 1, &1, &2 ]).(2, 3) == [ 1, 2, 3 ] - assert (&[&1|&2]).(1, 2) == [1|2] + assert (&[&1 | &2]).(1, 2) == [1 | 2] end test "capture and partially apply on call" do @@ -112,38 +136,8 @@ defmodule Kernel.FnTest do assert (&fun.(&1, 2)).(1) == 3 end - test "failure on non-continuous" do - assert_compile_fail CompileError, "nofile:1: capture &2 cannot be defined without &1", "&(&2)" - end - - test "failure on integers" do - assert_compile_fail CompileError, "nofile:1: unhandled &1 outside of a capture", "&1" - assert_compile_fail CompileError, "nofile:1: capture &0 is not allowed", "&foo(&0)" - end - - test "failure on block" do - assert_compile_fail CompileError, - "nofile:1: invalid args for &, block expressions " <> - "are not allowed, got: (\n 1\n 2\n)", - "&(1;2)" - end - - test "failure on other types" do - assert_compile_fail CompileError, - "nofile:1: invalid args for &, expected an expression in the format of &Mod.fun/arity, " <> - "&local/arity or a capture containing at least one argument as &1, got: :foo", - "&:foo" - end - - test "failure when no captures" do - assert_compile_fail CompileError, - "nofile:1: invalid args for &, expected an expression in the format of &Mod.fun/arity, " <> - "&local/arity or a capture containing at least one argument as &1, got: foo()", - "&foo()" - end - - defp is_a?(:atom, atom) when is_atom(atom), do: true - defp is_a?(_, _), do: false + defp atom?(atom) when is_atom(atom), do: true + defp atom?(_), do: false defp atl(arg) do :erlang.atom_to_list(arg) diff --git a/lib/elixir/test/elixir/kernel/impl_test.exs b/lib/elixir/test/elixir/kernel/impl_test.exs new file mode 100644 index 00000000000..b4f3b508f2d --- /dev/null +++ b/lib/elixir/test/elixir/kernel/impl_test.exs @@ -0,0 +1,410 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule Kernel.ImplTest do + use ExUnit.Case + + defp capture_err(fun) do + ExUnit.CaptureIO.capture_io(:stderr, fun) + end + + defp purge(module) do + :code.purge(module) + :code.delete(module) + end + + setup do + on_exit fn -> purge(Kernel.ImplTest.ImplAttributes) end + end + + defmodule Behaviour do + @callback foo :: any + end + + defmodule MacroBehaviour do + @macrocallback bar :: any + end + + test "sets impl to boolean" do + defmodule ImplAttributes do + @behaviour Behaviour + + @impl true + def foo(), do: :ok + + @impl false + def foo(term) do + term + end + end + end + + test "sets impl to nil" do + assert_raise ArgumentError, ~r/expected impl attribute to contain a module or a 
boolean/, fn -> + defmodule ImplAttributes do + @behaviour Behaviour + @impl nil + def foo(), do: :ok + end + end + end + + test "sets impl to behaviour" do + defmodule ImplAttributes do + @behaviour Behaviour + @impl Behaviour + def foo(), do: :ok + end + end + + test "does not set impl" do + defmodule ImplAttributes do + @behaviour Behaviour + def foo(), do: :ok + end + end + + test "warns for undefined value" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour :abc + + @impl :abc + def foo(), do: :ok + end + """ + end) =~ "got @impl :abc for def foo/0 but the behaviour does not specify this callback. There are no known callbacks" + end + + test "warns for callbacks without impl and @impl has been set before" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @behaviour Kernel.ImplTest.MacroBehaviour + + @impl true + def foo(), do: :ok + + defmacro bar(), do: :ok + end + """ + end) =~ "module attribute @impl was not set for callback defmacro bar/0 (callback specified in Kernel.ImplTest.MacroBehaviour)" + end + + test "warns for callbacks without impl and @impl has been set after" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @behaviour Kernel.ImplTest.MacroBehaviour + + defmacro bar(), do: :ok + + @impl true + def foo(), do: :ok + end + """ + end) =~ "module attribute @impl was not set for callback defmacro bar/0 (callback specified in Kernel.ImplTest.MacroBehaviour)" + end + + test "warns when impl is set on private function" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @impl true + defp foo(), do: :ok + end + """ + end) =~ "defp foo/0 is private, @impl is always discarded for private functions/macros" + end + + test "warns when @impl is set and no function" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @impl true + end + """ + end) =~ "module attribute @impl was set but no definition follows it" + end + + test "warns for @impl true and no behaviour" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @impl true + def foo(), do: :ok + end + """ + end) =~ "got @impl true for def foo/0 but no behaviour was declared" + end + + test "warns for @impl true with callback name not in behaviour" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @impl true + def bar(), do: :ok + end + """ + end) =~ "got @impl true for def bar/0 but no behaviour specifies this callback" + end + + test "warns for @impl true with macro callback name not in behaviour" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.MacroBehaviour + @impl true + defmacro foo(), do: :ok + end + """ + end) =~ "got @impl true for defmacro foo/0 but no behaviour specifies this callback" + end + + test "warns for @impl true with callback kind not in behaviour" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.MacroBehaviour + @impl true + def foo(), do: :ok + end + """ + end) =~ "got @impl true for def foo/0 but no behaviour 
specifies this callback" + end + + test "warns for @impl true with wrong arity" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @impl true + def foo(arg), do: arg + end + """ + end) =~ "got @impl true for def foo/1 but no behaviour specifies this callback" + end + + test "warns for @impl false and there are no callbacks" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @impl false + def baz(term), do: term + end + """ + end) =~ "got @impl false for def baz/1 but no behaviour was declared" + end + + test "warns for @impl false and it is a callback" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @impl false + def foo(), do: :ok + end + """ + end) =~ "got @impl false for def foo/0 but it is a callback specified in Kernel.ImplTest.Behaviour" + end + + test "warns for @impl module and no behaviour" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @impl Kernel.ImplTest.Behaviour + def foo(), do: :ok + end + """ + end) =~ "got @impl Kernel.ImplTest.Behaviour for def foo/0 but no behaviour was declared" + end + + test "warns for @impl module with callback name not in behaviour" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @impl Kernel.ImplTest.Behaviour + def bar(), do: :ok + end + """ + end) =~ "got @impl Kernel.ImplTest.Behaviour for def bar/0 but the behaviour does not specify this callback" + end + + test "warns for @impl module with macro callback name not in behaviour" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.MacroBehaviour + @impl Kernel.ImplTest.MacroBehaviour + defmacro foo(), do: :ok + end + """ + end) =~ "got @impl Kernel.ImplTest.MacroBehaviour for defmacro foo/0 but the behaviour does not specify this callback" + end + + test "warns for @impl module with macro callback kind not in behaviour" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.MacroBehaviour + @impl Kernel.ImplTest.MacroBehaviour + def foo(), do: :ok + end + """ + end) =~ "got @impl Kernel.ImplTest.MacroBehaviour for def foo/0 but the behaviour does not specify this callback" + end + + test "warns for @impl module and callback belongs to another known module" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @behaviour Kernel.ImplTest.MacroBehaviour + @impl Kernel.ImplTest.Behaviour + def bar(), do: :ok + end + """ + end) =~ "got @impl Kernel.ImplTest.Behaviour for def bar/0 but the behaviour does not specify this callback" + end + + test "warns for @impl module and callback belongs to another unknown module" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @impl Kernel.ImplTest.MacroBehaviour + def bar(), do: :ok + end + """ + end) =~ "got @impl Kernel.ImplTest.MacroBehaviour for def bar/0 but the given behaviour was not declared with @behaviour" + end + + test "does not warn for no @impl when overriding callback" do + assert capture_err(fn -> + Code.eval_string """ + defmodule 
Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + + def foo(), do: :overridable + + defoverridable Kernel.ImplTest.Behaviour + + def foo(), do: :overridden + end + """ + end) == "" + end + + test "does not warn for overridable function missing @impl" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + + def foo(), do: :overridable + + defoverridable Kernel.ImplTest.Behaviour + + @impl Kernel.ImplTest.Behaviour + def foo(), do: :overridden + end + """ + end) == "" + end + + test "warns correctly for missing @impl only for end-user implemented function" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @behaviour Kernel.ImplTest.MacroBehaviour + + def foo(), do: :overridable + + defoverridable Kernel.ImplTest.Behaviour + + def foo(), do: :overridden + + @impl true + defmacro bar(), do: :overridden + end + """ + end) =~ "module attribute @impl was not set for callback def foo/0 (callback specified in Kernel.ImplTest.Behaviour)" + end + + test "warns correctly for missing @impl even if it was set in overridable callback" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @behaviour Kernel.ImplTest.MacroBehaviour + + @impl Kernel.ImplTest.Behaviour + def foo(), do: :overridable + + defoverridable Kernel.ImplTest.Behaviour + + def foo(), do: :overridden + + @impl Kernel.ImplTest.MacroBehaviour + defmacro bar(), do: :overridden + end + """ + end) =~ "module attribute @impl was not set for callback def foo/0 (callback specified in Kernel.ImplTest.Behaviour)" + end + + test "warns correctly for incorrect @impl in overridable callback" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Kernel.ImplTest.ImplAttributes do + @behaviour Kernel.ImplTest.Behaviour + @behaviour Kernel.ImplTest.MacroBehaviour + + @impl Kernel.ImplTest.MacroBehaviour + def foo(), do: :overridable + + defoverridable Kernel.ImplTest.Behaviour + + @impl Kernel.ImplTest.Behaviour + def foo(), do: :overridden + end + """ + end) =~ "got @impl Kernel.ImplTest.MacroBehaviour for def foo/0 but the behaviour does not specify this callback" + end + + test "does not warn for overridable callback when using __before_compile__/1 hook" do + assert capture_err(fn -> + Code.eval_string """ + defmodule BeforeCompile do + defmacro __before_compile__(_) do + quote do + @behaviour Kernel.ImplTest.Behaviour + + def foo(), do: :overridable + + defoverridable Kernel.ImplTest.Behaviour + end + end + end + + defmodule Kernel.ImplTest.ImplAttributes do + @before_compile BeforeCompile + @behaviour Kernel.ImplTest.MacroBehaviour + + defmacro bar(), do: :overridable + + defoverridable Kernel.ImplTest.MacroBehaviour + + @impl Kernel.ImplTest.MacroBehaviour + defmacro bar(), do: :overridden + end + """ + end) == "" + end +end diff --git a/lib/elixir/test/elixir/kernel/import_test.exs b/lib/elixir/test/elixir/kernel/import_test.exs index 2b107cdd83f..00a25953b74 100644 --- a/lib/elixir/test/elixir/kernel/import_test.exs +++ b/lib/elixir/test/elixir/kernel/import_test.exs @@ -3,14 +3,35 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Kernel.ImportTest do use ExUnit.Case, async: true + # This should not warn due to the empty only + import URI, only: [] + defmodule ImportAvailable do defmacro flatten do [flatten: 1] end end + test "multi-call" do + 
assert [List, String] = import Elixir.{List, unquote(:String)} + assert keymember?([a: 1], :a, 0) + assert valid?("ø") + end + + test "blank multi-call" do + assert [] = import List.{} + # Buggy local duplicate is untouched + assert duplicate([1], 2) == [1] + end + + test "multi-call with options" do + assert [List] = import Elixir.{List}, only: [] + # Buggy local duplicate is untouched + assert duplicate([1], 2) == [1] + end + test "import all" do - import :lists + assert :lists = import :lists assert flatten([1, [2], 3]) == [1, 2, 3] end @@ -20,8 +41,10 @@ defmodule Kernel.ImportTest do end test "import except one" do - import :lists, except: [each: 2] + import :lists, except: [duplicate: 2] assert flatten([1, [2], 3]) == [1, 2, 3] + # Buggy local duplicate is untouched + assert duplicate([1], 2) == [1] end test "import only via macro" do @@ -35,7 +58,7 @@ defmodule Kernel.ImportTest do end test "import with options via macro" do - import :lists, dynamic_opts + import :lists, dynamic_opts() assert flatten([1, [2], 3]) == [1, 2, 3] end @@ -47,6 +70,22 @@ defmodule Kernel.ImportTest do assert duplicate([1], 2) == [1] end + test "import except none respects previous import with except" do + import :lists, except: [duplicate: 2] + import :lists, except: [] + assert append([1], [2, 3]) == [1, 2, 3] + # Buggy local duplicate is untouched + assert duplicate([1], 2) == [1] + end + + test "import except none respects previous import with only" do + import :lists, only: [append: 2] + import :lists, except: [] + assert append([1], [2, 3]) == [1, 2, 3] + # Buggy local duplicate is untouched + assert duplicate([1], 2) == [1] + end + defmodule Underscored do def hello(x), do: x def __underscore__(x), do: x @@ -61,7 +100,7 @@ defmodule Kernel.ImportTest do assert __underscore__(3) == 3 end - test "import non underscored" do + test "import non-underscored" do import ExplicitUnderscored, only: [__underscore__: 1] import Underscored assert hello(2) == 2 @@ -104,11 +143,18 @@ defmodule Kernel.ImportTest do assert flatten([1, [2], 3]) == [1, 2, 3] end + test "import only removes the non-import part" do + import List + import List, only: :macros + # Buggy local duplicate is used because we asked only for macros + assert duplicate([1], 2) == [1] + end + test "import lexical on if" do if false do - import :lists + import List flatten([1, [2], 3]) - flunk + flunk() else # Buggy local duplicate is untouched assert duplicate([1], 2) == [1] @@ -118,9 +164,9 @@ defmodule Kernel.ImportTest do test "import lexical on case" do case true do false -> - import :lists + import List flatten([1, [2], 3]) - flunk + flunk() true -> # Buggy local duplicate is untouched assert duplicate([1], 2) == [1] @@ -129,9 +175,9 @@ defmodule Kernel.ImportTest do test "import lexical on try" do try do - import :lists + import List flatten([1, [2], 3]) - flunk + flunk() catch _, _ -> # Buggy local duplicate is untouched diff --git a/lib/elixir/test/elixir/kernel/lexical_tracker_test.exs b/lib/elixir/test/elixir/kernel/lexical_tracker_test.exs index da2a9cf4931..b16ae939ff8 100644 --- a/lib/elixir/test/elixir/kernel/lexical_tracker_test.exs +++ b/lib/elixir/test/elixir/kernel/lexical_tracker_test.exs @@ -6,43 +6,109 @@ defmodule Kernel.LexicalTrackerTest do alias Kernel.LexicalTracker, as: D setup do - {:ok, [pid: D.start_link]} + {:ok, pid} = D.start_link("dest") + {:ok, [pid: pid]} end - test "can add remote dispatches", config do - D.remote_dispatch(config[:pid], String) - assert D.remotes(config[:pid]) == [String] + test "gets the 
destination", config do + assert D.dest(config[:pid]) == "dest" end - test "can add imports", config do - D.add_import(config[:pid], String, 1, true) - assert D.remotes(config[:pid]) == [String] + test "can add remote references", config do + D.remote_reference(config[:pid], String, :runtime) + assert D.remote_references(config[:pid]) == {[], [String]} + + D.remote_reference(config[:pid], String, :compile) + assert D.remote_references(config[:pid]) == {[String], []} + + D.remote_reference(config[:pid], String, :runtime) + assert D.remote_references(config[:pid]) == {[String], []} + end + + test "can add remote dispatches with {function, arity} and line", config do + D.remote_dispatch(config[:pid], String, {:upcase, 1}, 1, :runtime) + assert D.remote_dispatches(config[:pid]) == {%{}, %{String => %{{:upcase, 1} => [1]}}} + assert D.remote_references(config[:pid]) == {[], [String]} + + D.remote_dispatch(config[:pid], String, {:upcase, 1}, 1, :compile) + assert D.remote_dispatches(config[:pid]) == + {%{String => %{{:upcase, 1} => [1]}}, %{String => %{{:upcase, 1} => [1]}}} + assert D.remote_references(config[:pid]) == {[String], []} + + D.remote_dispatch(config[:pid], String, {:upcase, 1}, 1, :runtime) + assert D.remote_dispatches(config[:pid]) == + {%{String => %{{:upcase, 1} => [1]}}, %{String => %{{:upcase, 1} => [1]}}} + assert D.remote_references(config[:pid]) == {[String], []} + + D.remote_dispatch(config[:pid], String, {:upcase, 1}, 2, :runtime) + assert D.remote_dispatches(config[:pid]) == + {%{String => %{{:upcase, 1} => [1]}}, %{String => %{{:upcase, 1} => [2, 1]}}} + assert D.remote_references(config[:pid]) == {[String], []} + end + + test "can add module imports", config do + D.add_import(config[:pid], String, [], 1, true) + D.import_dispatch(config[:pid], String, {:upcase, 1}, 1, :compile) + assert D.remote_references(config[:pid]) == {[String], []} + assert D.remote_dispatches(config[:pid]) == + {%{String => %{{:upcase, 1} => [1]}}, %{}} + + D.import_dispatch(config[:pid], String, {:upcase, 1}, 1, :runtime) + assert D.remote_references(config[:pid]) == {[String], []} + assert D.remote_dispatches(config[:pid]) == + {%{String => %{{:upcase, 1} => [1]}}, %{String => %{{:upcase, 1} => [1]}}} + end + + test "can add module with {function, arity} imports", config do + D.add_import(config[:pid], String, [upcase: 1], 1, true) + D.import_dispatch(config[:pid], String, {:upcase, 1}, 1, :compile) + assert D.remote_references(config[:pid]) == {[String], []} end test "can add aliases", config do D.add_alias(config[:pid], String, 1, true) - assert D.remotes(config[:pid]) == [String] + D.alias_dispatch(config[:pid], String) + assert D.remote_references(config[:pid]) == {[], []} + end + + test "unused module imports", config do + D.add_import(config[:pid], String, [], 1, true) + assert D.collect_unused_imports(config[:pid]) == [{String, 1}] + end + + test "used module imports are not unused", config do + D.add_import(config[:pid], String, [], 1, true) + D.import_dispatch(config[:pid], String, {:upcase, 1}, 1, :compile) + assert D.collect_unused_imports(config[:pid]) == [] end - test "unused imports", config do - D.add_import(config[:pid], String, 1, true) - assert D.collect_unused_imports(config[:pid]) == [{String,1}] + test "unused {module, function, arity} imports", config do + D.add_import(config[:pid], String, [upcase: 1], 1, true) + assert D.collect_unused_imports(config[:pid]) == [{String, 1}, {{String, :upcase, 1}, 1}] end - test "used imports are not unused", config do - 
D.add_import(config[:pid], String, 1, true) - D.import_dispatch(config[:pid], String) + test "used {module, function, arity} imports are not unused", config do + D.add_import(config[:pid], String, [upcase: 1], 1, true) + D.add_import(config[:pid], String, [downcase: 1], 1, true) + D.import_dispatch(config[:pid], String, {:upcase, 1}, 1, :compile) + assert D.collect_unused_imports(config[:pid]) == [{{String, :downcase, 1}, 1}] + end + + test "overwriting {module, function, arity} import with module import", config do + D.add_import(config[:pid], String, [upcase: 1], 1, true) + D.add_import(config[:pid], String, [], 1, true) + D.import_dispatch(config[:pid], String, {:downcase, 1}, 1, :compile) assert D.collect_unused_imports(config[:pid]) == [] end test "imports with no warn are not unused", config do - D.add_import(config[:pid], String, 1, false) + D.add_import(config[:pid], String, [], 1, false) assert D.collect_unused_imports(config[:pid]) == [] end test "unused aliases", config do D.add_alias(config[:pid], String, 1, true) - assert D.collect_unused_aliases(config[:pid]) == [{String,1}] + assert D.collect_unused_aliases(config[:pid]) == [{String, 1}] end test "used aliases are not unused", config do @@ -55,4 +121,108 @@ defmodule Kernel.LexicalTrackerTest do D.add_alias(config[:pid], String, 1, false) assert D.collect_unused_aliases(config[:pid]) == [] end + + test "does not tag aliases nor types" do + {{{compile, runtime}, {compile_dispatches, runtime_dispatches}}, _binding} = + Code.eval_string(""" + defmodule Kernel.LexicalTrackerTest.Sample do + alias Foo.Bar, as: Bar, warn: false + @type bar :: Foo.Bar.t + @opaque bar2 :: Foo.Bar.t + @typep bar3 :: Foo.Bar.t + @callback foo :: Foo.Bar.t + @macrocallback foo2(Foo.Bar.t) :: Foo.Bar.t + @spec foo(bar3) :: Foo.Bar.t + def foo(_), do: :bar + refs = Kernel.LexicalTracker.remote_references(__ENV__.module) + dispatches = Kernel.LexicalTracker.remote_dispatches(__ENV__.module) + {refs, dispatches} + end |> elem(3) + """) + + refute Elixir.Bar in runtime + refute Map.has_key?(runtime_dispatches, Elixir.Bar) + refute Elixir.Bar in compile + refute Map.has_key?(compile_dispatches, Elixir.Bar) + + refute Foo.Bar in runtime + refute Map.has_key?(runtime_dispatches, Foo.Bar) + refute Foo.Bar in compile + refute Map.has_key?(compile_dispatches, Foo.Bar) + end + + test "remote dispatches" do + {{compile_remote_calls, runtime_remote_calls}, []} = + Code.eval_string(""" + defmodule RemoteDispatches do + import Record + require Integer + alias Remote, as: R + + def a do + _ = extract(1, 2) + _ = is_record(1) + _ = Integer.is_even(2) + + NotAModule + Remote.func() + R.func() + &extract/2 + &is_record/1 + &R.func/0 + &Remote.func/0 + &Integer.is_even/1 + %Macro.Env{} + end + + &extract/2 + &is_record/1 + &R.func/0 + &Remote.func/0 + &Integer.is_even/1 + + &is_record/1; def b(a), do: is_record(a) + + %Macro.Env{} + + Kernel.LexicalTracker.remote_dispatches(__ENV__.module) + end |> elem(3) + """) + + compile_remote_calls = unroll_dispatches(compile_remote_calls) + assert {6, Kernel, :def, 2} in compile_remote_calls + assert {8, Record, :is_record, 1} in compile_remote_calls + assert {9, Integer, :is_even, 1} in compile_remote_calls + assert {15, Record, :is_record, 1} in compile_remote_calls + assert {18, Integer, :is_even, 1} in compile_remote_calls + assert {19, Macro.Env, :__struct__, 1} in compile_remote_calls + assert {22, Record, :extract, 2} in compile_remote_calls + assert {23, Record, :is_record, 1} in compile_remote_calls + assert {24, Remote, 
:func, 0} in compile_remote_calls + assert {25, Remote, :func, 0} in compile_remote_calls + assert {26, Integer, :is_even, 1} in compile_remote_calls + assert {28, Kernel, :def, 2} in compile_remote_calls + assert {28, Record, :is_record, 1} in compile_remote_calls + assert {30, Macro.Env, :__struct__, 1} in compile_remote_calls + assert {32, Kernel.LexicalTracker, :remote_dispatches, 1} in compile_remote_calls + + runtime_remote_calls = unroll_dispatches(runtime_remote_calls) + assert {7, Record, :extract, 2} in runtime_remote_calls + assert {8, :erlang, :is_tuple, 1} in runtime_remote_calls + assert {12, Remote, :func, 0} in runtime_remote_calls + assert {13, Remote, :func, 0} in runtime_remote_calls + assert {14, Record, :extract, 2} in runtime_remote_calls + assert {15, :erlang, :is_tuple, 1} in runtime_remote_calls + assert {16, Remote, :func, 0} in runtime_remote_calls + assert {17, Remote, :func, 0} in runtime_remote_calls + assert {18, :erlang, :==, 2} in runtime_remote_calls + assert {28, :erlang, :is_tuple, 1} in runtime_remote_calls + end + + defp unroll_dispatches(dispatches) do + for {module, fals} <- dispatches, + {{func, arity}, lines} <- fals, + line <- lines, + do: {line, module, func, arity} + end end diff --git a/lib/elixir/test/elixir/kernel/macros_test.exs b/lib/elixir/test/elixir/kernel/macros_test.exs index fd2f7511e7c..c10771cae62 100644 --- a/lib/elixir/test/elixir/kernel/macros_test.exs +++ b/lib/elixir/test/elixir/kernel/macros_test.exs @@ -9,10 +9,12 @@ defmodule Kernel.MacrosTest.Nested do end defmodule Kernel.MacrosTest do - require Kernel.MacrosTest.Nested, as: Nested - use ExUnit.Case, async: true + doctest Macro + + Kernel.MacrosTest.Nested = require Kernel.MacrosTest.Nested, as: Nested + defmacro my_macro do quote do: 1 + 1 end @@ -25,30 +27,41 @@ defmodule Kernel.MacrosTest do quote do: 1 + unquote(value) end - test :require do + defp by_two(x), do: x * 2 + + defmacro my_macro_with_local(value) do + value = by_two(by_two(value)) + quote do: 1 + unquote(value) + end + + test "require" do assert Kernel.MacrosTest.Nested.value == 1 end - test :require_with_alias do + test "require with alias" do assert Nested.value == 1 end - test :local_but_private_macro do - assert my_private_macro == 4 + test "local but private macro" do + assert my_private_macro() == 4 end - test :local_with_defaults_macro do - assert my_macro_with_default == 6 + test "local with defaults macro" do + assert my_macro_with_default() == 6 end - test :macros_cannot_be_called_dynamically do + test "local with local call" do + assert my_macro_with_local(4) == 17 + end + + test "macros cannot be called dynamically" do x = Nested assert_raise UndefinedFunctionError, fn -> x.value end end - test :bang_do_block do + test "macros with bang and do block have proper precedence" do import Kernel.MacrosTest.Nested assert (do_identity! do 1 end) == 1 assert (Kernel.MacrosTest.Nested.do_identity! do 1 end) == 1 end -end \ No newline at end of file +end diff --git a/lib/elixir/test/elixir/kernel/overridable_test.exs b/lib/elixir/test/elixir/kernel/overridable_test.exs index 1397e53d793..4e056cdc18f 100644 --- a/lib/elixir/test/elixir/kernel/overridable_test.exs +++ b/lib/elixir/test/elixir/kernel/overridable_test.exs @@ -1,10 +1,6 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Kernel.Overridable do - defmacrop super? 
do - Module.overridable?(__CALLER__.module, __CALLER__.function) - end - def sample do 1 end @@ -17,37 +13,7 @@ defmodule Kernel.Overridable do 1 end - def explicit_nested_super do - {super?, 2} - end - - false = Module.overridable? __MODULE__, {:explicit_nested_super, 0} - - defoverridable [sample: 0, with_super: 0, without_super: 0, explicit_nested_super: 0] - - true = Module.overridable? __MODULE__, {:explicit_nested_super, 0} - - def explicit_nested_super do - {super, super?, 1} - end - - true = Module.overridable? __MODULE__, {:explicit_nested_super, 0} - - defoverridable [explicit_nested_super: 0] - - true = Module.overridable? __MODULE__, {:explicit_nested_super, 0} - - def implicit_nested_super do - {super?, 1} - end - - defoverridable [implicit_nested_super: 0] - - def implicit_nested_super do - {super, super?, 0} - end - - def super_with_explicit_args(x, y) do + def super_with_multiple_args(x, y) do x + y end @@ -59,8 +25,11 @@ defmodule Kernel.Overridable do 13 end - defoverridable [implicit_nested_super: 0, - super_with_explicit_args: 2, many_clauses: 1] + defoverridable [sample: 0, with_super: 0, without_super: 0, + super_with_multiple_args: 2, many_clauses: 1] + + true = Module.overridable? __MODULE__, {:without_super, 0} + true = Module.overridable? __MODULE__, {:with_super, 0} def without_super do :without_super @@ -70,15 +39,7 @@ defmodule Kernel.Overridable do super() + 2 end - def no_overridable do - {:no_overridable, super?} - end - - def explicit_nested_super do - {super, super?, 0} - end - - def super_with_explicit_args(x, y) do + def super_with_multiple_args(x, y) do super x, y * 2 end @@ -93,11 +54,81 @@ defmodule Kernel.Overridable do def many_clauses(x) do super(x) end + + ## Macros + + defmacro overridable_macro(x) do + quote do + unquote(x) + 100 + end + end + + defoverridable overridable_macro: 1 + + defmacro overridable_macro(x) do + quote do + unquote(super(x)) + 1_000 + end + end + + defmacrop private_macro(x \\ raise "never called") + + defmacrop private_macro(x) do + quote do + unquote(x) + 100 + end + end + + defoverridable private_macro: 1 + + defmacrop private_macro(x) do + quote do + unquote(super(x)) + 1_000 + end + end + + def private_macro_call(val \\ 11) do + private_macro(val) + end +end + +defmodule Kernel.OverridableExampleBehaviour do + @callback required_callback :: any + @callback optional_callback :: any + @macrocallback required_macro_callback(arg :: any) :: Macro.t + @macrocallback optional_macro_callback(arg :: any, arg2 :: any) :: Macro.t + @optional_callbacks optional_callback: 0, optional_macro_callback: 1 end defmodule Kernel.OverridableTest do + defmodule OverridableOrder do + def not_private(str) do + process_url(https://github.com/str) + end + + def process_url(https://github.com/_str) do + :first + end + + # There was a bug where the order in which we removed + # overridable expressions lead to errors. This module + # aims to guarantee removing process_url/1 before we + # remove the function that depends on it does not cause + # errors. If it compiles, it works! 
+ defoverridable [process_url: 1, not_private: 1] + + def process_url(https://github.com/_str) do + :second + end + end + require Kernel.Overridable, as: Overridable - use ExUnit.Case, async: true + use ExUnit.Case + + defp purge(module) do + :code.purge(module) + :code.delete(module) + end test "overridable is made concrete if no other is defined" do assert Overridable.sample == 1 @@ -111,20 +142,8 @@ defmodule Kernel.OverridableTest do assert Overridable.without_super == :without_super end - test "overridable overridden with nested super" do - assert Overridable.explicit_nested_super == {{{false, 2}, true, 1}, true, 0} - end - - test "overridable node overridden with nested super" do - assert Overridable.implicit_nested_super == {{false, 1}, true, 0} - end - - test "calling super with explicit args" do - assert Overridable.super_with_explicit_args(1, 2) == 5 - end - - test "function without overridable returns false for super?" do - assert Overridable.no_overridable == {:no_overridable, false} + test "calling super with multiple args" do + assert Overridable.super_with_multiple_args(1, 2) == 5 end test "overridable with many clauses" do @@ -135,18 +154,127 @@ defmodule Kernel.OverridableTest do end test "overridable definitions are private" do - refute {:"with_super (overridable 0)", 0} in Overridable.__info__(:exports) + refute {:"with_super (overridable 0)", 0} in Overridable.module_info(:exports) + refute {:"with_super (overridable 1)", 0} in Overridable.module_info(:exports) + end + + test "overridable macros" do + a = 11 + assert Overridable.overridable_macro(a) == 1111 + assert Overridable.private_macro_call() == 1111 end test "invalid super call" do - try do - :elixir.eval 'defmodule Foo.Forwarding do\ndef bar, do: 1\ndefoverridable [bar: 0]\ndef foo, do: super\nend', [] - flunk "expected eval to fail" - rescue - error -> - assert Exception.message(error) == - "nofile:4: no super defined for foo/0 in module Foo.Forwarding. " <> - "Overridable functions available are: bar/0" + message = + "nofile:4: no super defined for foo/0 in module Kernel.OverridableOrder.Forwarding. " <> + "Overridable functions available are: bar/0" + assert_raise CompileError, message, fn -> + Code.eval_string """ + defmodule Kernel.OverridableOrder.Forwarding do + def bar(), do: 1 + defoverridable bar: 0 + def foo(), do: super() + end + """ + end + + purge Kernel.OverridableOrder.Forwarding + end + + test "undefined functions can't be marked as overridable" do + message = "cannot make function foo/2 overridable because it was not defined" + assert_raise ArgumentError, message, fn -> + Code.eval_string """ + defmodule Kernel.OverridableOrder.Foo do + defoverridable foo: 2 + end + """ + end + + purge Kernel.OverridableOrder.Foo + end + + test "overrides with behaviour" do + defmodule OverridableWithBehaviour do + @behaviour Elixir.Kernel.OverridableExampleBehaviour + + def required_callback(), do: "original" + + def optional_callback(), do: "original" + + def not_a_behaviour_callback(), do: "original" + + defmacro required_macro_callback(boolean) do + quote do + if unquote(boolean) do + "original" + end + end + end + + defoverridable Elixir.Kernel.OverridableExampleBehaviour + + defmacro optional_macro_callback(arg1, arg2), do: {arg1, arg2} + + assert Module.overridable? __MODULE__, {:required_callback, 0} + assert Module.overridable? __MODULE__, {:optional_callback, 0} + assert Module.overridable? __MODULE__, {:required_macro_callback, 1} + refute Module.overridable? 
__MODULE__, {:optional_macro_callback, 1} + refute Module.overridable? __MODULE__, {:not_a_behaviour_callback, 1} + end + end + + test "undefined module can't be passed as argument to defoverridable" do + message = "cannot pass module Kernel.OverridableTest.Bar as argument to defoverridable/1 because it was not defined" + assert_raise ArgumentError, message, fn -> + Code.eval_string """ + defmodule Kernel.OverridableTest.Foo do + defoverridable Kernel.OverridableTest.Bar + end + """ + end + purge Kernel.OverridableTest.Foo + end + + test "module without @behaviour can't be passed as argument to defoverridable" do + message = "cannot pass module Kernel.OverridableExampleBehaviour as argument to defoverridable/1" <> + " because its corresponding behaviour is missing. Did you forget to add " <> + "@behaviour Kernel.OverridableExampleBehaviour?" + assert_raise ArgumentError, message, fn -> + Code.eval_string """ + defmodule Kernel.OverridableTest.Foo do + defoverridable Kernel.OverridableExampleBehaviour + end + """ + end + purge Kernel.OverridableTest.Foo + end + + test "module with no callbacks can't be passed as argument to defoverridable" do + message = "cannot pass module Kernel.OverridableTest.Bar as argument to defoverridable/1 because it does not define any callbacks" + assert_raise ArgumentError, message, fn -> + Code.eval_string """ + defmodule Kernel.OverridableTest.Bar do + end + defmodule Kernel.OverridableTest.Foo do + @behaviour Kernel.OverridableTest.Bar + defoverridable Kernel.OverridableTest.Bar + end + """ + end + purge Kernel.OverridableTest.Bar + purge Kernel.OverridableTest.Foo + end + + test "atom which is not a module can't be passed as argument to defoverridable" do + message = "cannot pass module :abc as argument to defoverridable/1 because it was not defined" + assert_raise ArgumentError, message, fn -> + Code.eval_string """ + defmodule Kernel.OverridableTest.Foo do + defoverridable :abc + end + """ end + purge Kernel.OverridableTest.Foo end end diff --git a/lib/elixir/test/elixir/kernel/parallel_compiler_test.exs b/lib/elixir/test/elixir/kernel/parallel_compiler_test.exs new file mode 100644 index 00000000000..e4f07309c8b --- /dev/null +++ b/lib/elixir/test/elixir/kernel/parallel_compiler_test.exs @@ -0,0 +1,78 @@ +Code.require_file "../test_helper.exs", __DIR__ + +import PathHelpers + +defmodule Kernel.ParallelCompilerTest do + use ExUnit.Case + import ExUnit.CaptureIO + + test "compiles files solving dependencies" do + fixtures = [fixture_path("parallel_compiler/bar.ex"), fixture_path("parallel_compiler/foo.ex")] + assert capture_io(fn -> + assert [BarParallel, FooParallel] = Kernel.ParallelCompiler.files fixtures + end) =~ "message_from_foo" + after + Enum.map [FooParallel, BarParallel], fn mod -> + :code.purge(mod) + :code.delete(mod) + end + end + + test "compiles files with structs solving dependencies" do + fixtures = [fixture_path("parallel_struct/bar.ex"), fixture_path("parallel_struct/foo.ex")] + assert [BarStruct, FooStruct] = Kernel.ParallelCompiler.files(fixtures) |> Enum.sort + after + Enum.map [FooStruct, BarStruct], fn mod -> + :code.purge(mod) + :code.delete(mod) + end + end + + test "emits struct undefined error when local struct is undefined" do + fixtures = [fixture_path("parallel_struct/undef.ex")] + assert capture_io(fn -> + assert catch_exit(Kernel.ParallelCompiler.files(fixtures)) == {:shutdown, 1} + end) =~ "Undef.__struct__/1 is undefined, cannot expand struct Undef" + end + + test "does not hang on missing dependencies" do + fixtures = 
[fixture_path("parallel_compiler/bat.ex")] + assert capture_io(fn -> + assert catch_exit(Kernel.ParallelCompiler.files(fixtures)) == {:shutdown, 1} + end) =~ "== Compilation error" + end + + test "handles possible deadlocks" do + fixtures = [fixture_path("parallel_deadlock/foo.ex"), + fixture_path("parallel_deadlock/bar.ex")] + + msg = capture_io(fn -> + assert catch_exit(Kernel.ParallelCompiler.files fixtures) == {:shutdown, 1} + end) + + assert msg =~ "Compilation failed because of a deadlock between files." + assert msg =~ "fixtures/parallel_deadlock/foo.ex => BarDeadlock" + assert msg =~ "fixtures/parallel_deadlock/bar.ex => FooDeadlock" + assert msg =~ ~r"== Compilation error in file .+parallel_deadlock/foo\.ex ==" + assert msg =~ "** (CompileError) deadlocked waiting on module BarDeadlock" + assert msg =~ ~r"== Compilation error in file .+parallel_deadlock/bar\.ex ==" + assert msg =~ "** (CompileError) deadlocked waiting on module FooDeadlock" + end + + test "warnings as errors" do + warnings_as_errors = Code.compiler_options[:warnings_as_errors] + fixtures = [fixture_path("warnings_sample.ex")] + + try do + Code.compiler_options(warnings_as_errors: true) + + msg = capture_io :stderr, fn -> + assert catch_exit(Kernel.ParallelCompiler.files fixtures) == {:shutdown, 1} + end + + assert msg =~ "Compilation failed due to warnings while using the --warnings-as-errors option\n" + after + Code.compiler_options(warnings_as_errors: warnings_as_errors) + end + end +end diff --git a/lib/elixir/test/elixir/kernel/parallel_require_test.exs b/lib/elixir/test/elixir/kernel/parallel_require_test.exs new file mode 100644 index 00000000000..fc5ed945323 --- /dev/null +++ b/lib/elixir/test/elixir/kernel/parallel_require_test.exs @@ -0,0 +1,25 @@ +Code.require_file "../test_helper.exs", __DIR__ + +import PathHelpers + +defmodule Kernel.ParallelRequireTest do + use ExUnit.Case + import ExUnit.CaptureIO + + test "warnings as errors" do + warnings_as_errors = Code.compiler_options[:warnings_as_errors] + fixtures = [fixture_path("warnings_sample.ex")] + + try do + Code.compiler_options(warnings_as_errors: true) + + msg = capture_io :stderr, fn -> + assert catch_exit(Kernel.ParallelRequire.files fixtures) == {:shutdown, 1} + end + + assert msg =~ "Execution failed due to warnings while using the --warnings-as-errors option\n" + after + Code.compiler_options(warnings_as_errors: warnings_as_errors) + end + end +end diff --git a/lib/elixir/test/elixir/kernel/quote_test.exs b/lib/elixir/test/elixir/kernel/quote_test.exs index 2d8de03c0ae..bed4b83a79a 100644 --- a/lib/elixir/test/elixir/kernel/quote_test.exs +++ b/lib/elixir/test/elixir/kernel/quote_test.exs @@ -3,30 +3,35 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Kernel.QuoteTest do use ExUnit.Case, async: true - test :list do + test "list" do assert quote(do: [1, 2, 3]) == [1, 2, 3] end - test :tuple do + test "tuple" do assert quote(do: {:a, 1}) == {:a, 1} end - test :keep_line do - ## DO NOT MOVE THIS LINE - assert quote(location: :keep, do: bar(1, 2, 3)) == {:bar, [keep: 16], [1, 2, 3]} + test "keep line" do + # DO NOT MOVE THIS LINE + assert quote(location: :keep, do: bar(1, 2, 3)) == + {:bar, [file: Path.relative_to_cwd(__ENV__.file), keep: 16], [1, 2, 3]} end - test :fixed_line do + test "fixed line" do assert quote(line: 3, do: bar(1, 2, 3)) == {:bar, [line: 3], [1, 2, 3]} end - test :quote_line_var do - ## DO NOT MOVE THIS LINE + test "quote line var" do + # DO NOT MOVE THIS LINE line = __ENV__.line - assert quote(line: line, do: 
bar(1, 2, 3)) == {:bar, [line: 25], [1, 2, 3]} + assert quote(line: line, do: bar(1, 2, 3)) == {:bar, [line: 26], [1, 2, 3]} end - test :unquote_call do + test "generated" do + assert quote(generated: true, do: bar(1)) == {:bar, [generated: true], [1]} + end + + test "unquote call" do assert quote(do: foo(bar)[unquote(:baz)]) == quote(do: foo(bar)[:baz]) assert quote(do: unquote(:bar)()) == quote(do: bar()) assert quote(do: unquote(:bar)(1) do 2 + 3 end) == quote(do: bar(1) do 2 + 3 end) @@ -34,7 +39,7 @@ defmodule Kernel.QuoteTest do assert quote(do: foo.unquote(:bar)(1)) == quote(do: foo.bar(1)) assert quote(do: foo.unquote(:bar)(1) do 2 + 3 end) == quote(do: foo.bar(1) do 2 + 3 end) assert quote(do: foo.unquote({:bar, [], nil})) == quote(do: foo.bar) - assert quote(do: foo.unquote({:bar, [], [1,2]})) == quote(do: foo.bar(1,2)) + assert quote(do: foo.unquote({:bar, [], [1, 2]})) == quote(do: foo.bar(1, 2)) assert Code.eval_quoted(quote(do: Foo.unquote(Bar))) == {Elixir.Foo.Bar, []} assert Code.eval_quoted(quote(do: Foo.unquote(quote do: Bar))) == {Elixir.Foo.Bar, []} @@ -44,7 +49,7 @@ defmodule Kernel.QuoteTest do end end - test :nested_quote do + test "nested quote" do assert {:quote, _, [[do: {:unquote, _, _}]]} = quote(do: quote(do: unquote(x))) end @@ -58,8 +63,8 @@ defmodule Kernel.QuoteTest do end end - test :nested_quote_in_macro do - assert nested_quote_in_macro == 1 + test "nested quote in macro" do + assert nested_quote_in_macro() == 1 end Enum.each [foo: 1, bar: 2, baz: 3], fn {k, v} -> @@ -68,30 +73,30 @@ defmodule Kernel.QuoteTest do end end - test :dynamic_definition_with_unquote do + test "dynamic definition with unquote" do assert foo(1) == 2 assert bar(2) == 4 assert baz(3) == 6 end - test :splice_on_root do + test "splice on root" do contents = [1, 2, 3] assert quote(do: (unquote_splicing(contents))) == quote do: (1; 2; 3) end - test :splice_with_tail do + test "splice with tail" do contents = [1, 2, 3] - assert quote(do: [unquote_splicing(contents)|[1, 2, 3]]) == + assert quote(do: [unquote_splicing(contents) | [1, 2, 3]]) == [1, 2, 3, 1, 2, 3] - assert quote(do: [unquote_splicing(contents)|val]) == + assert quote(do: [unquote_splicing(contents) | val]) == quote(do: [1, 2, 3 | val]) - assert quote(do: [unquote_splicing(contents)|unquote([4])]) == + assert quote(do: [unquote_splicing(contents) | unquote([4])]) == quote(do: [1, 2, 3, 4]) end - test :splice_on_stab do + test "splice on stab" do {fun, []} = Code.eval_quoted(quote(do: fn(unquote_splicing([1, 2, 3])) -> :ok end), []) assert fun.(1, 2, 3) == :ok @@ -101,9 +106,9 @@ defmodule Kernel.QuoteTest do assert fun.(1, 2, 3) == :ok end - test :splice_on_definition do + test "splice on definition" do defmodule Hello do - def world([unquote_splicing(["foo", "bar"])|rest]) do + def world([unquote_splicing(["foo", "bar"]) | rest]) do rest end end @@ -111,7 +116,7 @@ defmodule Kernel.QuoteTest do assert Hello.world(["foo", "bar", "baz"]) == ["baz"] end - test :splice_on_map do + test "splice on map" do assert %{unquote_splicing([foo: :bar])} == %{foo: :bar} assert %{unquote_splicing([foo: :bar]), baz: :bat} == %{foo: :bar, baz: :bat} assert %{unquote_splicing([foo: :bar]), :baz => :bat} == %{foo: :bar, baz: :bat} @@ -121,33 +126,27 @@ defmodule Kernel.QuoteTest do assert %{map | unquote_splicing([foo: :bar])} == %{foo: :bar} end - test :when do - assert [{:->,_,[[{:when,_,[1,2,3,4]}],5]}] = quote(do: (1, 2, 3 when 4 -> 5)) - assert [{:->,_,[[{:when,_,[1,2,3,4]}],5]}] = quote(do: ((1, 2, 3) when 4 -> 5)) + test "when" do + 
assert [{:->, _, [[{:when, _, [1, 2, 3, 4]}], 5]}] = quote(do: (1, 2, 3 when 4 -> 5)) + assert [{:->, _, [[{:when, _, [1, 2, 3, 4]}], 5]}] = quote(do: ((1, 2, 3) when 4 -> 5)) - assert [{:->,_,[[{:when,_,[1,2,3,{:when,_,[4,5]}]}],6]}] = + assert [{:->, _, [[{:when, _, [1, 2, 3, {:when, _, [4, 5]}]}], 6]}] = quote(do: ((1, 2, 3) when 4 when 5 -> 6)) end - test :stab do - assert [{:->, _, [[], nil]}] = (quote do -> end) - assert [{:->, _, [[], nil]}] = (quote do: (->)) - - assert [{:->, _, [[1], nil]}] = (quote do 1 -> end) - assert [{:->, _, [[1], nil]}] = (quote do: (1 ->)) - + test "stab" do assert [{:->, _, [[], 1]}] = (quote do -> 1 end) assert [{:->, _, [[], 1]}] = (quote do: (-> 1)) end - test :bind_quoted do + test "bind quoted" do assert quote(bind_quoted: [foo: 1 + 2], do: foo) == {:__block__, [], [ {:=, [], [{:foo, [], Kernel.QuoteTest}, 3]}, {:foo, [], Kernel.QuoteTest} ]} end - test :literals do + test "literals" do assert (quote do: []) == [] assert (quote do: nil) == nil assert (quote do [] end) == [] @@ -158,23 +157,43 @@ defmodule Kernel.QuoteTest do [line: 3] end - test :with_dynamic_opts do - assert quote(dynamic_opts, do: bar(1, 2, 3)) == {:bar, [line: 3], [1, 2, 3]} + test "with dynamic opts" do + assert quote(dynamic_opts(), do: bar(1, 2, 3)) == {:bar, [line: 3], [1, 2, 3]} end - test :unary_with_integer_precedence do - assert quote(do: +1.foo) == quote(do: (+1).foo) + test "unary with integer precedence" do + assert quote(do: +1.foo) == quote(do: +(1.foo)) assert quote(do: @1.foo) == quote(do: (@1).foo) assert quote(do: &1.foo) == quote(do: (&1).foo) end - test :operators_slash_arity do + test "operators slash arity" do assert {:/, _, [{:+, _, _}, 2]} = quote do: +/2 assert {:/, _, [{:&&, _, _}, 3]} = quote do: &&/3 end + + test "pipe precedence" do + assert {:|>, _, [{:|>, _, [{:foo, _, _}, {:bar, _, _}]}, {:baz, _, _}]} = + quote do: (foo |> bar |> baz) + + assert {:|>, _, [{:|>, _, [{:foo, _, _}, {:bar, _, _}]}, {:baz, _, _}]} = + quote do: (foo do end |> bar |> baz) + + assert {:|>, _, [{:|>, _, [{:foo, _, _}, {:bar, _, _}]}, {:baz, _, _}]} = + quote do: (foo |> bar do end |> baz) + + assert {:|>, _, [{:|>, _, [{:foo, _, _}, {:bar, _, _}]}, {:baz, _, _}]} = + quote do: (foo |> bar |> baz do end) + + assert {:|>, _, [{:|>, _, [{:foo, _, _}, {:bar, _, _}]}, {:baz, _, _}]} = + quote do: (foo do end |> bar |> baz do end) + + assert {:|>, _, [{:|>, _, [{:foo, _, _}, {:bar, _, _}]}, {:baz, _, _}]} = + quote do: (foo do end |> bar do end |> baz do end) + end end -## DO NOT MOVE THIS LINE +# DO NOT MOVE THIS LINE defmodule Kernel.QuoteTest.Errors do defmacro defadd do quote location: :keep do @@ -192,26 +211,26 @@ defmodule Kernel.QuoteTest.ErrorsTest do import Kernel.QuoteTest.Errors # Defines the add function - defadd + defadd() - test :inside_function_error do + test "inside function error" do assert_raise ArithmeticError, fn -> add(:a, :b) end mod = Kernel.QuoteTest.ErrorsTest - file = __ENV__.file |> Path.relative_to_cwd |> String.to_char_list - assert [{^mod, :add, 2, [file: ^file, line: 181]}|_] = System.stacktrace + file = __ENV__.file |> Path.relative_to_cwd |> String.to_charlist + assert [{^mod, :add, 2, [file: ^file, line: 200]} | _] = System.stacktrace end - test :outside_function_error do + test "outside function error" do assert_raise RuntimeError, fn -> - will_raise + will_raise() end mod = Kernel.QuoteTest.ErrorsTest - file = __ENV__.file |> Path.relative_to_cwd |> String.to_char_list - assert [{^mod, _, _, [file: ^file, line: 209]}|_] = System.stacktrace + 
file = __ENV__.file |> Path.relative_to_cwd |> String.to_charlist + assert [{^mod, _, _, [file: ^file, line: 228]} | _] = System.stacktrace end end @@ -261,38 +280,38 @@ defmodule Kernel.QuoteTest.VarHygieneTest do end end - test :no_interference do + test "no interference" do a = 10 - no_interference + no_interference() assert a == 10 end - test :cross_module_interference do - cross_module_no_interference - cross_module_interference - assert read_cross_module == 1 + test "cross module interference" do + cross_module_no_interference() + cross_module_interference() + assert read_cross_module() == 1 end - test :write_interference do - write_interference + test "write interference" do + write_interference() assert a == 1 end - test :read_interference do + test "read interference" do a = 10 - read_interference + read_interference() end - test :nested do + test "nested" do assert (nested 1 do nested 2 do - :ok + _ = :ok end end) == 1 end - test :hat do - assert hat == 1 + test "hat" do + assert hat() == 1 end end @@ -313,7 +332,7 @@ defmodule Kernel.QuoteTest.AliasHygieneTest do alias Dict, as: SuperDict - test :annotate_aliases do + test "annotate aliases" do assert {:__aliases__, [alias: false], [:Foo, :Bar]} = quote(do: Foo.Bar) assert {:__aliases__, [alias: false], [:Dict, :Bar]} = @@ -322,23 +341,23 @@ defmodule Kernel.QuoteTest.AliasHygieneTest do quote(do: SuperDict.Bar) end - test :expand_aliases do + test "expand aliases" do assert Code.eval_quoted(quote do: SuperDict.Bar) == {Elixir.Dict.Bar, []} assert Code.eval_quoted(quote do: alias!(SuperDict.Bar)) == {Elixir.SuperDict.Bar, []} end - test :expand_aliases_without_macro do + test "expand aliases without macro" do alias HashDict, as: SuperDict assert SuperDict.Bar == Elixir.HashDict.Bar end - test :expand_aliases_with_macro_does_not_expand_source_alias do + test "expand aliases with macro does not expand source alias" do alias HashDict, as: Dict, warn: false require Kernel.QuoteTest.AliasHygiene assert Kernel.QuoteTest.AliasHygiene.dict == Elixir.Dict.Bar end - test :expand_aliases_with_macro_has_higher_preference do + test "expand aliases with macro has higher preference" do alias HashDict, as: SuperDict, warn: false require Kernel.QuoteTest.AliasHygiene assert Kernel.QuoteTest.AliasHygiene.super_dict == Elixir.Dict.Bar @@ -348,6 +367,14 @@ end defmodule Kernel.QuoteTest.ImportsHygieneTest do use ExUnit.Case, async: true + # We are redefining |> and using it inside the quote + # and only inside the quote. This code should still compile. 
+ defmacro x |> f do + quote do + unquote(x) |> unquote(f) + end + end + defmacrop get_list_length do quote do length('hello') @@ -366,11 +393,11 @@ defmodule Kernel.QuoteTest.ImportsHygieneTest do end end - test :expand_imports do + test "expand imports" do import Kernel, except: [length: 1] - assert get_list_length == 5 - assert get_list_length_with_partial == 5 - assert get_list_length_with_function == 5 + assert get_list_length() == 5 + assert get_list_length_with_partial() == 5 + assert get_list_length_with_function() == 5 end defmacrop get_string_length do @@ -381,19 +408,19 @@ defmodule Kernel.QuoteTest.ImportsHygieneTest do end end - test :lazy_expand_imports do + test "lazy expand imports" do import Kernel, except: [length: 1] import String, only: [length: 1] - assert get_string_length == 5 + assert get_string_length() == 5 end - test :lazy_expand_imports_no_conflicts do + test "lazy expand imports no conflicts" do import Kernel, except: [length: 1] import String, only: [length: 1] - assert get_list_length == 5 - assert get_list_length_with_partial == 5 - assert get_list_length_with_function == 5 + assert get_list_length() == 5 + assert get_list_length_with_partial() == 5 + assert get_list_length_with_function() == 5 end defmacrop with_length do @@ -404,7 +431,21 @@ defmodule Kernel.QuoteTest.ImportsHygieneTest do end end - test :explicitly_overridden_imports do - assert with_length == 5 + test "explicitly overridden imports" do + assert with_length() == 5 + end + + defmodule BinaryUtils do + defmacro int32 do + quote do + integer-size(32) + end + end + end + + test "checks the context also for variables to zero-arity functions" do + import BinaryUtils + {:int32, meta, __MODULE__} = quote do: int32 + assert meta[:import] == BinaryUtils end end diff --git a/lib/elixir/test/elixir/kernel/raise_test.exs b/lib/elixir/test/elixir/kernel/raise_test.exs index 2b6567e949a..fc82b7b06c8 100644 --- a/lib/elixir/test/elixir/kernel/raise_test.exs +++ b/lib/elixir/test/elixir/kernel/raise_test.exs @@ -133,7 +133,7 @@ defmodule Kernel.RaiseTest do end end - test :rescue_with_underscore_no_exception do + test "rescue with underscore no exception" do result = try do RescueUndefinedModule.go rescue @@ -143,7 +143,7 @@ defmodule Kernel.RaiseTest do assert result end - test :rescue_with_higher_precedence_than_catch do + test "rescue with higher precedence than catch" do result = try do RescueUndefinedModule.go catch @@ -155,7 +155,7 @@ defmodule Kernel.RaiseTest do assert result end - test :rescue_runtime_error do + test "rescue runtime error" do result = try do raise "an exception" rescue @@ -177,7 +177,7 @@ defmodule Kernel.RaiseTest do refute result end - test :rescue_named_runtime_error do + test "rescue named runtime error" do result = try do raise "an exception" rescue @@ -189,7 +189,7 @@ defmodule Kernel.RaiseTest do assert result == "an exception" end - test :rescue_argument_error_from_elixir do + test "rescue argument error from elixir" do result = try do raise ArgumentError, "" rescue @@ -199,37 +199,37 @@ defmodule Kernel.RaiseTest do assert result end - test :rescue_named_with_underscore do + test "rescue named without aliases" do result = try do raise "an exception" rescue - x in _ -> Exception.message(x) + x -> Exception.message(x) end assert result == "an exception" end - test :wrap_custom_erlang_error do + test "wrap custom Erlang error" do result = try do :erlang.error(:sample) rescue x in [RuntimeError, ErlangError] -> Exception.message(x) end - assert result == "erlang error: 
:sample" + assert result == "Erlang error: :sample" end - test :undefined_function_error do + test "undefined function error" do result = try do DoNotExist.for_sure() rescue x in [UndefinedFunctionError] -> Exception.message(x) end - assert result == "undefined function: DoNotExist.for_sure/0" + assert result == "function DoNotExist.for_sure/0 is undefined (module DoNotExist is not available)" end - test :function_clause_error do + test "function clause error" do result = try do zero(1) rescue @@ -239,7 +239,7 @@ defmodule Kernel.RaiseTest do assert result == "no function clause matching in Kernel.RaiseTest.zero/1" end - test :badarg_error do + test "badarg error" do result = try do :erlang.error(:badarg) rescue @@ -249,7 +249,7 @@ defmodule Kernel.RaiseTest do assert result == "argument error" end - test :tuple_badarg_error do + test "tuple badarg error" do result = try do :erlang.error({:badarg, [1, 2, 3]}) rescue @@ -259,7 +259,7 @@ defmodule Kernel.RaiseTest do assert result == "argument error: [1, 2, 3]" end - test :badarith_error do + test "badarith error" do result = try do :erlang.error(:badarith) rescue @@ -269,7 +269,7 @@ defmodule Kernel.RaiseTest do assert result == "bad argument in arithmetic expression" end - test :badarity_error do + test "badarity error" do fun = fn(x) -> x end string = "#{inspect(fun)} with arity 1 called with 2 arguments (1, 2)" @@ -282,10 +282,11 @@ defmodule Kernel.RaiseTest do assert result == string end - test :badfun_error do - x = :example + test "badfun error" do + # Avoid "invalid function call" warning in >= OTP 19 + x = fn -> :example end result = try do - x.(2) + x.().(2) rescue x in [BadFunctionError] -> Exception.message(x) end @@ -293,7 +294,7 @@ defmodule Kernel.RaiseTest do assert result == "expected a function, got: :example" end - test :badmatch_error do + test "badmatch error" do x = :example result = try do ^x = zero(0) @@ -304,7 +305,45 @@ defmodule Kernel.RaiseTest do assert result == "no match of right hand side value: 0" end - test :case_clause_error do + test "bad key error" do + result = try do + %{%{} | foo: :bar} + rescue + x in [KeyError] -> Exception.message(x) + end + + assert result == "key :foo not found" + + result = try do + %{}.foo + rescue + x in [KeyError] -> Exception.message(x) + end + + assert result == "key :foo not found in: %{}" + end + + test "bad map error" do + result = try do + %{zero(0) | foo: :bar} + rescue + x in [BadMapError] -> Exception.message(x) + end + + assert result == "expected a map, got: 0" + end + + test "bad boolean error" do + result = try do + 1 and true + rescue + x in [BadBooleanError] -> Exception.message(x) + end + + assert result == "expected a boolean on left-side of \"and\", got: 1" + end + + test "case clause error" do x = :example result = try do case zero(0) do @@ -317,7 +356,7 @@ defmodule Kernel.RaiseTest do assert result == "no case clause matching: 0" end - test :cond_clause_error do + test "cond clause error" do result = try do cond do !zero(0) -> :ok @@ -329,7 +368,7 @@ defmodule Kernel.RaiseTest do assert result == "no cond clause evaluated to a true value" end - test :try_clause_error do + test "try clause error" do f = fn() -> :example end result = try do try do @@ -345,28 +384,28 @@ defmodule Kernel.RaiseTest do assert result == "no try clause matching: :example" end - test :undefined_function_error_as_erlang_error do + test "undefined function error as Erlang error" do result = try do DoNotExist.for_sure() rescue x in [ErlangError] -> Exception.message(x) end - assert 
result == "undefined function: DoNotExist.for_sure/0" + assert result == "function DoNotExist.for_sure/0 is undefined (module DoNotExist is not available)" end defmacrop exceptions do [ErlangError] end - test :with_macros do + test "with macros" do result = try do DoNotExist.for_sure() rescue - x in exceptions -> Exception.message(x) + x in exceptions() -> Exception.message(x) end - assert result == "undefined function: DoNotExist.for_sure/0" + assert result == "function DoNotExist.for_sure/0 is undefined (module DoNotExist is not available)" end defp zero(0), do: 0 diff --git a/lib/elixir/test/elixir/kernel/sigils_test.exs b/lib/elixir/test/elixir/kernel/sigils_test.exs index 28e9f232771..ff71fb043f2 100644 --- a/lib/elixir/test/elixir/kernel/sigils_test.exs +++ b/lib/elixir/test/elixir/kernel/sigils_test.exs @@ -3,19 +3,19 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Kernel.SigilsTest do use ExUnit.Case, async: true - test :sigil_s do + test "sigil s" do assert ~s(foo) == "foo" assert ~s(f#{:o}o) == "foo" assert ~s(f\no) == "f\no" end - test :sigil_s_with_heredoc do + test "sigil s with heredoc" do assert " foo\n\n" == ~s""" f#{:o}o\n """ end - test :sigil_S do + test "sigil S" do assert ~S(foo) == "foo" assert ~S[foo] == "foo" assert ~S{foo} == "foo" @@ -25,22 +25,35 @@ defmodule Kernel.SigilsTest do assert ~S/foo/ == "foo" assert ~S|foo| == "foo" assert ~S(f#{o}o) == "f\#{o}o" + assert ~S(f\#{o}o) == "f\\\#{o}o" assert ~S(f\no) == "f\\no" + assert ~S(foo\)) == "foo)" + assert ~S[foo\]] == "foo]" end - test :sigil_S_with_heredoc do + test "sigil S newline" do + assert ~S(foo\ +bar) in ["foo\\\nbar", "foo\\\r\nbar"] + end + + test "sigil S with heredoc" do assert " f\#{o}o\\n\n" == ~S""" f#{o}o\n """ end - test :sigil_c do + test "sigil s/S expand to binary when possible" do + assert Macro.expand(quote(do: ~s(foo)), __ENV__) == "foo" + assert Macro.expand(quote(do: ~S(foo)), __ENV__) == "foo" + end + + test "sigil c" do assert ~c(foo) == 'foo' assert ~c(f#{:o}o) == 'foo' assert ~c(f\no) == 'f\no' end - test :sigil_C do + test "sigil C" do assert ~C(foo) == 'foo' assert ~C[foo] == 'foo' assert ~C{foo} == 'foo' @@ -51,11 +64,18 @@ defmodule Kernel.SigilsTest do assert ~C(f\no) == 'f\\no' end - test :sigil_w do + test "sigil w" do assert ~w() == [] assert ~w(foo bar baz) == ["foo", "bar", "baz"] assert ~w(foo #{:bar} baz) == ["foo", "bar", "baz"] + assert ~w(#{""}) == [] + assert ~w(foo #{""}) == ["foo"] + assert ~w(#{" foo bar "}) == ["foo", "bar"] + + assert ~w(foo\ #{:bar}) == ["foo", "bar"] + assert ~w(foo\ bar) == ["foo", "bar"] + assert ~w( foo bar @@ -66,7 +86,7 @@ defmodule Kernel.SigilsTest do assert ~w(foo bar baz)a == [:foo, :bar, :baz] assert ~w(foo bar baz)c == ['foo', 'bar', 'baz'] - bad_modifier = quote do: ~w(foo bar baz)x + bad_modifier = quote(do: ~w(foo bar baz)x) assert %ArgumentError{} = catch_error(Code.eval_quoted(bad_modifier)) assert ~w(Foo Bar)a == [:"Foo", :"Bar"] @@ -78,9 +98,12 @@ defmodule Kernel.SigilsTest do assert Macro.expand(quote(do: ~w(a b c)a), __ENV__) == [:a, :b, :c] end - test :sigil_W do + test "sigil W" do + assert ~W() == [] assert ~W(foo #{bar} baz) == ["foo", "\#{bar}", "baz"] + assert ~W(foo\ bar) == ["foo\\", "bar"] + assert ~W( foo bar @@ -98,7 +121,7 @@ defmodule Kernel.SigilsTest do assert ~W(Foo.Bar.Baz)a == [:"Foo.Bar.Baz"] end - test :sigils_matching do + test "sigils matching" do assert ~s(f\(oo) == "f(oo" assert ~s(fo\)o) == "fo)o" assert ~s(f\(o\)o) == "f(o)o" diff --git 
a/lib/elixir/test/elixir/kernel/special_forms_test.exs b/lib/elixir/test/elixir/kernel/special_forms_test.exs new file mode 100644 index 00000000000..6ea7b059793 --- /dev/null +++ b/lib/elixir/test/elixir/kernel/special_forms_test.exs @@ -0,0 +1,7 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule Kernel.SpecialFormsTest do + use ExUnit.Case, async: true + + doctest Kernel.SpecialForms +end diff --git a/lib/elixir/test/elixir/kernel/string_tokenizer_test.exs b/lib/elixir/test/elixir/kernel/string_tokenizer_test.exs new file mode 100644 index 00000000000..4e90654152b --- /dev/null +++ b/lib/elixir/test/elixir/kernel/string_tokenizer_test.exs @@ -0,0 +1,74 @@ +Code.require_file "../test_helper.exs", __DIR__ + +# TODO: Remove this check once we depend only on 20 +if :erlang.system_info(:otp_release) >= '20' do +defmodule Kernel.StringTokenizerTest do + use ExUnit.Case, async: true + + @hello_world String.to_atom("こんにちは世界") + + test "tokenizes vars" do + assert {:"_12", _, nil} = Code.string_to_quoted!("_12") + assert {:"ola", _, nil} = Code.string_to_quoted!("ola") + assert {:"ólá", _, nil} = Code.string_to_quoted!("ólá") + assert {:"óLÁ", _, nil} = Code.string_to_quoted!("óLÁ") + assert {:"ólá?", _, nil} = Code.string_to_quoted!("ólá?") + assert {:"ólá!", _, nil} = Code.string_to_quoted!("ólá!") + assert {@hello_world, _, nil} = Code.string_to_quoted!("こんにちは世界") + assert {:error, _} = Code.string_to_quoted("v@r") + assert {:error, _} = Code.string_to_quoted("1var") + end + + test "tokenizes atoms" do + assert :"_12" = Code.string_to_quoted!(":_12") + assert :"ola" = Code.string_to_quoted!(":ola") + assert :"ólá" = Code.string_to_quoted!(":ólá") + assert :"ólá?" = Code.string_to_quoted!(":ólá?") + assert :"ólá!" = Code.string_to_quoted!(":ólá!") + assert :"ól@" = Code.string_to_quoted!(":ól@") + assert :"ól@!" = Code.string_to_quoted!(":ól@!") + assert :"ó@@!" = Code.string_to_quoted!(":ó@@!") + assert :"Ola" = Code.string_to_quoted!(":Ola") + assert :"Ólá" = Code.string_to_quoted!(":Ólá") + assert :"ÓLÁ" = Code.string_to_quoted!(":ÓLÁ") + assert :"ÓLÁ?" = Code.string_to_quoted!(":ÓLÁ?") + assert :"ÓLÁ!" = Code.string_to_quoted!(":ÓLÁ!") + assert :"ÓL@!" = Code.string_to_quoted!(":ÓL@!") + assert :"Ó@@!" 
= Code.string_to_quoted!(":Ó@@!") + assert @hello_world = Code.string_to_quoted!(":こんにちは世界") + assert {:error, _} = Code.string_to_quoted(":123") + assert {:error, _} = Code.string_to_quoted(":@123") + end + + test "tokenizes keywords" do + assert ["_12": 0] = Code.string_to_quoted!("[_12: 0]") + assert ["ola": 0] = Code.string_to_quoted!("[ola: 0]") + assert ["ólá": 0] = Code.string_to_quoted!("[ólá: 0]") + assert ["ólá?": 0] = Code.string_to_quoted!("[ólá?: 0]") + assert ["ólá!": 0] = Code.string_to_quoted!("[ólá!: 0]") + assert ["ól@": 0] = Code.string_to_quoted!("[ól@: 0]") + assert ["ól@!": 0] = Code.string_to_quoted!("[ól@!: 0]") + assert ["ó@@!": 0] = Code.string_to_quoted!("[ó@@!: 0]") + assert ["Ola": 0] = Code.string_to_quoted!("[Ola: 0]") + assert ["Ólá": 0] = Code.string_to_quoted!("[Ólá: 0]") + assert ["ÓLÁ": 0] = Code.string_to_quoted!("[ÓLÁ: 0]") + assert ["ÓLÁ?": 0] = Code.string_to_quoted!("[ÓLÁ?: 0]") + assert ["ÓLÁ!": 0] = Code.string_to_quoted!("[ÓLÁ!: 0]") + assert ["ÓL@!": 0] = Code.string_to_quoted!("[ÓL@!: 0]") + assert ["Ó@@!": 0] = Code.string_to_quoted!("[Ó@@!: 0]") + assert [{@hello_world, 0}] = Code.string_to_quoted!("[こんにちは世界: 0]") + assert {:error, _} = Code.string_to_quoted("[123: 0]") + assert {:error, _} = Code.string_to_quoted("[@123: 0]") + end + + test "tokenizes aliases" do + assert {:__aliases__, _, [:Ola]} = Code.string_to_quoted!("Ola") + assert {:__aliases__, _, [:M_123]} = Code.string_to_quoted!("M_123") + assert {:error, _} = Code.string_to_quoted("Óla") + assert {:error, _} = Code.string_to_quoted("Olá") + assert {:error, _} = Code.string_to_quoted("Ol@") + assert {:error, _} = Code.string_to_quoted("Ola?") + assert {:error, _} = Code.string_to_quoted("Ola!") + end +end +end \ No newline at end of file diff --git a/lib/elixir/test/elixir/kernel/typespec_test.exs b/lib/elixir/test/elixir/kernel/typespec_test.exs index 80908987812..af71370917a 100644 --- a/lib/elixir/test/elixir/kernel/typespec_test.exs +++ b/lib/elixir/test/elixir/kernel/typespec_test.exs @@ -1,445 +1,579 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Kernel.TypespecTest do - use ExUnit.Case, async: true + use ExUnit.Case + + import ExUnit.CaptureIO + + alias Kernel.TypespecTest.TestTypespec + + defstruct [:hello] # This macro allows us to focus on the result of the # definition and not on the hassles of handling test - # module - defmacrop test_module([{:do, block}]) do + # module. 
+ defmacrop test_module(do: block) do quote do - {:module, _, binary, _} = defmodule TestTypespec do + {:module, _, bytecode, _} = defmodule TestTypespec do unquote(block) end :code.delete(TestTypespec) :code.purge(TestTypespec) - binary + bytecode end end - defp types(module) do - Kernel.Typespec.beam_types(module) + defp types(bytecode) do + Kernel.Typespec.beam_types(bytecode) |> Enum.sort end @skip_specs [__info__: 1] - defp specs(module) do - Kernel.Typespec.beam_specs(module) + defp specs(bytecode) do + Kernel.Typespec.beam_specs(bytecode) |> Enum.reject(fn {sign, _} -> sign in @skip_specs end) |> Enum.sort() end - defp callbacks(module) do - Kernel.Typespec.beam_callbacks(module) + defp callbacks(bytecode) do + Kernel.Typespec.beam_callbacks(bytecode) |> Enum.sort end test "invalid type specification" do - assert_raise CompileError, ~r"invalid type specification: mytype = 1", fn -> + assert_raise CompileError, ~r"invalid type specification: my_type = 1", fn -> + test_module do + @type my_type = 1 + end + end + end + + test "unexpected expression in typespec" do + assert_raise CompileError, ~r"unexpected expression in typespec: \"foobar\"", fn -> test_module do - @type mytype = 1 + @type my_type :: "foobar" end end end test "invalid function specification" do - assert_raise CompileError, ~r"invalid function type specification: myfun = 1", fn -> + assert_raise CompileError, ~r"invalid type specification: \"not a spec\"", fn -> test_module do - @spec myfun = 1 + @spec "not a spec" + end + end + + assert_raise CompileError, ~r"invalid type specification: 1 :: 2", fn -> + test_module do + @spec 1 :: 2 end end end test "@type with a single type" do - module = test_module do - @type mytype :: term + bytecode = test_module do + @type my_type :: term end - assert [type: {:mytype, {:type, _, :term, []}, []}] = - types(module) + assert [type: {:my_type, {:type, _, :term, []}, []}] = + types(bytecode) end test "@type with an atom" do - module = test_module do - @type mytype :: :atom + bytecode = test_module do + @type my_type :: :foo end - assert [type: {:mytype, {:atom, _, :atom}, []}] = - types(module) + assert [type: {:my_type, {:atom, _, :foo}, []}] = + types(bytecode) end test "@type with an atom alias" do - module = test_module do - @type mytype :: Atom + bytecode = test_module do + @type my_type :: Atom end - assert [type: {:mytype, {:atom, _, Atom}, []}] = - types(module) + assert [type: {:my_type, {:atom, _, Atom}, []}] = + types(bytecode) end test "@type with an integer" do - module = test_module do - @type mytype :: 10 + bytecode = test_module do + @type my_type :: 10 end - assert [type: {:mytype, {:integer, _, 10}, []}] = - types(module) + assert [type: {:my_type, {:integer, _, 10}, []}] = + types(bytecode) end test "@type with a negative integer" do - module = test_module do - @type mytype :: -10 + bytecode = test_module do + @type my_type :: -10 end - assert [type: {:mytype, {:op, _, :-, {:integer, _, 10}}, []}] = - types(module) + assert [type: {:my_type, {:op, _, :-, {:integer, _, 10}}, []}] = + types(bytecode) end test "@type with a remote type" do - module = test_module do - @type mytype :: Remote.Some.type - @type mytype_arg :: Remote.type(integer) + bytecode = test_module do + @type my_type :: Remote.Some.type + @type my_type_arg :: Remote.type(integer) end - assert [type: {:mytype, {:remote_type, _, [{:atom, _, Remote.Some}, {:atom, _, :type}, []]}, []}, - type: {:mytype_arg, {:remote_type, _, [{:atom, _, Remote}, {:atom, _, :type}, [{:type, _, :integer, []}]]}, []}] = - 
types(module) + assert [type: {:my_type, {:remote_type, _, [{:atom, _, Remote.Some}, {:atom, _, :type}, []]}, []}, + type: {:my_type_arg, {:remote_type, _, [{:atom, _, Remote}, {:atom, _, :type}, [{:type, _, :integer, []}]]}, []}] = + types(bytecode) end test "@type with a binary" do - module = test_module do - @type mytype :: binary + bytecode = test_module do + @type my_type :: binary end - assert [type: {:mytype, {:type, _, :binary, []}, []}] = - types(module) + assert [type: {:my_type, {:type, _, :binary, []}, []}] = + types(bytecode) end test "@type with an empty binary" do - module = test_module do - @type mytype :: <<>> + bytecode = test_module do + @type my_type :: <<>> end - assert [type: {:mytype, {:type, _, :binary, [{:integer, _, 0}, {:integer, _, 0}]}, []}] = - types(module) + assert [type: {:my_type, {:type, _, :binary, [{:integer, _, 0}, {:integer, _, 0}]}, []}] = + types(bytecode) end test "@type with a binary with a base size" do - module = test_module do - @type mytype :: <<_ :: 3>> + bytecode = test_module do + @type my_type :: <<_::3>> end - assert [type: {:mytype, {:type, _, :binary, [{:integer, _, 3}, {:integer, _, 0}]}, []}] = - types(module) + assert [type: {:my_type, {:type, _, :binary, [{:integer, _, 3}, {:integer, _, 0}]}, []}] = + types(bytecode) end test "@type with a binary with a unit size" do - module = test_module do - @type mytype :: <<_ :: _ * 8>> + bytecode = test_module do + @type my_type :: <<_::_*8>> end - assert [type: {:mytype, {:type, _, :binary, [{:integer, _, 0}, {:integer, _, 8}]}, []}] = - types(module) + assert [type: {:my_type, {:type, _, :binary, [{:integer, _, 0}, {:integer, _, 8}]}, []}] = + types(bytecode) end - test "@type with a range" do - module = test_module do - @type mytype :: range(1, 10) + test "@type with a binary with a size and unit size" do + bytecode = test_module do + @type my_type :: <<_::3, _::_*8>> end - assert [type: {:mytype, {:type, _, :range, [{:integer, _, 1}, {:integer, _, 10}]}, []}] = - types(module) + assert [type: {:my_type, {:type, _, :binary, [{:integer, _, 3}, {:integer, _, 8}]}, []}] = + types(bytecode) + end + + test "@type with invalid binary spec" do + assert_raise CompileError, fn -> + test_module do + @type my_type :: <<_::3*8>> + end + end end test "@type with a range op" do - module = test_module do - @type mytype :: 1..10 + bytecode = test_module do + @type my_type :: 1..10 + end + + assert [type: {:my_type, {:type, _, :range, [{:integer, _, 1}, {:integer, _, 10}]}, []}] = + types(bytecode) + end + + test "@type with a keyword map" do + bytecode = test_module do + @type my_type :: %{hello: :world} end - assert [type: {:mytype, {:type, _, :range, [{:integer, _, 1}, {:integer, _, 10}]}, []}] = - types(module) + assert [type: {:my_type, + {:type, _, :map, [ + {:type, _, :map_field_exact, [{:atom, _, :hello}, {:atom, _, :world}]} + ]}, + []}] = types(bytecode) end test "@type with a map" do - module = test_module do - @type mytype :: %{hello: :world} + bytecode = test_module do + @type my_type :: %{required(:a) => :b, optional(:c) => :d} end - assert [type: {:mytype, + assert [type: {:my_type, {:type, _, :map, [ - {:type, _, :map_field_assoc, {:atom, _, :hello}, {:atom, _, :world}} + {:type, _, :map_field_exact, [{:atom, _, :a}, {:atom, _, :b}]}, + {:type, _, :map_field_assoc, [{:atom, _, :c}, {:atom, _, :d}]} ]}, - []}] = types(module) + []}] = types(bytecode) end test "@type with a struct" do - module = test_module do - @type mytype :: %User{hello: :world} + bytecode = test_module do + defstruct 
[hello: nil, other: nil] + @type my_type :: %TestTypespec{hello: :world} end - assert [type: {:mytype, + assert [type: {:my_type, {:type, _, :map, [ - {:type, _, :map_field_assoc, {:atom, _, :__struct__}, {:atom, _, User}}, - {:type, _, :map_field_assoc, {:atom, _, :hello}, {:atom, _, :world}} + {:type, _, :map_field_exact, [{:atom, _, :__struct__}, {:atom, _, TestTypespec}]}, + {:type, _, :map_field_exact, [{:atom, _, :hello}, {:atom, _, :world}]}, + {:type, _, :map_field_exact, [{:atom, _, :other}, {:type, _, :term, []}]} + ]}, + []}] = types(bytecode) + end + + test "@type with undefined struct" do + assert_raise UndefinedFunctionError, fn -> + test_module do + @type my_type :: %ThisModuleDoesNotExist{} + end + end + + assert_raise CompileError, ~r"struct is not defined for TestTypespec", fn -> + test_module do + @type my_type :: %TestTypespec{} + end + end + end + + test "@type with a struct with undefined field" do + assert_raise CompileError, ~r"undefined field no_field on struct TestTypespec", fn -> + test_module do + defstruct [:hello, :eric] + @type my_type :: %TestTypespec{no_field: :world} + end + end + end + + test "@type when overriding Elixir built-in" do + assert_raise CompileError, ~r"type struct/0 is a builtin type", fn -> + test_module do + @type struct :: :oops + end + end + end + + test "@type when overriding Erlang built-in" do + assert_raise CompileError, ~r"type list/0 is a builtin type", fn -> + test_module do + @type list :: :oops + end + end + end + + test "@type with public record" do + bytecode = test_module do + require Record + Record.defrecord :timestamp, [date: 1, time: 2] + @type my_type :: record(:timestamp, time: :foo) + end + + assert [type: {:my_type, + {:type, _, :tuple, [ + {:atom, 0, :timestamp}, {:type, 0, :term, []}, {:atom, 0, :foo} + ]}, + []}] = types(bytecode) + end + + test "@type with private record" do + bytecode = test_module do + require Record + Record.defrecordp :timestamp, [date: 1, time: 2] + @type my_type :: record(:timestamp, time: :foo) + end + + assert [type: {:my_type, + {:type, _, :tuple, [ + {:atom, 0, :timestamp}, {:type, 0, :term, []}, {:atom, 0, :foo} ]}, - []}] = types(module) + []}] = types(bytecode) end - test "@type with a tuple" do - module = test_module do - @type mytype :: tuple - @type mytype1 :: {} - @type mytype2 :: {1, 2} + test "@type with undefined record" do + assert_raise CompileError, ~r"unknown record :this_record_does_not_exist", fn -> + test_module do + @type my_type :: record(:this_record_does_not_exist, []) + end end + end - assert [type: {:mytype, {:type, _, :tuple, :any}, []}, - type: {:mytype1, {:type, _, :tuple, []}, []}, - type: {:mytype2, {:type, _, :tuple, [{:integer, _, 1}, {:integer, _, 2}]}, []}] = - types(module) + test "@type with a record with undefined field" do + assert_raise CompileError, ~r"undefined field no_field on record :timestamp", fn -> + test_module do + require Record + Record.defrecord :timestamp, [date: 1, time: 2] + @type my_type :: record(:timestamp, no_field: :foo) + end + end + end + + test "@type with an invalid map notation" do + assert_raise CompileError, ~r"invalid map specification", fn -> + test_module do + @type content :: %{atom | String.t => term} + end + end end test "@type with list shortcuts" do - module = test_module do - @type mytype :: [] - @type mytype1 :: [integer] - @type mytype2 :: [integer, ...] + bytecode = test_module do + @type my_type :: [] + @type my_type1 :: [integer] + @type my_type2 :: [integer, ...] 
end - assert [type: {:mytype, {:type, _, :nil, []}, []}, - type: {:mytype1, {:type, _, :list, [{:type, _, :integer, []}]}, []}, - type: {:mytype2, {:type, _, :nonempty_list, [{:type, _, :integer, []}]}, []}] = - types(module) + assert [type: {:my_type, {:type, _, nil, []}, []}, + type: {:my_type1, {:type, _, :list, [{:type, _, :integer, []}]}, []}, + type: {:my_type2, {:type, _, :nonempty_list, [{:type, _, :integer, []}]}, []}] = + types(bytecode) end test "@type with a fun" do - module = test_module do - @type mytype :: (... -> any) + bytecode = test_module do + @type my_type :: (... -> any) end - assert [type: {:mytype, {:type, _, :fun, []}, []}] = - types(module) + assert [type: {:my_type, {:type, _, :fun, []}, []}] = + types(bytecode) end test "@type with a fun with multiple arguments and return type" do - module = test_module do - @type mytype :: (integer, integer -> integer) + bytecode = test_module do + @type my_type :: (integer, integer -> integer) end - assert [type: {:mytype, {:type, _, :fun, [{:type, _, :product, + assert [type: {:my_type, {:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}, {:type, _, :integer, []}]}, {:type, _, :integer, []}]}, []}] = - types(module) + types(bytecode) end test "@type with a fun with no arguments and return type" do - module = test_module do - @type mytype :: (() -> integer) + bytecode = test_module do + @type my_type :: (() -> integer) end - assert [type: {:mytype, {:type, _, :fun, [{:type, _, :product, []}, + assert [type: {:my_type, {:type, _, :fun, [{:type, _, :product, []}, {:type, _, :integer, []}]}, []}] = - types(module) + types(bytecode) end test "@type with a fun with any arity and return type" do - module = test_module do - @type mytype :: (... -> integer) + bytecode = test_module do + @type my_type :: (... 
-> integer) end - assert [type: {:mytype, {:type, _, :fun, [{:type, _, :any}, + assert [type: {:my_type, {:type, _, :fun, [{:type, _, :any}, {:type, _, :integer, []}]}, []}] = - types(module) + types(bytecode) end test "@type with a union" do - module = test_module do - @type mytype :: integer | char_list | atom + bytecode = test_module do + @type my_type :: integer | charlist | atom end - assert [type: {:mytype, {:type, _, :union, [{:type, _, :integer, []}, - {:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :char_list}, []]}, + assert [type: {:my_type, {:type, _, :union, [{:type, _, :integer, []}, + {:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :charlist}, []]}, {:type, _, :atom, []}]}, []}] = - types(module) + types(bytecode) end test "@type with keywords" do - module = test_module do - @type mytype :: [first: integer, step: integer, last: integer] + bytecode = test_module do + @type my_type :: [first: integer, step: integer, last: integer] end - assert [type: {:mytype, {:type, _, :list, [ + assert [type: {:my_type, {:type, _, :list, [ {:type, _, :union, [ {:type, _, :tuple, [{:atom, _, :first}, {:type, _, :integer, []}]}, {:type, _, :tuple, [{:atom, _, :step}, {:type, _, :integer, []}]}, {:type, _, :tuple, [{:atom, _, :last}, {:type, _, :integer, []}]} ]} - ]}, []}] = types(module) + ]}, []}] = types(bytecode) end test "@type with parameters" do - module = test_module do - @type mytype(x) :: x - @type mytype1(x) :: list(x) - @type mytype2(x, y) :: {x, y} + bytecode = test_module do + @type my_type(x) :: x + @type my_type1(x) :: list(x) + @type my_type2(x, y) :: {x, y} end - assert [type: {:mytype, {:var, _, :x}, [{:var, _, :x}]}, - type: {:mytype1, {:type, _, :list, [{:var, _, :x}]}, [{:var, _, :x}]}, - type: {:mytype2, {:type, _, :tuple, [{:var, _, :x}, {:var, _, :y}]}, [{:var, _, :x}, {:var, _, :y}]}] = - types(module) + assert [type: {:my_type, {:var, _, :x}, [{:var, _, :x}]}, + type: {:my_type1, {:type, _, :list, [{:var, _, :x}]}, [{:var, _, :x}]}, + type: {:my_type2, {:type, _, :tuple, [{:var, _, :x}, {:var, _, :y}]}, [{:var, _, :x}, {:var, _, :y}]}] = + types(bytecode) end test "@type with annotations" do - module = test_module do - @type mytype :: (named :: integer) - @type mytype1 :: (a :: integer -> integer) + bytecode = test_module do + @type my_type :: (named :: integer) + @type my_type1 :: (a :: integer -> integer) end - assert [type: {:mytype, {:ann_type, _, [{:var, _, :named}, {:type, _, :integer, []}]}, []}, - type: {:mytype1, {:type, _, :fun, [{:type, _, :product, [{:ann_type, _, [{:var, _, :a}, {:type, _, :integer, []}]}]}, {:type, _, :integer, []}]}, []}] = - types(module) + assert [type: {:my_type, {:ann_type, _, [{:var, _, :named}, {:type, _, :integer, []}]}, []}, + type: {:my_type1, {:type, _, :fun, [{:type, _, :product, [{:ann_type, _, [{:var, _, :a}, {:type, _, :integer, []}]}]}, {:type, _, :integer, []}]}, []}] = + types(bytecode) end test "@opaque(type)" do - module = test_module do - @opaque mytype(x) :: x + bytecode = test_module do + @opaque my_type(x) :: x end - assert [opaque: {:mytype, {:var, _, :x}, [{:var, _, :x}]}] = - types(module) + assert [opaque: {:my_type, {:var, _, :x}, [{:var, _, :x}]}] = + types(bytecode) end test "@type + opaque" do - module = test_module do - @type mytype :: tuple - @opaque mytype1 :: {} + bytecode = test_module do + @type my_type :: tuple + @opaque my_type1 :: {} end - assert [opaque: {:mytype1, _, []}, - type: {:mytype, _, []},] = - types(module) + assert [opaque: {:my_type1, _, []}, + type: {:my_type, _, []}, ] = + 
types(bytecode) end - test "@type from structs" do - module = test_module do - defstruct name: nil, age: 0 :: non_neg_integer + test "@type unquote fragment" do + quoted = quote unquote: false do + name = :my_type + type = :foo + @type unquote(name)() :: unquote(type) + end + bytecode = test_module do + Module.eval_quoted(__MODULE__, quoted) end - assert [type: {:t, {:type, _, :map, [ - {:type, _, :map_field_assoc, {:atom, _, :name}, {:type, _, :term, []}}, - {:type, _, :map_field_assoc, {:atom, _, :age}, {:type, _, :non_neg_integer, []}}, - {:type, _, :map_field_assoc, {:atom, _, :__struct__}, {:atom, _, TestTypespec}} - ]}, []}] = types(module) + assert [type: {:my_type, {:atom, _, :foo}, []}] = + types(bytecode) end - test "@type from dynamic structs" do - module = test_module do - fields = [name: nil, age: 0] - defstruct fields + test "@type with module attributes" do + bytecode = test_module do + @keyword Keyword + @type kw :: @keyword.t + @type kw(value) :: @keyword.t(value) end - assert [type: {:t, {:type, _, :map, [ - {:type, _, :map_field_assoc, {:atom, _, :name}, {:type, _, :term, []}}, - {:type, _, :map_field_assoc, {:atom, _, :age}, {:type, _, :term, []}}, - {:type, _, :map_field_assoc, {:atom, _, :__struct__}, {:atom, _, TestTypespec}} - ]}, []}] = types(module) + assert [type: {:kw, {:remote_type, _, [{:atom, _, Keyword}, {:atom, _, :t}, []]}, _}, + type: {:kw, {:remote_type, _, [{:atom, _, Keyword}, {:atom, _, :t}, [{:var, _, :value}]]}, [{:var, _, :value}]}] = + types(bytecode) end - test "@type unquote fragment" do - module = test_module do - quoted = quote unquote: false do - name = :mytype - type = :atom - @type unquote(name)() :: unquote(type) + test "invalid remote @type with module attribute that does not evaluate to a module" do + assert_raise CompileError, ~r/\(@foo is "bar"\)/, fn -> + test_module do + @foo "bar" + @type t :: @foo.t end - Module.eval_quoted(__MODULE__, quoted) |> elem(0) end - - assert [type: {:mytype, {:atom, _, :atom}, []}] = - types(module) end test "defines_type?" 
do test_module do - @type mytype :: tuple - @type mytype(a) :: [a] - assert Kernel.Typespec.defines_type?(__MODULE__, :mytype, 0) - assert Kernel.Typespec.defines_type?(__MODULE__, :mytype, 1) - refute Kernel.Typespec.defines_type?(__MODULE__, :mytype, 2) + @type my_type :: tuple + @type my_type(a) :: [a] + assert Kernel.Typespec.defines_type?(__MODULE__, :my_type, 0) + assert Kernel.Typespec.defines_type?(__MODULE__, :my_type, 1) + refute Kernel.Typespec.defines_type?(__MODULE__, :my_type, 2) end end test "@spec(spec)" do - module = test_module do - def myfun1(x), do: x - def myfun2(), do: :ok - def myfun3(x, y), do: {x, y} - def myfun4(x), do: x - @spec myfun1(integer) :: integer - @spec myfun2() :: integer - @spec myfun3(integer, integer) :: {integer, integer} - @spec myfun4(x :: integer) :: integer + bytecode = test_module do + def my_fun1(x), do: x + def my_fun2(), do: :ok + def my_fun3(x, y), do: {x, y} + def my_fun4(x), do: x + @spec my_fun1(integer) :: integer + @spec my_fun2() :: integer + @spec my_fun3(integer, integer) :: {integer, integer} + @spec my_fun4(x :: integer) :: integer + end + + assert [{{:my_fun1, 1}, [{:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}]}]}, + {{:my_fun2, 0}, [{:type, _, :fun, [{:type, _, :product, []}, {:type, _, :integer, []}]}]}, + {{:my_fun3, 2}, [{:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}, {:type, _, :integer, []}]}, {:type, _, :tuple, [{:type, _, :integer, []}, {:type, _, :integer, []}]}]}]}, + {{:my_fun4, 1}, [{:type, _, :fun, [{:type, _, :product, [{:ann_type, _, [{:var, _, :x}, {:type, _, :integer, []}]}]}, {:type, _, :integer, []}]}]}] = + specs(bytecode) + end + + test "@spec(spec) for unreachable private function" do + # Run it inside capture_io/2 so that the "my_fun/1 is unused" + # warning doesn't get printed among the ExUnit test results. 
+ output = ExUnit.CaptureIO.capture_io :stderr, fn -> + bytecode = test_module do + defp my_fun(x), do: x + @spec my_fun(integer) :: integer + end + + assert [] == specs(bytecode) end - assert [{{:myfun1, 1}, [{:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}]}]}, - {{:myfun2, 0}, [{:type, _, :fun, [{:type, _, :product, []}, {:type, _, :integer, []}]}]}, - {{:myfun3, 2}, [{:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}, {:type, _, :integer, []}]}, {:type, _, :tuple, [{:type, _, :integer, []}, {:type, _, :integer, []}]}]}]}, - {{:myfun4, 1}, [{:type, _, :fun, [{:type, _, :product, [{:ann_type, _, [{:var, _, :x}, {:type, _, :integer, []}]}]}, {:type, _, :integer, []}]}]}] = - specs(module) + assert output != "" end test "@spec(spec) with guards" do - module = test_module do - def myfun1(x), do: x - @spec myfun1(x) :: boolean when [x: integer] + bytecode = test_module do + def my_fun1(x), do: x + @spec my_fun1(x) :: boolean when [x: integer] - def myfun2(x), do: x - @spec myfun2(x) :: x when [x: var] + def my_fun2(x), do: x + @spec my_fun2(x) :: x when [x: var] - def myfun3(_x, y), do: y - @spec myfun3(x, y) :: y when [y: x, x: var] + def my_fun3(_x, y), do: y + @spec my_fun3(x, y) :: y when [y: x, x: var] end - assert [{{:myfun1, 1}, [{:type, _, :bounded_fun, [{:type, _, :fun, [{:type, _, :product, [{:var, _, :x}]}, {:type, _, :boolean, []}]}, [{:type, _, :constraint, [{:atom, _, :is_subtype}, [{:var, _, :x}, {:type, _, :integer, []}]]}]]}]}, - {{:myfun2, 1}, [{:type, _, :fun, [{:type, _, :product, [{:var, _, :x}]}, {:var, _, :x}]}]}, - {{:myfun3, 2}, [{:type, _, :bounded_fun, [{:type, _, :fun, [{:type, _, :product, [{:var, _, :x}, {:var, _, :y}]}, {:var, _, :y}]}, [{:type, _, :constraint, [{:atom, _, :is_subtype}, [{:var, _, :y}, {:var, _, :x}]]}]]}]}] = - specs(module) + assert [{{:my_fun1, 1}, [{:type, _, :bounded_fun, [{:type, _, :fun, [{:type, _, :product, [{:var, _, :x}]}, {:type, _, :boolean, []}]}, [{:type, _, :constraint, [{:atom, _, :is_subtype}, [{:var, _, :x}, {:type, _, :integer, []}]]}]]}]}, + {{:my_fun2, 1}, [{:type, _, :fun, [{:type, _, :product, [{:var, _, :x}]}, {:var, _, :x}]}]}, + {{:my_fun3, 2}, [{:type, _, :bounded_fun, [{:type, _, :fun, [{:type, _, :product, [{:var, _, :x}, {:var, _, :y}]}, {:var, _, :y}]}, [{:type, _, :constraint, [{:atom, _, :is_subtype}, [{:var, _, :y}, {:var, _, :x}]]}]]}]}] = + specs(bytecode) end test "@callback(callback)" do - module = test_module do - @callback myfun(integer) :: integer - @callback myfun() :: integer - @callback myfun(integer, integer) :: {integer, integer} + bytecode = test_module do + @callback my_fun(integer) :: integer + @callback my_fun() :: integer + @callback my_fun(integer, integer) :: {integer, integer} end - assert [{{:myfun, 0}, [{:type, _, :fun, [{:type, _, :product, []}, {:type, _, :integer, []}]}]}, - {{:myfun, 1}, [{:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}]}]}, - {{:myfun, 2}, [{:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}, {:type, _, :integer, []}]}, {:type, _, :tuple, [{:type, _, :integer, []}, {:type, _, :integer, []}]}]}]}] = - callbacks(module) + assert [{{:my_fun, 0}, [{:type, _, :fun, [{:type, _, :product, []}, {:type, _, :integer, []}]}]}, + {{:my_fun, 1}, [{:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}]}]}, + {{:my_fun, 2}, [{:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}, {:type, _, :integer, []}]}, {:type, _, 
:tuple, [{:type, _, :integer, []}, {:type, _, :integer, []}]}]}]}] = + callbacks(bytecode) end test "@spec + @callback" do - module = test_module do - def myfun(x), do: x - @spec myfun(integer) :: integer - @spec myfun(char_list) :: char_list - @callback cb(integer) :: integer + bytecode = test_module do + def my_fun(x), do: x + @spec my_fun(integer) :: integer + @spec my_fun(charlist) :: charlist + @callback cb(integer) :: integer end assert [{{:cb, 1}, [{:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}]}]}] = - callbacks(module) + callbacks(bytecode) - assert [{{:myfun, 1}, [ + assert [{{:my_fun, 1}, [ {:type, _, :fun, [{:type, _, :product, [ - {:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :char_list}, []]}]}, - {:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :char_list}, []]}]}, + {:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :charlist}, []]}]}, + {:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :charlist}, []]}]}, {:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}]}]}] = - specs(module) + specs(bytecode) end test "block handling" do - module = test_module do + bytecode = test_module do @spec foo((() -> [ integer ])) :: integer def foo(_), do: 1 end @@ -447,44 +581,204 @@ defmodule Kernel.TypespecTest do [{:type, _, :fun, [{:type, _, :product, [ {:type, _, :fun, [{:type, _, :product, []}, {:type, _, :list, [{:type, _, :integer, []}]}]}]}, {:type, _, :integer, []}]}]}] = - specs(module) + specs(bytecode) end # Conversion to AST test "type_to_ast" do quoted = [ - (quote do: @type with_ann() :: (t :: atom())), - (quote do: @type empty_tuple_type() :: {}), - (quote do: @type imm_type_1() :: 1), - (quote do: @type imm_type_2() :: :atom), - (quote do: @type simple_type() :: integer()), - (quote do: @type param_type(p) :: [p]), - (quote do: @type union_type() :: integer() | binary() | boolean()), - (quote do: @type binary_type1() :: <<_ :: _ * 8>>), - (quote do: @type binary_type2() :: <<_ :: 3 * 8>>), - (quote do: @type binary_type3() :: <<_ :: 3>>), - (quote do: @type tuple_type() :: {integer()}), - (quote do: @type ftype() :: (() -> any()) | (() -> integer()) | ((integer() -> integer()))), - (quote do: @type cl() :: char_list()), - (quote do: @type ab() :: as_boolean(term())), - (quote do: @type vaf() :: (... -> any())), - (quote do: @type rng() :: 1 .. 
10), - (quote do: @type opts() :: [first: integer(), step: integer(), last: integer()]), - (quote do: @type ops() :: {+1,-1}), - (quote do: @type my_map() :: %{hello: :world}), - (quote do: @type my_struct() :: %User{hello: :world}), - ] |> Enum.sort - - module = test_module do - Module.eval_quoted __MODULE__, quote do: (unquote_splicing(quoted)) - end - - types = types(module) + quote(do: @type with_ann() :: (t :: atom())), + quote(do: @type a_tuple() :: tuple()), + quote(do: @type empty_tuple() :: {}), + quote(do: @type one_tuple() :: {:foo}), + quote(do: @type two_tuple() :: {:foo, :bar}), + quote(do: @type imm_type_1() :: 1), + quote(do: @type imm_type_2() :: :foo), + quote(do: @type simple_type() :: integer()), + quote(do: @type param_type(p) :: [p]), + quote(do: @type union_type() :: integer() | binary() | boolean()), + quote(do: @type binary_type1() :: <<_::_*8>>), + quote(do: @type binary_type2() :: <<_::3>>), + quote(do: @type binary_type3() :: <<_::3, _::_*8>>), + quote(do: @type tuple_type() :: {integer()}), + quote(do: @type ftype() :: (() -> any()) | (() -> integer()) | ((integer() -> integer()))), + quote(do: @type cl() :: charlist()), + quote(do: @type st() :: struct()), + quote(do: @type ab() :: as_boolean(term())), + quote(do: @type kw() :: keyword()), + quote(do: @type kwt() :: keyword(term())), + quote(do: @type vaf() :: (... -> any())), + quote(do: @type rng() :: 1..10), + quote(do: @type opts() :: [first: integer(), step: integer(), last: integer()]), + quote(do: @type ops() :: {+1, -1}), + quote(do: @type a_map() :: map()), + quote(do: @type empty_map() :: %{}), + quote(do: @type my_map() :: %{hello: :world}), + quote(do: @type my_req_map() :: %{required(0) => :foo}), + quote(do: @type my_opt_map() :: %{optional(0) => :foo}), + quote(do: @type my_struct() :: %Kernel.TypespecTest{hello: :world}), + quote(do: @type list1() :: list()), + quote(do: @type list2() :: [0]), + quote(do: @type list3() :: [...]), + quote(do: @type list4() :: [0, ...]), + quote(do: @type nil_list() :: []), + ] + |> Enum.sort + + bytecode = test_module do + Module.eval_quoted __MODULE__, quoted + end + + types = types(bytecode) + + Enum.each(Enum.zip(types, quoted), fn {{:type, type}, definition} -> + ast = Kernel.Typespec.type_to_ast(type) + assert Macro.to_string(quote(do: @type unquote(ast))) == Macro.to_string(definition) + end) + end + + # This is a test that implements all types specified in lib/elixir/pages/Typespecs.md + test "test documented types and their AST" do + defmodule SomeStruct do + defstruct [:key] + end + + quoted = [ + ## Basic types + quote(do: @type basic_any() :: any()), + quote(do: @type basic_none() :: none()), + quote(do: @type basic_atom() :: atom()), + quote(do: @type basic_map() :: map()), + quote(do: @type basic_pid() :: pid()), + quote(do: @type basic_port() :: port()), + quote(do: @type basic_reference() :: reference()), + quote(do: @type basic_struct() :: struct()), + quote(do: @type basic_tuple() :: tuple()), + # Numbers + quote(do: @type basic_float() :: float()), + quote(do: @type basic_integer() :: integer()), + quote(do: @type basic_neg_integer() :: neg_integer()), + quote(do: @type basic_non_neg_integer() :: non_neg_integer()), + quote(do: @type basic_pos_integer() :: pos_integer()), + # Lists + quote(do: @type basic_list_type() :: list(integer())), + quote(do: @type basic_nonempty_list_type() :: nonempty_list(integer())), + quote(do: @type basic_maybe_improper_list_type() :: maybe_improper_list(integer(), atom())), + quote(do: @type 
basic_nonempty_improper_list_type() :: nonempty_improper_list(integer(), atom())), + quote(do: @type basic_nonempty_maybe_improper_list_type() :: nonempty_maybe_improper_list(integer(), atom())), + + ## Literals + quote(do: @type literal_atom() :: :atom), + quote(do: @type literal_integer() :: 1), + quote(do: @type literal_integers() :: 1..10), + quote(do: @type literal_empty_bitstring() :: <<>>), + quote(do: @type literal_size_0() :: <<_::0>>), + quote(do: @type literal_unit_1() :: <<_::_*1>>), + quote(do: @type literal_size_1_unit_8() :: <<_::100, _::_*256>>), + quote(do: @type literal_function_arity_any() :: (... -> integer())), + quote(do: @type literal_function_arity_0() :: (() -> integer())), + quote(do: @type literal_function_arity_2() :: (integer(), atom() -> integer())), + quote(do: @type literal_list_type() :: [integer()]), + quote(do: @type literal_empty_list() :: []), + quote(do: @type literal_list_nonempty() :: [...]), + quote(do: @type literal_nonempty_list_type() :: [atom(), ...]), + quote(do: @type literal_keyword_list_fixed_key() :: [key: integer()]), + quote(do: @type literal_keyword_list_fixed_key2() :: [{:key, integer()}]), + quote(do: @type literal_keyword_list_type_key() :: [{binary(), integer()}]), + quote(do: @type literal_empty_map() :: %{}), + quote(do: @type literal_map_with_key() :: %{key: integer()}), + quote(do: @type literal_map_with_required_key() :: %{required(bitstring()) => integer()}), + quote(do: @type literal_map_with_optional_key() :: %{optional(bitstring()) => integer()}), + # TODO: Remove by 1.5, when the following line with give a warning + quote(do: @type literal_map_with_arrow() :: %{any() => any()}), + quote(do: @type literal_struct_all_fields_any_type() :: %SomeStruct{}), + quote(do: @type literal_struct_all_fields_key_type() :: %SomeStruct{key: integer()}), + quote(do: @type literal_empty_tuple() :: {}), + quote(do: @type literal_2_element_tuple() :: {1, 2}), + + ## Built-in types + quote(do: @type builtin_term() :: term()), + quote(do: @type builtin_arity() :: arity()), + quote(do: @type builtin_as_boolean() :: as_boolean(:t)), + quote(do: @type builtin_binary() :: binary()), + quote(do: @type builtin_bitstring() :: bitstring()), + quote(do: @type builtin_boolean() :: boolean()), + quote(do: @type builtin_byte() :: byte()), + quote(do: @type builtin_char() :: char()), + quote(do: @type builtin_charlist() :: charlist()), + quote(do: @type builtin_nonempty_charlist() :: nonempty_charlist()), + quote(do: @type builtin_fun() :: fun()), + quote(do: @type builtin_function() :: function()), + quote(do: @type builtin_identifier() :: identifier()), + quote(do: @type builtin_iodata() :: iodata()), + quote(do: @type builtin_iolist() :: iolist()), + quote(do: @type builtin_keyword() :: keyword()), + quote(do: @type builtin_keyword_value_type() :: keyword(:t)), + quote(do: @type builtin_list() :: list()), + quote(do: @type builtin_nonempty_list() :: nonempty_list()), + quote(do: @type builtin_maybe_improper_list() :: maybe_improper_list()), + quote(do: @type builtin_nonempty_maybe_improper_list() :: nonempty_maybe_improper_list()), + quote(do: @type builtin_mfa() :: mfa()), + quote(do: @type builtin_module() :: module()), + quote(do: @type builtin_no_return() :: no_return()), + quote(do: @type builtin_node() :: node()), + quote(do: @type builtin_number() :: number()), + quote(do: @type builtin_struct() :: struct()), + quote(do: @type builtin_timeout() :: timeout()), + + ## Remote types + quote(do: @type remote_enum_t0() :: Enum.t()), + quote(do: @type 
remote_keyword_t1() :: Keyword.t(integer())), + ] + |> Enum.sort + + bytecode = test_module do + Module.eval_quoted __MODULE__, quoted + end + + types = types(bytecode) Enum.each(Enum.zip(types, quoted), fn {{:type, type}, definition} -> ast = Kernel.Typespec.type_to_ast(type) - assert Macro.to_string(quote do: @type unquote(ast)) == Macro.to_string(definition) + ast_string = Macro.to_string(quote(do: @type unquote(ast))) + + case type do + # These cases do not translate directly to their own string version. + {:basic_list_type, _, _} -> + assert ast_string == "@type(basic_list_type() :: [integer()])" + + {:basic_nonempty_list_type, _, _} -> + assert ast_string == "@type(basic_nonempty_list_type() :: [integer(), ...])" + + {:literal_empty_bitstring, _, _} -> + assert ast_string == "@type(literal_empty_bitstring() :: <<_::0>>)" + + {:literal_keyword_list_fixed_key, _, _} -> + assert ast_string == "@type(literal_keyword_list_fixed_key() :: [{:key, integer()}])" + + {:literal_keyword_list_fixed_key2, _, _} -> + assert ast_string == "@type(literal_keyword_list_fixed_key2() :: [{:key, integer()}])" + + # TODO: Remove by 1.5 + {:literal_map_with_arrow, _, _} -> + assert ast_string == "@type(literal_map_with_arrow() :: %{optional(any()) => any()})" + + {:literal_struct_all_fields_any_type, _, _} -> + assert ast_string == "@type(literal_struct_all_fields_any_type() :: %Kernel.TypespecTest.SomeStruct{key: term()})" + + {:literal_struct_all_fields_key_type, _, _} -> + assert ast_string == "@type(literal_struct_all_fields_key_type() :: %Kernel.TypespecTest.SomeStruct{key: integer()})" + + {:builtin_fun, _, _} -> + assert ast_string == "@type(builtin_fun() :: (... -> any()))" + + {:builtin_nonempty_list, _, _} -> + assert ast_string == "@type(builtin_nonempty_list() :: [...])" + + _ -> + assert ast_string == Macro.to_string(definition) + end end) end @@ -496,54 +790,122 @@ defmodule Kernel.TypespecTest do test "spec_to_ast" do quoted = [ - (quote do: @spec a() :: integer()), - (quote do: @spec a(atom()) :: integer() | [{}]), - (quote do: @spec a(b) :: integer() when [b: integer()]), - (quote do: @spec a(b) :: b when [b: var]), - (quote do: @spec a(c :: atom()) :: atom()), - ] |> Enum.sort + quote(do: @spec foo() :: integer()), + quote(do: @spec foo(atom()) :: integer() | [{}]), + quote(do: @spec foo(arg) :: integer() when [arg: integer()]), + quote(do: @spec foo(arg) :: arg when [arg: var]), + quote(do: @spec foo(arg :: atom()) :: atom()), + ] + |> Enum.sort - module = test_module do - def a, do: 1 - def a(a), do: a - Module.eval_quoted __MODULE__, quote do: (unquote_splicing(quoted)) + bytecode = test_module do + def foo(), do: 1 + def foo(arg), do: arg + Module.eval_quoted __MODULE__, quote(do: (unquote_splicing(quoted))) end - specs = Enum.flat_map(specs(module), fn {{_, _}, specs} -> + specs = Enum.flat_map(specs(bytecode), fn {{_, _}, specs} -> Enum.map(specs, fn spec -> - quote do: @spec unquote(Kernel.Typespec.spec_to_ast(:a, spec)) + quote(do: @spec unquote(Kernel.Typespec.spec_to_ast(:foo, spec))) end) - end) |> Enum.sort + end) + |> Enum.sort Enum.each(Enum.zip(specs, quoted), fn {spec, definition} -> assert Macro.to_string(spec) == Macro.to_string(definition) end) end - test "typedoc retrieval" do - {:module, _, binary, _} = defmodule T do - @typedoc "A" - @type a :: any - @typep b :: any - @typedoc "C" - @opaque c(x, y) :: {x, y} - @type d :: any - @spec uses_b() :: b - def uses_b(), do: nil + test "retrieval invalid data" do + assert Kernel.Typespec.beam_types(Unknown) == nil + assert 
Kernel.Typespec.beam_specs(Unknown) == nil + end + + defmodule SampleCallbacks do + @callback first(integer) :: integer + @callback foo(atom(), binary) :: binary + @callback bar(External.hello, my_var :: binary) :: binary + @callback guarded(my_var) :: my_var when my_var: binary + @callback orr(atom | integer) :: atom + @callback literal(123, {atom}, :foo, [integer], true) :: atom + @macrocallback last(integer) :: Macro.t + @macrocallback last() :: atom + @optional_callbacks bar: 2, last: 0 + end + + test "callbacks" do + assert SampleCallbacks.behaviour_info(:callbacks) |> Enum.sort() == + ["MACRO-last": 1, "MACRO-last": 2, bar: 2, first: 1, foo: 2, guarded: 1, literal: 5, orr: 1] + end + + test "optional callbacks" do + assert SampleCallbacks.behaviour_info(:optional_callbacks) |> Enum.sort() == + ["MACRO-last": 1, bar: 2] + end + + test "default is not supported" do + assert_raise ArgumentError, fn -> + test_module do + @callback hello(num \\ 0 :: integer) :: integer + end + end + + assert_raise ArgumentError, fn -> + test_module do + @callback hello(num :: integer \\ 0) :: integer + end + end + + assert_raise ArgumentError, fn -> + test_module do + @macrocallback hello(num \\ 0 :: integer) :: Macro.t + end + end + + assert_raise ArgumentError, fn -> + test_module do + @macrocallback hello(num :: integer \\ 0) :: Macro.t + end + end + + assert_raise ArgumentError, fn -> + test_module do + @spec hello(num \\ 0 :: integer) :: integer + end end - :code.delete(T) - :code.purge(T) + assert_raise ArgumentError, fn -> + test_module do + @spec hello(num :: integer \\ 0) :: integer + end + end + end - assert [ - {{:c, 2}, "C"}, - {{:a, 0}, "A"} - ] = Kernel.Typespec.beam_typedocs(binary) + test "@spec shows readable error message when return type is missing" do + assert_raise CompileError, ~r"type specification missing return type: my_fun\(integer\)", fn -> + test_module do + @spec my_fun(integer) + end + end end - test "retrieval invalid data" do - assert Kernel.Typespec.beam_typedocs(Unknown) == nil - assert Kernel.Typespec.beam_types(Unknown) == nil - assert Kernel.Typespec.beam_specs(Unknown) == nil + test "warns on discouraged types" do + message = capture_io(:stderr, fn -> + test_module do + @type foo :: string() + @type bar :: nonempty_string() + end + end) + + string_discouraged = + "string() type use is discouraged. " <> + "For character lists, use charlist() type, for strings, String.t()\n" + + nonempty_string_discouraged = + "nonempty_string() type use is discouraged. 
" <> + "For non-empty character lists, use nonempty_charlist() type, for strings, String.t()\n" + + assert message =~ string_discouraged + assert message =~ nonempty_string_discouraged end end diff --git a/lib/elixir/test/elixir/kernel/warning_test.exs b/lib/elixir/test/elixir/kernel/warning_test.exs index fbfa7dad5f6..48865960cfe 100644 --- a/lib/elixir/test/elixir/kernel/warning_test.exs +++ b/lib/elixir/test/elixir/kernel/warning_test.exs @@ -8,26 +8,175 @@ defmodule Kernel.WarningTest do capture_io(:stderr, fun) end - test :unused_variable do - assert capture_err(fn -> + test "unused variable" do + output = capture_err(fn -> Code.eval_string """ defmodule Sample do def hello(arg), do: nil + + if true do + user = :warning + else + :nothing + end + end + """ + end) + assert output =~ "variable \"arg\" is unused" + assert output =~ "variable \"user\" is unused" + after + purge Sample + end + + test "unused variable in redefined function in different file" do + output = capture_err(fn -> + Code.eval_string """ + defmodule Sample do + defmacro __using__(_) do + quote location: :keep do + def function(arg) + end + end + end + """ + Code.eval_string(""" + defmodule RedefineSample do + use Sample + def function(var123), do: nil + end + """, [], file: "redefine_sample.ex") + end) + assert output =~ "redefine_sample.ex:3" + assert output =~ "variable \"var123\" is unused" + after + purge Sample + purge RedefineSample + end + + test "useless literal" do + message = "code block contains unused literal \"oops\"" + + assert capture_err(fn -> + Code.eval_string """ + "oops" + :ok + """ + end) =~ message + + assert capture_err(fn -> + Code.eval_string """ + fn -> + "oops" + :ok + end + """ + end) =~ message + + assert capture_err(fn -> + Code.eval_string """ + try do + "oops" + :ok + after + :ok + end + """ + end) =~ message + end + + test "useless attr" do + message = capture_err(fn -> + Code.eval_string """ + defmodule Sample do + @foo 1 + @bar 1 + @foo + + def bar do + @bar + :ok + end + end + """ + end) + + assert message =~ "module attribute @foo in code block has no effect as it is never returned " + assert message =~ "module attribute @bar in code block has no effect as it is never returned " + after + purge Sample + end + + test "useless var" do + message = "variable foo in code block has no effect as it is never returned " + + assert capture_err(fn -> + Code.eval_string """ + foo = 1 + foo + :ok + """ + end) =~ message + + assert capture_err(fn -> + Code.eval_string """ + fn -> + foo = 1 + foo + :ok + end + """ + end) =~ message + + assert capture_err(fn -> + Code.eval_string """ + try do + foo = 1 + foo + :ok + after + :ok + end + """ + end) =~ message + + assert capture_err(fn -> + Code.eval_string """ + node() + :ok + """ + end) == "" + end + + test "underscored variable on match" do + assert capture_err(fn -> + Code.eval_string """ + {_arg, _arg} = {1, 1} + """ + end) =~ "the underscored variable \"_arg\" appears more than once in a match" + end + + test "underscored variable on assign" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Sample do + def fun(_var) do + _var + 1 + end end """ - end) =~ "warning: variable arg is unused" + end) =~ "the underscored variable \"_var\" is used after being set" after purge Sample end - test :unused_function do + test "unused function" do assert capture_err(fn -> Code.eval_string """ defmodule Sample1 do defp hello, do: nil end """ - end) =~ "warning: function hello/0 is unused" + end) =~ "function hello/0 is unused" assert 
capture_err(fn -> Code.eval_string """ @@ -47,37 +196,37 @@ defmodule Kernel.WarningTest do defp d(x), do: x end """ - end) =~ "warning: function c/2 is unused" + end) =~ "function c/2 is unused" after purge [Sample1, Sample2, Sample3] end - test :unused_cyclic_functions do + test "unused cyclic functions" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do - defp a, do: b - defp b, do: a + defp a, do: b() + defp b, do: a() end """ - end) =~ "warning: function a/0 is unused" + end) =~ "function a/0 is unused" after purge Sample end - test :unused_macro do + test "unused macro" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do defmacrop hello, do: nil end """ - end) =~ "warning: macro hello/0 is unused" + end) =~ "macro hello/0 is unused" after purge Sample end - test :shadowing do + test "shadowing" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do @@ -94,7 +243,7 @@ defmodule Kernel.WarningTest do purge Sample end - test :unused_default_args do + test "unused default args" do assert capture_err(fn -> Code.eval_string ~S""" defmodule Sample1 do @@ -102,7 +251,7 @@ defmodule Kernel.WarningTest do defp b(arg1 \\ 1, arg2 \\ 2, arg3 \\ 3), do: [arg1, arg2, arg3] end """ - end) =~ "warning: default arguments in b/3 are never used" + end) =~ "default arguments in b/3 are never used" assert capture_err(fn -> Code.eval_string ~S""" @@ -111,7 +260,7 @@ defmodule Kernel.WarningTest do defp b(arg1 \\ 1, arg2 \\ 2, arg3 \\ 3), do: [arg1, arg2, arg3] end """ - end) =~ "warning: the first 2 default arguments in b/3 are never used" + end) =~ "the first 2 default arguments in b/3 are never used" assert capture_err(fn -> Code.eval_string ~S""" @@ -120,7 +269,7 @@ defmodule Kernel.WarningTest do defp b(arg1 \\ 1, arg2 \\ 2, arg3 \\ 3), do: [arg1, arg2, arg3] end """ - end) =~ "warning: the first default argument in b/3 is never used" + end) =~ "the first default argument in b/3 is never used" assert capture_err(fn -> Code.eval_string ~S""" @@ -134,26 +283,54 @@ defmodule Kernel.WarningTest do purge [Sample1, Sample2, Sample3, Sample4] end - test :unused_import do + test "unused import" do assert capture_err(fn -> Code.compile_string """ defmodule Sample do - import :lists, only: [flatten: 1] + import :lists def a, do: nil end """ - end) =~ "warning: unused import :lists" + end) =~ "unused import :lists\n" assert capture_err(fn -> Code.compile_string """ - import :lists, only: [flatten: 1] + import :lists """ - end) =~ "warning: unused import :lists" + end) =~ "unused import :lists\n" after - purge [Sample] + purge Sample + end + + test "unused import of one of the functions in :only" do + output = capture_err(fn -> + Code.compile_string """ + defmodule Sample do + import String, only: [upcase: 1, downcase: 1, trim: 1] + def a, do: upcase("hello") + end + """ + end) + assert output =~ "unused import String.downcase/1" + assert output =~ "unused import String.trim/1" + after + purge Sample + end + + test "unused import of any of the functions in :only" do + assert capture_err(fn -> + Code.compile_string """ + defmodule Sample do + import String, only: [upcase: 1, downcase: 1] + def a, do: nil + end + """ + end) =~ "unused import String\n" + after + purge Sample end - test :unused_alias do + test "unused alias" do assert capture_err(fn -> Code.compile_string """ defmodule Sample do @@ -161,12 +338,26 @@ defmodule Kernel.WarningTest do def a, do: nil end """ - end) =~ "warning: unused alias List" + end) =~ "unused alias List" after - purge [Sample] + purge 
Sample end - test :unused_inside_dynamic_module do + test "unused alias when also import" do + assert capture_err(fn -> + Code.compile_string """ + defmodule Sample do + alias :lists, as: List + import MapSet + new() + end + """ + end) =~ "unused alias List" + after + purge Sample + end + + test "unused inside dynamic module" do import List, only: [flatten: 1], warn: false assert capture_err(fn -> @@ -174,19 +365,32 @@ defmodule Kernel.WarningTest do import String, only: [downcase: 1] def world do - flatten([1,2,3]) + flatten([1, 2, 3]) end end - end) =~ "warning: unused import String" + end) =~ "unused import String" after - purge [Sample] + purge Sample + end + + # TODO: Remove this check once we depend only on 20 + if :erlang.system_info(:otp_release) >= '20' do + test "duplicate map keys" do + assert capture_err(fn -> + defmodule DuplicateKeys do + assert %{a: :b, a: :c} == %{a: :c} + assert %{1 => 2, 1 => 3} == %{1 => 3} + assert %{:a => :b, a: :c} == %{a: :c} + end + end) =~ "key :a will be overridden in map" + end end - test :unused_guard do + test "unused guard" do assert capture_err(fn -> Code.eval_string """ - defmodule Sample1 do - def is_atom_case do + defmodule Sample do + def atom_case do v = "bc" case v do _ when is_atom(v) -> :ok @@ -195,12 +399,16 @@ defmodule Kernel.WarningTest do end end """ - end) =~ "nofile:5: warning: the guard for this clause evaluates to 'false'" + end) =~ "this check/guard will always yield the same result" + after + purge Sample + end + test "previous clause always matches" do assert capture_err(fn -> Code.eval_string """ - defmodule Sample2 do - def is_binary_cond do + defmodule Sample do + def binary_cond do v = "bc" cond do is_binary(v) -> :bin @@ -209,37 +417,37 @@ defmodule Kernel.WarningTest do end end """ - end) =~ "nofile:6: warning: this clause cannot match because a previous clause at line 5 always matches" + end) =~ "this clause cannot match because a previous clause at line 5 always matches" after - purge [Sample1, Sample2] + purge Sample end - test :empty_clause do + test "empty clause" do assert capture_err(fn -> Code.eval_string """ defmodule Sample1 do def hello end """ - end) =~ "warning: empty clause provided for nonexistent function or macro hello/0" + end) =~ "implementation not provided for predefined def hello/0" after - purge [Sample1] + purge Sample1 end - test :used_import_via_alias do + test "used import via alias" do assert capture_err(fn -> Code.eval_string """ defmodule Sample1 do import List, only: [flatten: 1] defmacro generate do - List.duplicate(quote(do: flatten([1,2,3])), 100) + List.duplicate(quote(do: flatten([1, 2, 3])), 100) end end defmodule Sample2 do import Sample1 - generate + generate() end """ end) == "" @@ -247,7 +455,7 @@ defmodule Kernel.WarningTest do purge [Sample1, Sample2] end - test :clause_not_match do + test "clause not match" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do @@ -255,12 +463,12 @@ defmodule Kernel.WarningTest do def hello, do: nil end """ - end) =~ "warning: this clause cannot match because a previous clause at line 2 always matches" + end) =~ "this clause cannot match because a previous clause at line 2 always matches" after purge Sample end - test :clause_with_defaults_should_be_first do + test "clause with defaults should be first" do assert capture_err(fn -> Code.eval_string ~S""" defmodule Sample do @@ -268,53 +476,52 @@ defmodule Kernel.WarningTest do def hello(arg \\ 0), do: nil end """ - end) =~ "warning: clause with defaults should be the first 
clause in def hello/1" + end) =~ "definitions with multiple clauses and default values require a header" after purge Sample end - test :unused_with_local_with_overridable do + test "clauses with default should use fun head" do assert capture_err(fn -> - Code.eval_string """ + Code.eval_string ~S""" defmodule Sample do - def hello, do: world - defp world, do: :ok - defoverridable [hello: 0] - def hello, do: :ok + def hello(arg \\ 0), do: nil + def hello(arg), do: nil end """ - end) =~ "warning: function world/0 is unused" + end) =~ "definitions with multiple clauses and default values require a header" after purge Sample end - test :used_with_local_with_reattached_overridable do + test "unused with local with overridable" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do - def hello, do: world + def hello, do: world() defp world, do: :ok - defoverridable [hello: 0, world: 0] + defoverridable [hello: 0] + def hello, do: :ok end """ - end) == "" + end) =~ "function world/0 is unused" after purge Sample end - test :undefined_module_attribute do + test "undefined module attribute" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do @foo end """ - end) =~ "warning: undefined module attribute @foo, please remove access to @foo or explicitly set it to nil before access" + end) =~ "undefined module attribute @foo, please remove access to @foo or explicitly set it before access" after purge Sample end - test :undefined_module_attribute_in_function do + test "undefined module attribute in function" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do @@ -323,36 +530,36 @@ defmodule Kernel.WarningTest do end end """ - end) =~ "warning: undefined module attribute @foo, please remove access to @foo or explicitly set it to nil before access" + end) =~ "undefined module attribute @foo, please remove access to @foo or explicitly set it before access" after purge Sample end - test :undefined_module_attribute_with_file do + test "undefined module attribute with file" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do @foo end """ - end) =~ "warning: undefined module attribute @foo, please remove access to @foo or explicitly set it to nil before access" + end) =~ "undefined module attribute @foo, please remove access to @foo or explicitly set it before access" after purge Sample end - test :in_guard_empty_list do + test "in guard empty list" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do def a(x) when x in [], do: x end """ - end) =~ "warning: the guard for this clause evaluates to 'false'" + end) =~ "this check/guard will always yield the same result" after purge Sample end - test :no_effect_operator do + test "no effect operator" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do @@ -362,58 +569,70 @@ defmodule Kernel.WarningTest do end end """ - end) =~ "warning: use of operator != has no effect" + end) =~ "use of operator != has no effect" after purge Sample end - test :undefined_function_for_behaviour do + test "badarg warning" do + assert capture_err(fn -> + assert_raise ArgumentError, fn -> + Code.eval_string """ + defmodule Sample do + Atom.to_string "abc" + end + """ + end + end) =~ "this expression will fail with ArgumentError" + after + purge [Sample] + end + + test "undefined function for behaviour" do assert capture_err(fn -> Code.eval_string """ defmodule Sample1 do - use Behaviour - defcallback foo + @callback foo :: term end defmodule Sample2 do @behaviour Sample1 end """ - end) =~ 
"warning: undefined behaviour function foo/0 (for behaviour Sample1)" + end) =~ "undefined behaviour function foo/0 (for behaviour Sample1)" after - purge [Sample1, Sample2, Sample3] + purge [Sample1, Sample2] end - test :undefined_macro_for_behaviour do + test "undefined macro for behaviour" do assert capture_err(fn -> Code.eval_string """ defmodule Sample1 do - use Behaviour - defmacrocallback foo + @macrocallback foo :: Macro.t end defmodule Sample2 do @behaviour Sample1 end """ - end) =~ "warning: undefined behaviour macro foo/0 (for behaviour Sample1)" + end) =~ "undefined behaviour macro foo/0 (for behaviour Sample1)" after - purge [Sample1, Sample2, Sample3] + purge [Sample1, Sample2] end - test :undefined_behavior do + test "undefined behavior" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do @behavior Hello end """ - end) =~ "warning: @behavior attribute is not supported, please use @behaviour instead" + end) =~ "@behavior attribute is not supported, please use @behaviour instead" after - purge [Sample] + purge Sample end - test :undefined_macro_for_protocol do + test "undefined macro for protocol" do assert capture_err(fn -> Code.eval_string """ defprotocol Sample1 do @@ -423,12 +642,12 @@ defmodule Kernel.WarningTest do defimpl Sample1, for: Atom do end """ - end) =~ "warning: undefined protocol function foo/1 (for protocol Sample1)" + end) =~ "undefined protocol function foo/1 (for protocol Sample1)" after purge [Sample1, Sample1.Atom] end - test :overidden_def do + test "overridden def" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do @@ -437,25 +656,54 @@ defmodule Kernel.WarningTest do def foo(x, 2), do: x * 2 end """ - end) =~ "nofile:4: warning: clauses for the same def should be grouped together, def foo/2 was previously defined (nofile:2)" + end) =~ "clauses for the same def should be grouped together, def foo/2 was previously defined (nofile:2)" after - purge [Sample] + purge Sample end - test :warning_with_overridden_file do - assert capture_err(fn -> + test "warning with overridden file" do + output = capture_err(fn -> Code.eval_string """ defmodule Sample do @file "sample" def foo(x), do: :ok end """ - end) =~ "sample:3: warning: variable x is unused" + end) + assert output =~ "variable \"x\" is unused" + assert output =~ "sample:3" after - purge [Sample] + purge Sample end - test :typedoc_on_typep do + test "warning on codepoint escape" do + assert capture_err(fn -> + Code.eval_string "? " + end) =~ "found ? 
followed by codepoint 0x20 (space), please use \\s instead" + end + + test "duplicated docs" do + output = capture_err(fn -> + Code.eval_string """ + defmodule Sample do + @doc "Something" + @doc "Another" + def foo, do: :ok + + @doc false + @doc "Doc" + def bar, do: :ok + end + """ + end) + assert output =~ "redefining @doc attribute previously set at line 2" + assert output =~ "nofile:3: Sample (module)" + refute output =~ "nofile:7" + after + purge Sample + end + + test "typedoc on typep" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do @@ -465,21 +713,93 @@ defmodule Kernel.WarningTest do def foo(), do: nil end """ - end) =~ "nofile:3: warning: type priv/0 is private, @typedoc's are always discarded for private types" + end) =~ "type priv/0 is private, @typedoc's are always discarded for private types" after - purge [Sample] + purge Sample + end + + test "attribute with no use" do + content = capture_err(fn -> + Code.eval_string """ + defmodule Sample do + @at "Something" + end + """ + end) + assert content =~ "module attribute @at was set but never used" + assert content =~ "nofile:2" + after + purge Sample end - test :typedoc_with_no_type do + test "typedoc with no type" do assert capture_err(fn -> Code.eval_string """ defmodule Sample do @typedoc "Something" end """ - end) =~ "nofile:1: warning: @typedoc provided but no type follows it" + end) =~ "module attribute @typedoc was set but no type follows it" after - purge [Sample] + purge Sample + end + + test "doc with no function" do + assert capture_err(fn -> + Code.eval_string """ + defmodule Sample do + @doc "Something" + end + """ + end) =~ "module attribute @doc was set but no definition follows it" + after + purge Sample + end + + test "pipe without explicit parentheses" do + assert capture_err(fn -> + Code.eval_string """ + [5, 6, 7, 3] + |> Enum.map_join "", &(Integer.to_string(&1)) + |> String.to_integer + """ + end) =~ "parentheses are required when piping into a function call" + end + + test "variable is being expanded to function call" do + output = capture_err(fn -> + Code.eval_string """ + self + defmodule Sample do + def my_node(), do: node + end + """ + end) + assert output =~ "variable \"self\" does not exist and is being expanded to \"self()\"" + assert output =~ "variable \"node\" does not exist and is being expanded to \"node()\"" + after + purge Sample + end + + defmodule User do + defstruct [:name] + end + + test ":__struct__ is ignored when using structs" do + assert capture_err(fn -> + Code.eval_string """ + assert %Kernel.WarningTest.User{__struct__: Ignored, name: "joe"} == + %Kernel.WarningTest.User{name: "joe"} + """, [], __ENV__ + end) =~ "key :__struct__ is ignored when using structs" + + assert capture_err(fn -> + Code.eval_string """ + user = %Kernel.WarningTest.User{name: "meg"} + assert %Kernel.WarningTest.User{user | __struct__: Ignored, name: "joe"} == + %Kernel.WarningTest.User{__struct__: Kernel.WarningTest.User, name: "joe"} + """, [], __ENV__ + end) =~ "key :__struct__ is ignored when using structs" end defp purge(list) when is_list(list) do diff --git a/lib/elixir/test/elixir/kernel/with_test.exs b/lib/elixir/test/elixir/kernel/with_test.exs new file mode 100644 index 00000000000..d49db60fb75 --- /dev/null +++ b/lib/elixir/test/elixir/kernel/with_test.exs @@ -0,0 +1,94 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule Kernel.WithTest do + use ExUnit.Case, async: true + + test "basic with" do + assert with({:ok, res} <- ok(41), do: res) == 41 + assert with(res 
<- four(), do: res + 10) == 14 + end + + test "matching with" do + assert with(_..42 <- 1..42, do: :ok) == :ok + assert with({:ok, res} <- :error, do: res) == :error + assert with({:ok, _} = res <- ok(42), do: elem(res, 1)) == 42 + end + + test "with guards" do + assert with(x when x < 2 <- four(), do: :ok) == 4 + assert with(x when x > 2 <- four(), do: :ok) == :ok + assert with(x when x < 2 when x == 4 <- four(), do: :ok) == :ok + end + + test "pin matching with" do + key = :ok + assert with({^key, res} <- ok(42), do: res) == 42 + end + + test "two levels with" do + result = with({:ok, n1} <- ok(11), n2 <- 22, do: n1 + n2) + assert result == 33 + + result = with(n1 <- 11, {:ok, n2} <- :error, do: n1 + n2) + assert result == :error + end + + test "binding inside with" do + result = + with {:ok, n1} <- ok(11), + n2 = n1 + 10, + {:ok, n3} <- ok(22), do: n2 + n3 + assert result == 43 + + result = + with {:ok, n1} <- ok(11), + n2 = n1 + 10, + {:ok, n3} <- error(), do: n2 + n3 + assert result == :error + end + + test "does not leak variables to else" do + state = 1 + result = with 1 <- state, state = 2, :ok <- error(), do: state, else: (_ -> state) + assert result == 1 + assert state == 1 + end + + test "errors in with" do + assert_raise RuntimeError, fn -> + with({:ok, res} <- oops(), do: res) + end + + assert_raise RuntimeError, fn -> + with({:ok, res} <- ok(42), res = res + oops(), do: res) + end + end + + test "else conditions" do + assert with({:ok, res} <- 41, do: res, else: ({:error, error} -> error; res -> res + 1)) == 42 + assert with({:ok, res} <- 41, do: res, else: (res when res == 41 -> res + 1; res -> res)) == 42 + assert with({:ok, res} <- 41, do: res, else: (_ -> :error)) == :error + end + + test "else conditions with match error" do + assert_raise WithClauseError, "no with clause matching: :error", fn -> + with({:ok, res} <- error(), do: res, else: ({:error, error} -> error)) + end + end + + defp four() do + 4 + end + + defp error() do + :error + end + + defp ok(num) do + {:ok, num} + end + + defp oops() do + raise("oops") + end +end diff --git a/lib/elixir/test/elixir/kernel_test.exs b/lib/elixir/test/elixir/kernel_test.exs index eee1a160d58..0a36842d3d8 100644 --- a/lib/elixir/test/elixir/kernel_test.exs +++ b/lib/elixir/test/elixir/kernel_test.exs @@ -3,14 +3,32 @@ Code.require_file "test_helper.exs", __DIR__ defmodule KernelTest do use ExUnit.Case, async: true + doctest Kernel + + defp empty_list(), do: [] + test "=~/2" do assert ("abcd" =~ ~r/c(d)/) == true assert ("abcd" =~ ~r/e/) == false + assert ("abcd" =~ ~R/c(d)/) == true + assert ("abcd" =~ ~R/e/) == false string = "^ab+cd*$" assert (string =~ "ab+") == true assert (string =~ "bb") == false + assert ("abcd" =~ ~r//) == true + assert ("abcd" =~ ~R//) == true + assert ("abcd" =~ "") == true + + assert ("" =~ ~r//) == true + assert ("" =~ ~R//) == true + assert ("" =~ "") == true + + assert ("" =~ "abcd") == false + assert ("" =~ ~r/abcd/) == false + assert ("" =~ ~R/abcd/) == false + assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn -> 1234 =~ "hello" end @@ -18,6 +36,34 @@ defmodule KernelTest do assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn -> 1234 =~ ~r"hello" end + + assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn -> + 1234 =~ ~R"hello" + end + + assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn -> + ~r"hello" =~ "hello" + end + + assert_raise FunctionClauseError, "no function 
clause matching in Kernel.=~/2", fn -> + ~r"hello" =~ ~r"hello" + end + + assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn -> + :abcd =~ ~r// + end + + assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn -> + :abcd =~ "" + end + + assert_raise FunctionClauseError, "no function clause matching in Regex.match?/2", fn -> + "abcd" =~ nil + end + + assert_raise FunctionClauseError, "no function clause matching in Regex.match?/2", fn -> + "abcd" =~ :abcd + end end test "^" do @@ -30,87 +76,20 @@ defmodule KernelTest do end test "match?/2" do - assert match?(_, List.first(1)) == true - assert binding([:x]) == [] - a = List.first([0]) assert match?(b when b > a, 1) == true - assert binding([:b]) == [] + assert binding() == [a: 0] assert match?(b when b > a, -1) == false - assert binding([:b]) == [] - end - - test "nil?/1" do - assert nil?(nil) == true - assert nil?(0) == false - assert nil?(false) == false - end - - test "in/2" do - assert 2 in [1, 2, 3] - assert 2 in 1..3 - refute 4 in [1, 2, 3] - refute 4 in 1..3 - - list = [1, 2, 3] - assert 2 in list - refute 4 in list - end - - @at_list [4,5] - @at_range 6..8 - def fun_in(x) when x in [0], do: :list - def fun_in(x) when x in 1..3, do: :range - def fun_in(x) when x in @at_list, do: :at_list - def fun_in(x) when x in @at_range, do: :at_range - - test "in/2 in function guard" do - assert fun_in(0) == :list - assert fun_in(2) == :range - assert fun_in(5) == :at_list - assert fun_in(8) == :at_range - end - - defmacrop case_in(x, y) do - quote do - case 0 do - _ when unquote(x) in unquote(y) -> true - _ -> false - end - end - end - - test "in/2 in case guard" do - assert case_in(1, [1,2,3]) == true - assert case_in(1, 1..3) == true - assert case_in(2, 1..3) == true - assert case_in(3, 1..3) == true - assert case_in(-3, -1..-3) == true + assert binding() == [a: 0] end - test "paren as nil" do - assert nil?(()) == true - assert ((); ();) == nil - assert [ 1, (), 3 ] == [1, nil, 3 ] - assert [do: ()] == [do: nil] - assert {1, (), 3} == {1, nil, 3} - assert (Kernel.&& nil, ()) == nil - assert (Kernel.&& nil, ()) == nil - assert (() && ()) == nil - assert (if(() && ()) do - :ok - else - :error - end) == :error - end + def exported?, do: not_exported?() + defp not_exported?, do: true - test "__info__(:macros)" do - assert {:in, 2} in Kernel.__info__(:macros) - end - - test "__info__(:functions)" do - assert not ({:__info__, 1} in Kernel.__info__(:functions)) + test "function_exported?/3" do + assert function_exported?(__MODULE__, :exported?, 0) + refute function_exported?(__MODULE__, :not_exported?, 0) end test "macro_exported?/3" do @@ -119,302 +98,660 @@ defmodule KernelTest do assert macro_exported?(Kernel, :def, 2) == true assert macro_exported?(Kernel, :def, 3) == false assert macro_exported?(Kernel, :no_such_macro, 2) == false + assert macro_exported?(:erlang, :abs, 1) == false end test "apply/3 and apply/2" do - assert apply(Enum, :reverse, [[1|[2, 3]]]) == [3, 2, 1] + assert apply(Enum, :reverse, [[1 | [2, 3]]]) == [3, 2, 1] assert apply(fn x -> x * 2 end, [2]) == 4 end - test "binding/0, binding/1 and binding/2" do + test "binding/0 and binding/1" do x = 1 - assert binding == [x: 1] - assert binding([:x, :y]) == [x: 1] - assert binding([:x, :y], nil) == [x: 1] + assert binding() == [x: 1] x = 2 - assert binding == [x: 2] + assert binding() == [x: 2] y = 3 - assert binding == [x: 2, y: 3] + assert binding() == [x: 2, y: 3] + + var!(x, :foo) = 4 + assert binding() == [x: 2, y: 3] + 
assert binding(:foo) == [x: 4] - var!(x, :foo) = 2 - assert binding(:foo) == [x: 2] - assert binding([:x, :y], :foo) == [x: 2] + # No warnings + _x = 1 + assert binding() == [_x: 1, x: 2, y: 3] end defmodule User do - defstruct name: "jose" + assert is_map defstruct name: "john" end defmodule UserTuple do - def __struct__({ UserTuple, :ok }) do + def __struct__({UserTuple, :ok}) do %User{} end end test "struct/1 and struct/2" do - assert struct(User) == %User{name: "jose"} + assert struct(User) == %User{name: "john"} - user = struct(User, name: "eric") - assert user == %User{name: "eric"} + user = struct(User, name: "meg") + assert user == %User{name: "meg"} assert struct(user, unknown: "key") == user - assert struct(user, %{name: "jose"}) == %User{name: "jose"} + assert struct(user, %{name: "john"}) == %User{name: "john"} assert struct(user, name: "other", __struct__: Post) == %User{name: "other"} - - user_tuple = {UserTuple, :ok} - assert struct(user_tuple, name: "eric") == %User{name: "eric"} end - defdelegate my_flatten(list), to: List, as: :flatten - defdelegate [map(callback, list)], to: :lists, append_first: true + test "struct!/1 and struct!/2" do + assert struct!(User) == %User{name: "john"} - dynamic = :dynamic_flatten - defdelegate unquote(dynamic)(list), to: List, as: :flatten + user = struct!(User, name: "meg") + assert user == %User{name: "meg"} - test "defdelefate/2" do - assert my_flatten([[1]]) == [1] - end + assert_raise KeyError, fn -> + struct!(user, unknown: "key") + end - test "defdelegate/2 with :append_first" do - assert map([1], fn(x) -> x + 1 end) == [2] + assert struct!(user, %{name: "john"}) == %User{name: "john"} + assert struct!(user, name: "other", __struct__: Post) == %User{name: "other"} end - test "defdelegate/2 with unquote" do - assert dynamic_flatten([[1]]) == [1] - end + test "if/2 with invalid keys" do + error_message = "invalid or duplicate keys for if, only \"do\" " <> + "and an optional \"else\" are permitted" + assert_raise ArgumentError, error_message, fn -> + Code.eval_string("if true, foo: 7") + end - test "get_in/2" do - users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - assert get_in(users, ["josé", :age]) == 27 - assert get_in(users, ["dave", :age]) == nil - assert get_in(nil, ["josé", :age]) == nil + assert_raise ArgumentError, error_message, fn -> + Code.eval_string("if true, do: 6, boo: 7") + end - assert_raise FunctionClauseError, fn -> - get_in(users, []) + assert_raise ArgumentError, error_message, fn -> + Code.eval_string("if true, do: 7, do: 6") end - end - test "put_in/3" do - users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} + assert_raise ArgumentError, error_message, fn -> + Code.eval_string("if true, do: 8, else: 7, else: 6") + end - assert put_in(nil, ["josé", :age], 28) == - %{"josé" => %{age: 28}} + assert_raise ArgumentError, error_message, fn -> + Code.eval_string("if true, else: 6") + end - assert put_in(users, ["josé", :age], 28) == - %{"josé" => %{age: 28}, "eric" => %{age: 23}} + assert_raise ArgumentError, error_message, fn -> + Code.eval_string("if true, []") + end + end - assert put_in(users, ["dave", :age], 19) == - %{"josé" => %{age: 27}, "eric" => %{age: 23}, "dave" => %{age: 19}} + test "unless/2 with invalid keys" do + error_message = "invalid or duplicate keys for unless, only \"do\" " <> + "and an optional \"else\" are permitted" + assert_raise ArgumentError, error_message, fn -> + Code.eval_string("unless true, foo: 7") + end - assert_raise FunctionClauseError, fn -> - put_in(users, [], %{}) + 
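
Stepping back to the `struct/2` and `struct!/2` assertions above: the difference these tests pin down is that `struct/2` silently ignores unknown keys while `struct!/2` raises `KeyError` for them. A minimal standalone sketch, not part of the patch, with a hypothetical module name:

```elixir
defmodule SketchUser do
  defstruct name: "john"
end

# struct/2 drops keys that are not part of the struct definition.
struct(SketchUser, name: "meg", unknown: "key")
#=> %SketchUser{name: "meg"}

# struct!/2 raises for the same input.
struct!(SketchUser, name: "meg", unknown: "key")
#=> ** (KeyError)
```
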
assert_raise ArgumentError, error_message, fn -> + Code.eval_string("unless true, do: 6, boo: 7") end - end - test "put_in/2" do - users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} + assert_raise ArgumentError, error_message, fn -> + Code.eval_string("unless true, do: 7, do: 6") + end - assert put_in(nil["josé"][:age], 28) == - %{"josé" => %{age: 28}} + assert_raise ArgumentError, error_message, fn -> + Code.eval_string("unless true, do: 8, else: 7, else: 6") + end - assert put_in(users["josé"][:age], 28) == - %{"josé" => %{age: 28}, "eric" => %{age: 23}} + assert_raise ArgumentError, error_message, fn -> + Code.eval_string("unless true, else: 6") + end - assert put_in(users["dave"][:age], 19) == - %{"josé" => %{age: 27}, "eric" => %{age: 23}, "dave" => %{age: 19}} + assert_raise ArgumentError, error_message, fn -> + Code.eval_string("unless true, []") + end + end + test "and/2" do + assert (true and false) == false + assert (true and true) == true + assert (true and 0) == 0 + assert (false and false) == false + assert (false and true) == false + assert (false and 0) == false + assert (false and raise "oops") == false + assert_raise BadBooleanError, fn -> 0 and 1 end + end - assert put_in(users["josé"].age, 28) == - %{"josé" => %{age: 28}, "eric" => %{age: 23}} + test "or/2" do + assert (true or false) == true + assert (true or true) == true + assert (true or 0) == true + assert (true or raise "foo") == true + assert (false or false) == false + assert (false or true) == true + assert (false or 0) == 0 + assert_raise BadBooleanError, fn -> 0 or 1 end + end - assert_raise ArgumentError, fn -> - put_in(users["dave"].age, 19) + describe "in/2" do + test "with literals on right side" do + assert 2 in [1, 2, 3] + assert 2 in 1..3 + refute 4 in [1, 2, 3] + refute 4 in 1..3 + end + + test "with expressions on right side" do + list = [1, 2, 3] + assert 2 in list + refute 4 in list + + assert 2 in [1 | [2, 3]] + assert 3 in [1 | list] + end + + @at_list1 [4, 5] + @at_range 6..8 + @at_list2 [13, 14] + def fun_in(x) when x in [0], do: :list + def fun_in(x) when x in 1..3, do: :range + def fun_in(x) when x in @at_list1, do: :at_list + def fun_in(x) when x in @at_range, do: :at_range + def fun_in(x) when x in [9 | [10, 11]], do: :list_cons + def fun_in(x) when x in [12 | @at_list2], do: :list_cons_at + def fun_in(_), do: :none + + test "in function guard" do + assert fun_in(0) == :list + assert fun_in(1) == :range + assert fun_in(2) == :range + assert fun_in(3) == :range + assert fun_in(5) == :at_list + assert fun_in(6) == :at_range + assert fun_in(7) == :at_range + assert fun_in(8) == :at_range + assert fun_in(9) == :list_cons + assert fun_in(10) == :list_cons + assert fun_in(11) == :list_cons + assert fun_in(12) == :list_cons_at + assert fun_in(13) == :list_cons_at + assert fun_in(14) == :list_cons_at + + assert fun_in(0.0) == :none + assert fun_in(1.0) == :none + assert fun_in(2.0) == :none + assert fun_in(3.0) == :none + assert fun_in(6.0) == :none + assert fun_in(7.0) == :none + assert fun_in(8.0) == :none + assert fun_in(9.0) == :none + assert fun_in(10.0) == :none + assert fun_in(11.0) == :none + assert fun_in(12.0) == :none + assert fun_in(13.0) == :none + assert fun_in(14.0) == :none + end + + def dynamic_in(x, y, z) when x in y..z, do: true + def dynamic_in(_x, _y, _z), do: false + + test "in dynamic function guard" do + assert dynamic_in(1, 1, 3) + assert dynamic_in(2, 1, 3) + assert dynamic_in(3, 1, 3) + + assert dynamic_in(1, 3, 1) + assert dynamic_in(2, 3, 1) + assert dynamic_in(3, 3, 
1) + + refute dynamic_in(0, 1, 3) + refute dynamic_in(4, 1, 3) + refute dynamic_in(0, 3, 1) + refute dynamic_in(4, 3, 1) + + refute dynamic_in(2, 1.0, 3) + refute dynamic_in(2, 1, 3.0) + refute dynamic_in(2.0, 1, 3) + end + + defmacrop case_in(x, y) do + quote do + case 0 do + _ when unquote(x) in unquote(y) -> true + _ -> false + end + end end - assert_raise KeyError, fn -> - put_in(users["eric"].unknown, "value") + test "in case guard" do + assert case_in(1, [1, 2, 3]) == true + assert case_in(1, 1..3) == true + assert case_in(2, 1..3) == true + assert case_in(3, 1..3) == true + assert case_in(-3, -1..-3) == true end - end - test "update_in/3" do - users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} + test "in module body" do + defmodule InSample do + @foo [:a, :b] + true = :a in @foo + end + after + purge(InSample) + end - assert update_in(nil, ["josé", :age], fn nil -> 28 end) == - %{"josé" => %{age: 28}} + test "inside and/2" do + response = %{code: 200} + if is_map(response) and response.code in 200..299 do + :pass + end - assert update_in(users, ["josé", :age], &(&1 + 1)) == - %{"josé" => %{age: 28}, "eric" => %{age: 23}} + # This module definition copies internal variable + # defined during in/2 expansion. + Module.create(InVarCopy, nil, __ENV__) + purge(InVarCopy) + end - assert update_in(users, ["dave", :age], fn nil -> 19 end) == - %{"josé" => %{age: 27}, "eric" => %{age: 23}, "dave" => %{age: 19}} + test "with a non-literal non-escaped compile-time range in guards" do + message = "non-literal range in guard should be escaped with Macro.escape/2" + assert_raise ArgumentError, message, fn -> + Code.eval_string """ + defmodule InErrors do + range = 1..3 + def foo(x) when x in unquote(range), do: :ok + end + """ + end + end - assert_raise FunctionClauseError, fn -> - update_in(users, [], fn _ -> %{} end) + test "with a non-compile-time range in guards" do + message = ~r/invalid args for operator "in", .* got: :hello/ + assert_raise ArgumentError, message, fn -> + Code.eval_string """ + defmodule InErrors do + def foo(x) when x in :hello, do: :ok + end + """ + end end - end - test "update_in/2" do - users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} + test "with a non-compile-time list cons in guards" do + message = ~r/invalid args for operator "in", .* got: list\(\)/ + assert_raise ArgumentError, message, fn -> + Code.eval_string """ + defmodule InErrors do + def list, do: [1] + def foo(x) when x in [1 | list()], do: :ok + end + """ + end + end - assert update_in(nil["josé"][:age], fn nil -> 28 end) == - %{"josé" => %{age: 28}} + test "with a non-integer range" do + message = "ranges (first..last) expect both sides to be integers, got: 0..5.0" + assert_raise ArgumentError, message, fn -> + last = 5.0 + 1 in 0..last + end + end - assert update_in(users["josé"][:age], &(&1 + 1)) == - %{"josé" => %{age: 28}, "eric" => %{age: 23}} + test "is optimized" do + assert expand_to_string(quote(do: foo in [])) == "Enum.member?([], foo)" - assert update_in(users["dave"][:age], fn nil -> 19 end) == - %{"josé" => %{age: 27}, "eric" => %{age: 23}, "dave" => %{age: 19}} + result = expand_to_string(quote(do: rand() in 1..2)) + assert result =~ "var = rand()" + assert result =~ ":erlang.andalso(:erlang.is_integer(var), :erlang.andalso(:erlang.>=(var, 1), :erlang.\"=<\"(var, 2)))" - assert update_in(users["josé"].age, &(&1 + 1)) == - %{"josé" => %{age: 28}, "eric" => %{age: 23}} + result = expand_to_string(quote(do: rand() in [1, 2])) + assert result =~ "var = rand()" + assert result =~ 
":erlang.or(:erlang.\"=:=\"(var, 2), :erlang.\"=:=\"(var, 1))" - assert_raise ArgumentError, fn -> - update_in(users["dave"].age, &(&1 + 1)) + result = expand_to_string(quote(do: rand() in [1 | [2]])) + assert result =~ "var = rand()" + assert result =~ ":erlang.or(:erlang.\"=:=\"(var, 1), :erlang.\"=:=\"(var, 2))" end - assert_raise KeyError, fn -> - put_in(users["eric"].unknown, &(&1 + 1)) + defp expand_to_string(ast) do + ast + |> Macro.prewalk(&Macro.expand(&1, __ENV__)) + |> Macro.to_string end end - test "get_and_update_in/3" do - users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} - - assert get_and_update_in(nil, ["josé", :age], fn nil -> {:ok, 28} end) == - {:ok, %{"josé" => %{age: 28}}} + describe "__info__" do + test ":macros" do + assert {:in, 2} in Kernel.__info__(:macros) + end - assert get_and_update_in(users, ["josé", :age], &{&1, &1 + 1}) == - {27, %{"josé" => %{age: 28}, "eric" => %{age: 23}}} + test ":functions" do + refute {:__info__, 1} in Kernel.__info__(:functions) + end - assert_raise FunctionClauseError, fn -> - update_in(users, [], fn _ -> %{} end) + test "others" do + assert Kernel.__info__(:module) == Kernel + assert is_list Kernel.__info__(:compile) + assert is_list Kernel.__info__(:attributes) + assert is_list Kernel.__info__(:exports) end end - test "get_and_update_in/2" do - users = %{"josé" => %{age: 27}, "eric" => %{age: 23}} + describe "defdelegate" do + defdelegate my_flatten(list), to: List, as: :flatten - assert get_and_update_in(nil["josé"][:age], fn nil -> {:ok, 28} end) == - {:ok, %{"josé" => %{age: 28}}} + dynamic = :dynamic_flatten + defdelegate unquote(dynamic)(list), to: List, as: :flatten - assert get_and_update_in(users["josé"].age, &{&1, &1 + 1}) == - {27, %{"josé" => %{age: 28}, "eric" => %{age: 23}}} - - assert_raise ArgumentError, fn -> - get_and_update_in(users["dave"].age, &{&1, &1 + 1}) + test "dispatches to delegated functions" do + assert my_flatten([[1]]) == [1] end - assert_raise KeyError, fn -> - get_and_update_in(users["eric"].unknown, &{&1, &1 + 1}) + test "with unquote" do + assert dynamic_flatten([[1]]) == [1] end - end - test "paths" do - map = empty_map() + test "raises with non-variable arguments" do + msg = "defdelegate/2 only accepts function parameters, got: 1" - assert put_in(map[:foo], "bar") == %{foo: "bar"} - assert put_in(empty_map()[:foo], "bar") == %{foo: "bar"} - assert put_in(KernelTest.empty_map()[:foo], "bar") == %{foo: "bar"} - assert put_in(__MODULE__.empty_map()[:foo], "bar") == %{foo: "bar"} + assert_raise ArgumentError, msg, fn -> Code.eval_string(""" + defmodule IntDelegate do + defdelegate foo(1), to: List + end + """, [], __ENV__) + end - assert_raise ArgumentError, ~r"access at least one field,", fn -> - Code.eval_quoted(quote(do: put_in(map, "bar")), []) + assert_raise ArgumentError, msg, fn -> Code.eval_string(""" + defmodule IntOptionDelegate do + defdelegate foo(1 \\\\ 1), to: List + end + """, [], __ENV__) + end end - assert_raise ArgumentError, ~r"must start with a variable, local or remote call", fn -> - Code.eval_quoted(quote(do: put_in(map.foo(1, 2)[:bar], "baz")), []) + defdelegate my_reverse(list \\ []), to: :lists, as: :reverse + defdelegate my_get(map \\ %{}, key, default \\ ""), to: Map, as: :get + + test "accepts variable with optional arguments" do + assert my_reverse() == [] + assert my_reverse([1, 2, 3]) == [3, 2, 1] + + assert my_get("foo") == "" + assert my_get(%{}, "foo") == "" + assert my_get(%{"foo" => "bar"}, "foo") == "bar" + assert my_get(%{}, "foo", "not_found") == 
"not_found" end end - def empty_map, do: %{} + describe "access" do + test "get_in/2" do + users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + assert get_in(users, ["john", :age]) == 27 + assert get_in(users, ["dave", :age]) == nil + assert get_in(nil, ["john", :age]) == nil - defmodule PipelineOp do - use ExUnit.Case, async: true + map = %{"fruits" => ["banana", "apple", "orange"]} + assert get_in(map, ["fruits", by_index(0)]) == "banana" + assert get_in(map, ["fruits", by_index(3)]) == nil + assert get_in(map, ["unknown", by_index(3)]) == :oops - test "simple" do - assert [1, [2], 3] |> List.flatten == [1, 2, 3] + assert_raise FunctionClauseError, fn -> + get_in(users, []) + end end - test "nested pipelines" do - assert [1, [2], 3] |> List.flatten |> Enum.map(&(&1 * 2)) == [2, 4, 6] + test "put_in/3" do + users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + + assert put_in(users, ["john", :age], 28) == + %{"john" => %{age: 28}, "meg" => %{age: 23}} + + assert_raise FunctionClauseError, fn -> + put_in(users, [], %{}) + end + + assert_raise ArgumentError, "could not put/update key \"john\" on a nil value", fn -> + put_in(nil, ["john", :age], 28) + end end - test "local call" do - assert [1, [2], 3] |> List.flatten |> local == [2, 4, 6] + test "put_in/2" do + users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + + assert put_in(users["john"][:age], 28) == + %{"john" => %{age: 28}, "meg" => %{age: 23}} + + assert put_in(users["john"].age, 28) == + %{"john" => %{age: 28}, "meg" => %{age: 23}} + + assert_raise BadMapError, fn -> + put_in(users["dave"].age, 19) + end + + assert_raise KeyError, fn -> + put_in(users["meg"].unknown, "value") + end end - test "pipeline with capture" do - assert Enum.map([1, 2, 3], &(&1 |> twice |> twice)) == [4, 8, 12] + test "update_in/3" do + users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + + assert update_in(users, ["john", :age], &(&1 + 1)) == + %{"john" => %{age: 28}, "meg" => %{age: 23}} + + assert_raise FunctionClauseError, fn -> + update_in(users, [], fn _ -> %{} end) + end + + assert_raise ArgumentError, "could not put/update key \"john\" on a nil value", fn -> + update_in(nil, ["john", :age], fn _ -> %{} end) + end + + assert_raise UndefinedFunctionError, fn -> + pop_in(struct(Sample, []), [:name]) + end end - test "non-call" do - assert 1 |> (&(&1*2)).() == 2 - assert [1] |> (&hd(&1)).() == 1 + test "update_in/2" do + users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + + assert update_in(users["john"][:age], &(&1 + 1)) == + %{"john" => %{age: 28}, "meg" => %{age: 23}} + + assert update_in(users["john"].age, &(&1 + 1)) == + %{"john" => %{age: 28}, "meg" => %{age: 23}} + + assert_raise BadMapError, fn -> + update_in(users["dave"].age, &(&1 + 1)) + end - import CompileAssertion - assert_compile_fail ArgumentError, "cannot pipe 1 into 2", "1 |> 2" + assert_raise KeyError, fn -> + put_in(users["meg"].unknown, &(&1 + 1)) + end end - defp twice(a), do: a * 2 + test "get_and_update_in/3" do + users = %{"john" => %{age: 27}, "meg" => %{age: 23}} - defp local(list) do - Enum.map(list, &(&1 * 2)) + assert get_and_update_in(users, ["john", :age], &{&1, &1 + 1}) == + {27, %{"john" => %{age: 28}, "meg" => %{age: 23}}} + + map = %{"fruits" => ["banana", "apple", "orange"]} + assert get_and_update_in(map, ["fruits", by_index(0)], &{&1, String.reverse(&1)}) == + {"banana", %{"fruits" => ["ananab", "apple", "orange"]}} + + assert get_and_update_in(map, ["fruits", by_index(3)], &{&1, &1}) == + {nil, %{"fruits" => ["banana", "apple", "orange"]}} + + assert 
get_and_update_in(map, ["unknown", by_index(3)], &{&1, []}) == + {:oops, %{"fruits" => ["banana", "apple", "orange"], "unknown" => []}} + + assert_raise FunctionClauseError, fn -> + update_in(users, [], fn _ -> %{} end) + end end - end - defmodule IfScope do - use ExUnit.Case, async: true + test "get_and_update_in/2" do + users = %{"john" => %{age: 27}, "meg" => %{age: 23}} - test "variables on nested if" do - if true do - a = 1 - if true do - b = 2 - end + assert get_and_update_in(users["john"].age, &{&1, &1 + 1}) == + {27, %{"john" => %{age: 28}, "meg" => %{age: 23}}} + + assert_raise ArgumentError, "could not put/update key \"john\" on a nil value", fn -> + get_and_update_in(nil["john"][:age], fn nil -> {:ok, 28} end) end - assert a == 1 - assert b == 2 + assert_raise BadMapError, fn -> + get_and_update_in(users["dave"].age, &{&1, &1 + 1}) + end + + assert_raise KeyError, fn -> + get_and_update_in(users["meg"].unknown, &{&1, &1 + 1}) + end end - test "variables on sibling if" do - if true do - a = 1 + test "pop_in/2" do + users = %{"john" => %{age: 27}, "meg" => %{age: 23}} - if true do - b = 2 - end + assert pop_in(users, ["john", :age]) == + {27, %{"john" => %{}, "meg" => %{age: 23}}} - if true do - c = 3 - end + assert pop_in(users, ["bob", :age]) == + {nil, %{"john" => %{age: 27}, "meg" => %{age: 23}}} + + assert pop_in([], [:foo, :bar]) == {nil, []} + + assert_raise FunctionClauseError, fn -> + pop_in(users, []) end - assert a == 1 - assert b == 2 - assert c == 3 + assert_raise FunctionClauseError, "no function clause matching in Kernel.pop_in/2", fn -> + pop_in(users, :not_a_list) + end end - test "variables counter on nested ifs" do - r = (fn() -> 3 end).() # supresses warning at (if r < 0...) - r = r - 1 - r = r - 1 - r = r - 1 + test "pop_in/2 with paths" do + map = %{"fruits" => ["banana", "apple", "orange"]} + assert pop_in(map, ["fruits", by_index(0)]) == + {"banana", %{"fruits" => ["apple", "orange"]}} + assert pop_in(map, ["fruits", by_index(3)]) == + {nil, map} + + map = %{"fruits" => [%{name: "banana"}, %{name: "apple"}]} + assert pop_in(map, ["fruits", by_index(0), :name]) == + {"banana", %{"fruits" => [%{}, %{name: "apple"}]}} + assert pop_in(map, ["fruits", by_index(3), :name]) == + {nil, map} + end + + test "pop_in/1" do + users = %{"john" => %{age: 27}, "meg" => %{age: 23}} + + assert pop_in(users["john"][:age]) == + {27, %{"john" => %{}, "meg" => %{age: 23}}} + assert pop_in(users["john"][:name]) == + {nil, %{"john" => %{age: 27}, "meg" => %{age: 23}}} + assert pop_in(users["bob"][:age]) == + {nil, %{"john" => %{age: 27}, "meg" => %{age: 23}}} + + users = %{john: [age: 27], meg: [age: 23]} + + assert pop_in(users.john[:age]) == + {27, %{john: [], meg: [age: 23]}} + assert pop_in(users.john[:name]) == + {nil, %{john: [age: 27], meg: [age: 23]}} + + assert pop_in([][:foo][:bar]) == {nil, []} + assert_raise KeyError, fn -> pop_in(users.bob[:age]) end + end + + test "pop_in/1/2 with nils" do + users = %{"john" => nil, "meg" => %{age: 23}} + assert pop_in(users["john"][:age]) == + {nil, %{"meg" => %{age: 23}}} + assert pop_in(users, ["john", :age]) == + {nil, %{"meg" => %{age: 23}}} + + users = %{john: nil, meg: %{age: 23}} + assert pop_in(users.john[:age]) == + {nil, %{john: nil, meg: %{age: 23}}} + assert pop_in(users, [:john, :age]) == + {nil, %{meg: %{age: 23}}} + + x = nil + assert_raise ArgumentError, fn -> pop_in(x["john"][:age]) end + assert_raise ArgumentError, fn -> pop_in(nil["john"][:age]) end + assert_raise ArgumentError, fn -> pop_in(nil, ["john", :age]) 
end + end + + test "with dynamic paths" do + map = empty_map() + + assert put_in(map[:foo], "bar") == %{foo: "bar"} + assert put_in(empty_map()[:foo], "bar") == %{foo: "bar"} + assert put_in(KernelTest.empty_map()[:foo], "bar") == %{foo: "bar"} + assert put_in(__MODULE__.empty_map()[:foo], "bar") == %{foo: "bar"} + + assert_raise ArgumentError, ~r"access at least one element,", fn -> + Code.eval_quoted(quote(do: put_in(map, "bar")), []) + end - if true do - r = r - 1 - if r < 0, do: r = 0 + assert_raise ArgumentError, ~r"must start with a variable, local or remote call", fn -> + Code.eval_quoted(quote(do: put_in(map.foo(1, 2)[:bar], "baz")), []) end + end - assert r == 0 + def empty_map, do: %{} + + def by_index(index) do + fn + _, nil, next -> + next.(:oops) + :get, data, next -> + next.(Enum.at(data, index)) + :get_and_update, data, next -> + current = Enum.at(data, index) + case next.(current) do + {get, update} -> {get, List.replace_at(data, index, update)} + :pop -> {current, List.delete_at(data, index)} + end + end end end - defmodule Destructure do - use ExUnit.Case, async: true + describe "pipeline" do + test "simple" do + assert [1, [2], 3] |> List.flatten == [1, 2, 3] + end + + test "nested" do + assert [1, [2], 3] |> List.flatten |> Enum.map(&(&1 * 2)) == [2, 4, 6] + end + + test "local call" do + assert [1, [2], 3] |> List.flatten |> local == [2, 4, 6] + end + + test "with capture" do + assert Enum.map([1, 2, 3], &(&1 |> twice |> twice)) == [4, 8, 12] + end + + test "with anonymous functions" do + assert 1 |> (&(&1*2)).() == 2 + assert [1] |> (&hd(&1)).() == 1 + end + + defp twice(a), do: a * 2 + defp local(list) do + Enum.map(list, &(&1 * 2)) + end + end + + describe "destructure" do test "less args" do destructure [x, y, z], [1, 2, 3, 4, 5] assert x == 1 @@ -451,7 +788,7 @@ defmodule KernelTest do end test "nil values" do - destructure [a, b, c], a_nil + destructure [a, b, c], a_nil() assert a == nil assert b == nil assert c == nil @@ -459,12 +796,92 @@ defmodule KernelTest do test "invalid match" do a = List.first([3]) - assert_raise CaseClauseError, fn -> - destructure [^a, _b, _c], a_list + assert_raise MatchError, fn -> + destructure [^a, _b, _c], a_list() end end defp a_list, do: [1, 2, 3] defp a_nil, do: nil end + + describe "use/2" do + import ExUnit.CaptureIO + + defmodule SampleA do + defmacro __using__(opts) do + prefix = Keyword.get(opts, :prefix, "") + IO.puts(prefix <> "A") + end + end + + defmodule SampleB do + defmacro __using__(_) do + IO.puts("B") + end + end + + test "invalid argument is literal" do + message = "invalid arguments for use, expected a compile time atom or alias, got: 42" + assert_raise ArgumentError, message, fn -> + Code.eval_string("use 42") + end + end + + test "invalid argument is variable" do + message = "invalid arguments for use, expected a compile time atom or alias, got: variable" + assert_raise ArgumentError, message, fn -> + Code.eval_string("use variable") + end + end + + test "multi-call" do + assert capture_io(fn -> + Code.eval_string("use KernelTest.{SampleA, SampleB,}", [], __ENV__) + end) == "A\nB\n" + end + + test "multi-call with options" do + assert capture_io(fn -> + Code.eval_string(~S|use KernelTest.{SampleA}, prefix: "-"|, [], __ENV__) + end) == "-A\n" + end + + test "multi-call with unquote" do + assert capture_io(fn -> + Code.eval_string(""" + defmodule TestMod do + def main() do + use KernelTest.{SampleB, unquote(:SampleA)} + end + end + """, [], __ENV__) + end) == "B\nA\n" + after + purge(KernelTest.TestMod) + end + 
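
The `use/2` tests above hinge on the fact that `use Mod, opts` expands to requiring `Mod` and invoking `Mod.__using__(opts)` at compile time, injecting whatever quoted code the callback returns into the caller. A minimal standalone sketch with hypothetical module names:

```elixir
defmodule Greeter do
  defmacro __using__(opts) do
    name = Keyword.get(opts, :name, "world")

    quote do
      # This function is injected into whichever module calls `use Greeter`.
      def hello, do: "hello " <> unquote(name)
    end
  end
end

defmodule UsesGreeter do
  # Roughly equivalent to: require Greeter; Greeter.__using__(name: "elixir")
  use Greeter, name: "elixir"
end

UsesGreeter.hello()
#=> "hello elixir"
```
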
end + + test "tl/1" do + assert tl([:one]) == [] + assert tl([1, 2, 3]) == [2, 3] + assert_raise ArgumentError, "argument error", fn -> + tl(empty_list()) + end + assert tl([:a | :b]) == :b + assert tl([:a, :b | :c]) == [:b | :c] + end + + test "hd/1" do + assert hd([1, 2, 3, 4]) == 1 + assert_raise ArgumentError, "argument error", fn -> + hd(empty_list()) + end + assert hd([1 | 2]) == 1 + end + + defp purge(module) do + :code.delete(module) + :code.purge(module) + end end diff --git a/lib/elixir/test/elixir/keyword_test.exs b/lib/elixir/test/elixir/keyword_test.exs index 4387b6026e3..bb7a78a02de 100644 --- a/lib/elixir/test/elixir/keyword_test.exs +++ b/lib/elixir/test/elixir/keyword_test.exs @@ -3,6 +3,8 @@ Code.require_file "test_helper.exs", __DIR__ defmodule KeywordTest do use ExUnit.Case, async: true + doctest Keyword + test "has a literal syntax" do assert [B: 1] == [{:B, 1}] assert [foo?: :bar] == [{:foo?, :bar}] @@ -17,232 +19,130 @@ defmodule KeywordTest do test "supports optional comma" do [a: 1, b: 2, - c: 3, ] - end - - test "keyword?/1" do - assert Keyword.keyword?([]) - assert Keyword.keyword?([a: 1]) - assert Keyword.keyword?([{Foo, 1}]) - refute Keyword.keyword?([{}]) - refute Keyword.keyword?(<<>>) - end - - test "new/0" do - assert Keyword.new == [] - end - - test "new/1" do - assert Keyword.new([{:second_key, 2}, {:first_key, 1}]) == - [first_key: 1, second_key: 2] - end - - test "new/2" do - assert Keyword.new([:a, :b], fn x -> {x, x} end) == - [b: :b, a: :a] - end - - test "get/2 and get/3" do - assert Keyword.get(create_keywords, :first_key) == 1 - assert Keyword.get(create_keywords, :second_key) == 2 - assert Keyword.get(create_keywords, :other_key) == nil - assert Keyword.get(create_empty_keywords, :first_key, "default") == "default" - end - - test "fetch!/2" do - assert Keyword.fetch!(create_keywords, :first_key) == 1 - - error = assert_raise KeyError, fn -> - Keyword.fetch!(create_keywords, :unknown) - end - - assert error.key == :unknown + c: 3,] end - test "keys/1" do - assert Keyword.keys(create_keywords) == [:first_key, :second_key] - assert Keyword.keys(create_empty_keywords) == [] - - assert_raise FunctionClauseError, fn -> - Keyword.keys([:foo]) - end + test "implements (almost) all functions in Map" do + assert Map.__info__(:functions) -- Keyword.__info__(:functions) == + [from_struct: 1] end - test "values/1" do - assert Keyword.values(create_keywords) == [1, 2] - assert Keyword.values(create_empty_keywords) == [] - - assert_raise FunctionClauseError, fn -> - Keyword.values([:foo]) + test "get_and_update/3 raises on bad return value from the argument function" do + assert_raise RuntimeError, "the given function must return a two-element tuple or :pop, got: 1", fn -> + Keyword.get_and_update([a: 1], :a, fn value -> value end) end - end - - test "delete/2" do - assert Keyword.delete(create_keywords, :second_key) == [first_key: 1] - assert Keyword.delete(create_keywords, :other_key) == [first_key: 1, second_key: 2] - assert Keyword.delete(create_empty_keywords, :other_key) == [] - assert_raise FunctionClauseError, fn -> - Keyword.delete([:foo], :foo) + assert_raise RuntimeError, "the given function must return a two-element tuple or :pop, got: nil", fn -> + Keyword.get_and_update([], :a, fn value -> value end) end end - test "delete/3" do - keywords = [a: 1, b: 2, c: 3, a: 2] - assert Keyword.delete(keywords, :a, 2) == [a: 1, b: 2, c: 3] - assert Keyword.delete(keywords, :a, 1) == [b: 2, c: 3, a: 2] - - assert_raise FunctionClauseError, fn -> - 
Keyword.delete([:foo], :foo, 0) + test "get_and_update!/3 raises on bad return value from the argument function" do + assert_raise RuntimeError, "the given function must return a two-element tuple or :pop, got: 1", fn -> + Keyword.get_and_update!([a: 1], :a, fn value -> value end) end end - test "put/3" do - assert Keyword.put(create_empty_keywords, :first_key, 1) == [first_key: 1] - assert Keyword.put(create_keywords, :first_key, 3) == [first_key: 3, second_key: 2] - end - - test "put_new/3" do - assert Keyword.put_new(create_empty_keywords, :first_key, 1) == [first_key: 1] - assert Keyword.put_new(create_keywords, :first_key, 3) == [first_key: 1, second_key: 2] - end - test "merge/2" do - assert Keyword.merge(create_empty_keywords, create_keywords) == [first_key: 1, second_key: 2] - assert Keyword.merge(create_keywords, create_empty_keywords) == [first_key: 1, second_key: 2] - assert Keyword.merge(create_keywords, create_keywords) == [first_key: 1, second_key: 2] - assert Keyword.merge(create_empty_keywords, create_empty_keywords) == [] + assert Keyword.merge([a: 1, b: 2], [c: 11, d: 12]) == [a: 1, b: 2, c: 11, d: 12] + assert Keyword.merge([], [c: 11, d: 12]) == [c: 11, d: 12] + assert Keyword.merge([a: 1, b: 2], []) == [a: 1, b: 2] - assert_raise FunctionClauseError, fn -> - Keyword.delete([:foo], [:foo]) + assert_raise ArgumentError, "expected a keyword list as the first argument, got: [1, 2]", fn -> + Keyword.merge([1, 2], [c: 11, d: 12]) end - end - test "merge/3" do - result = Keyword.merge [a: 1, b: 2], [a: 3, d: 4], fn _k, v1, v2 -> - v1 + v2 + assert_raise ArgumentError, "expected a keyword list as the first argument, got: [1 | 2]", fn -> + Keyword.merge([1 | 2], [c: 11, d: 12]) end - assert result == [a: 4, b: 2, d: 4] - end - - test "has_key?/2" do - assert Keyword.has_key?([a: 1], :a) == true - assert Keyword.has_key?([a: 1], :b) == false - end - test "update!/3" do - kw = [a: 1, b: 2, a: 3] - assert Keyword.update!(kw, :a, &(&1 * 2)) == [a: 2, b: 2] - assert_raise KeyError, fn -> - Keyword.update!([a: 1], :b, &(&1 * 2)) + assert_raise ArgumentError, "expected a keyword list as the second argument, got: [11, 12, 0]", fn -> + Keyword.merge([a: 1, b: 2], [11, 12, 0]) end - end - - test "update/4" do - kw = [a: 1, b: 2, a: 3] - assert Keyword.update(kw, :a, 13, &(&1 * 2)) == [a: 2, b: 2] - assert Keyword.update([a: 1], :b, 11, &(&1 * 2)) == [a: 1, b: 11] - end - - defp create_empty_keywords, do: [] - defp create_keywords, do: [first_key: 1, second_key: 2] -end - -defmodule Keyword.DuplicatedTest do - use ExUnit.Case, async: true - test "get/2" do - assert Keyword.get(create_keywords, :first_key) == 1 - assert Keyword.get(create_keywords, :second_key) == 2 - assert Keyword.get(create_keywords, :other_key) == nil - assert Keyword.get(create_empty_keywords, :first_key, "default") == "default" - end - - test "get_values/2" do - assert Keyword.get_values(create_keywords, :first_key) == [1, 2] - assert Keyword.get_values(create_keywords, :second_key) == [2] - assert Keyword.get_values(create_keywords, :other_key) == [] - - assert_raise FunctionClauseError, fn -> - Keyword.get_values([:foo], :foo) + assert_raise ArgumentError, "expected a keyword list as the second argument, got: [11 | 12]", fn -> + Keyword.merge([a: 1, b: 2], [11 | 12]) end - end - test "keys/1" do - assert Keyword.keys(create_keywords) == [:first_key, :first_key, :second_key] - assert Keyword.keys(create_empty_keywords) == [] - end - - test "equal?/2" do - assert Keyword.equal? 
[a: 1, b: 2], [b: 2, a: 1] - refute Keyword.equal? [a: 1, b: 2], [b: 2, c: 3] - end + # duplicate keys in keywords1 are kept if key is not present in keywords2 + assert Keyword.merge([a: 1, b: 2, a: 3], [c: 11, d: 12]) == [a: 1, b: 2, a: 3, c: 11, d: 12] + assert Keyword.merge([a: 1, b: 2, a: 3], [a: 11]) == [b: 2, a: 11] - test "values/1" do - assert Keyword.values(create_keywords) == [1, 2, 2] - assert Keyword.values(create_empty_keywords) == [] - end + # duplicate keys in keywords2 are always kept + assert Keyword.merge([a: 1, b: 2], [c: 11, c: 12, d: 13]) == [a: 1, b: 2, c: 11, c: 12, d: 13] - test "delete/2" do - assert Keyword.delete(create_keywords, :first_key) == [second_key: 2] - assert Keyword.delete(create_keywords, :other_key) == create_keywords - assert Keyword.delete(create_empty_keywords, :other_key) == [] + # any key in keywords1 is removed if key is present in keyword2 + assert Keyword.merge([a: 1, b: 2, c: 3, c: 4], [c: 11, c: 12, d: 13]) == [a: 1, b: 2, c: 11, c: 12, d: 13] end - test "delete_first/2" do - assert Keyword.delete_first(create_keywords, :first_key) == [first_key: 2, second_key: 2] - assert Keyword.delete_first(create_keywords, :other_key) == [first_key: 1, first_key: 2, second_key: 2] - assert Keyword.delete_first(create_empty_keywords, :other_key) == [] - end + test "merge/3" do + fun = fn _key, value1, value2 -> value1 + value2 end - test "put/3" do - assert Keyword.put(create_empty_keywords, :first_key, 1) == [first_key: 1] - assert Keyword.put(create_keywords, :first_key, 1) == [first_key: 1, second_key: 2] - end + assert Keyword.merge([a: 1, b: 2], [c: 11, d: 12], fun) == [a: 1, b: 2, c: 11, d: 12] + assert Keyword.merge([], [c: 11, d: 12], fun) == [c: 11, d: 12] + assert Keyword.merge([a: 1, b: 2], [], fun) == [a: 1, b: 2] - test "merge/2" do - assert Keyword.merge(create_empty_keywords, create_keywords) == create_keywords - assert Keyword.merge(create_keywords, create_empty_keywords) == create_keywords - assert Keyword.merge(create_keywords, create_keywords) == create_keywords - assert Keyword.merge(create_empty_keywords, create_empty_keywords) == [] - assert Keyword.merge(create_keywords, [first_key: 0]) == [first_key: 0, second_key: 2] - assert Keyword.merge(create_keywords, [first_key: 0, first_key: 3]) == [first_key: 0, first_key: 3, second_key: 2] - end + assert_raise ArgumentError, "expected a keyword list as the first argument, got: [1, 2]", fn -> + Keyword.merge([1, 2], [c: 11, d: 12], fun) + end - test "merge/3" do - result = Keyword.merge [a: 1, b: 2], [a: 3, d: 4], fn _k, v1, v2 -> - v1 + v2 + assert_raise ArgumentError, "expected a keyword list as the first argument, got: [1 | 2]", fn -> + Keyword.merge([1 | 2], [c: 11, d: 12], fun) end - assert Keyword.equal?(result, [a: 4, b: 2, d: 4]) - end - test "has_key?/2" do - assert Keyword.has_key?([a: 1], :a) == true - assert Keyword.has_key?([a: 1], :b) == false - end + assert_raise ArgumentError, "expected a keyword list as the second argument, got: [{:x, 1}, :y, :z]", fn -> + Keyword.merge([a: 1, b: 2], [{:x, 1}, :y, :z], fun) + end - test "take/2" do - assert Keyword.take([], []) == [] - assert Keyword.take([a: 0, b: 1, a: 2], []) == [] - assert Keyword.take([a: 0, b: 1, a: 2], [:a]) == [a: 0, a: 2] - assert Keyword.take([a: 0, b: 1, a: 2], [:b]) == [b: 1] + assert_raise ArgumentError, "expected a keyword list as the second argument, got: [:x | :y]", fn -> + Keyword.merge([a: 1, b: 2], [:x | :y], fun) + end - assert_raise FunctionClauseError, fn -> - Keyword.take([:foo], [:foo]) + 
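
As a compact summary of the `Keyword.merge/3` semantics these assertions exercise (the values below are taken from the same test cases): the callback runs only for keys present in both lists, keys unique to either side pass through unchanged, and left-hand duplicates of a conflicting key are collapsed.

```elixir
fun = fn _key, value1, value2 -> value1 + value2 end

# Keys that appear in only one list pass through unchanged.
Keyword.merge([a: 1, b: 2], [c: 11, d: 12], fun)
#=> [a: 1, b: 2, c: 11, d: 12]

# :a appears in both lists, so the callback decides its value,
# and the duplicate a: 3 from the left-hand list is dropped.
Keyword.merge([a: 1, b: 2, a: 3], [a: 11], fun)
#=> [b: 2, a: 12]
```
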
assert_raise ArgumentError, "expected a keyword list as the second argument, got: [{:x, 1} | :y]", fn -> + Keyword.merge([a: 1, b: 2], [{:x, 1} | :y], fun) end - end - test "drop/2" do - assert Keyword.drop([], []) == [] - assert Keyword.drop([a: 0, b: 1, a: 2], []) == [a: 0, b: 1, a: 2] - assert Keyword.drop([a: 0, b: 1, a: 2], [:a]) == [b: 1] - assert Keyword.drop([a: 0, b: 1, a: 2], [:b]) == [a: 0, a: 2] + # duplicate keys in keywords1 are left untouched if key is not present in keywords2 + assert Keyword.merge([a: 1, b: 2, a: 3], [c: 11, d: 12], fun) == [a: 1, b: 2, a: 3, c: 11, d: 12] + assert Keyword.merge([a: 1, b: 2, a: 3], [a: 11], fun) == [b: 2, a: 12] + + # duplicate keys in keywords2 are always kept + assert Keyword.merge([a: 1, b: 2], [c: 11, c: 12, d: 13], fun) == [a: 1, b: 2, c: 11, c: 12, d: 13] + + # every key in keywords1 is replaced with fun result if key is present in keyword2 + assert Keyword.merge([a: 1, b: 2, c: 3, c: 4], [c: 11, c: 50, d: 13], fun) == [a: 1, b: 2, c: 14, c: 54, d: 13] + end + + test "merge/2 and merge/3 behave exactly the same way" do + fun = fn _key, _value1, value2 -> value2 end + + args = [ + {[a: 1, b: 2], [c: 11, d: 12]}, + {[], [c: 11, d: 12]}, + {[a: 1, b: 2], []}, + {[a: 1, b: 2, a: 3], [c: 11, d: 12]}, + {[a: 1, b: 2, a: 3], [a: 11]}, + {[a: 1, b: 2], [c: 11, c: 12, d: 13]}, + {[a: 1, b: 2, c: 3, c: 4], [c: 11, c: 12, d: 13]}, + ] + + args_error = [ + {[1, 2], [c: 11, d: 12]}, + {[1 | 2], [c: 11, d: 12]}, + {[a: 1, b: 2], [11, 12, 0]}, + {[a: 1, b: 2], [11 | 12]}, + {[a: 1, b: 2], [{:x, 1}, :y, :z]}, + {[a: 1, b: 2], [:x | :y]}, + {[a: 1, b: 2], [{:x, 1} | :y]}, + ] + + for {arg1, arg2} <- args do + assert Keyword.merge(arg1, arg2) == Keyword.merge(arg1, arg2, fun) + end - assert_raise FunctionClauseError, fn -> - Keyword.drop([:foo], [:foo]) + for {arg1, arg2} <- args_error do + error = assert_raise ArgumentError, fn -> Keyword.merge(arg1, arg2) end + assert_raise ArgumentError, error.message, fn -> Keyword.merge(arg1, arg2, fun) end end end - - defp create_empty_keywords, do: [] - defp create_keywords, do: [first_key: 1, first_key: 2, second_key: 2] end diff --git a/lib/elixir/test/elixir/list/chars_test.exs b/lib/elixir/test/elixir/list/chars_test.exs index 94389bddb95..47e9f4b66a6 100644 --- a/lib/elixir/test/elixir/list/chars_test.exs +++ b/lib/elixir/test/elixir/list/chars_test.exs @@ -3,35 +3,35 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule List.Chars.AtomTest do use ExUnit.Case, async: true - test :basic do - assert to_char_list(:foo) == 'foo' + test "basic" do + assert to_charlist(:foo) == 'foo' end end defmodule List.Chars.BitStringTest do use ExUnit.Case, async: true - test :basic do - assert to_char_list("foo") == 'foo' + test "basic" do + assert to_charlist("foo") == 'foo' end end defmodule List.Chars.NumberTest do use ExUnit.Case, async: true - test :integer do - assert to_char_list(1) == '1' + test "integer" do + assert to_charlist(1) == '1' end - test :float do - assert to_char_list(1.0) == '1.0' + test "float" do + assert to_charlist(1.0) == '1.0' end end defmodule List.Chars.ListTest do use ExUnit.Case, async: true - test :basic do - assert to_char_list([ 1, "b", 3 ]) == [1, "b", 3] + test "basic" do + assert to_charlist([ 1, "b", 3 ]) == [1, "b", 3] end end diff --git a/lib/elixir/test/elixir/list_test.exs b/lib/elixir/test/elixir/list_test.exs index 7da04556abc..5bf89061e31 100644 --- a/lib/elixir/test/elixir/list_test.exs +++ b/lib/elixir/test/elixir/list_test.exs @@ -3,30 +3,32 @@ Code.require_file 
"test_helper.exs", __DIR__ defmodule ListTest do use ExUnit.Case, async: true - test :cons_cell_precedence do - assert [1|:lists.flatten([2, 3])] == [1, 2, 3] + doctest List + + test "cons cell precedence" do + assert [1 | :lists.flatten([2, 3])] == [1, 2, 3] end - test :optional_comma do - assert [1] == [ 1, ] - assert [1, 2, 3] == [1, 2, 3, ] + test "optional comma" do + assert [1] == [1,] + assert [1, 2, 3] == [1, 2, 3,] end - test :partial_application do + test "partial application" do assert (&[&1, 2]).(1) == [1, 2] assert (&[&1, &2]).(1, 2) == [1, 2] assert (&[&2, &1]).(2, 1) == [1, 2] - assert (&[&1|&2]).(1, 2) == [1|2] - assert (&[&1, &2|&3]).(1, 2, 3) == [1, 2|3] + assert (&[&1 | &2]).(1, 2) == [1 | 2] + assert (&[&1, &2 | &3]).(1, 2, 3) == [1, 2 | 3] end - test :wrap do + test "wrap/1" do assert List.wrap([1, 2, 3]) == [1, 2, 3] assert List.wrap(1) == [1] assert List.wrap(nil) == [] end - test :flatten do + test "flatten/1" do assert List.flatten([1, 2, 3]) == [1, 2, 3] assert List.flatten([1, [2], 3]) == [1, 2, 3] assert List.flatten([[1, [2], 3]]) == [1, 2, 3] @@ -35,87 +37,76 @@ defmodule ListTest do assert List.flatten([[]]) == [] end - test :flatten_with_tail do + test "flatten/2" do assert List.flatten([1, 2, 3], [4, 5]) == [1, 2, 3, 4, 5] assert List.flatten([1, [2], 3], [4, 5]) == [1, 2, 3, 4, 5] assert List.flatten([[1, [2], 3]], [4, 5]) == [1, 2, 3, 4, 5] end - test :foldl do + test "foldl/3" do assert List.foldl([1, 2, 3], 0, fn x, y -> x + y end) == 6 assert List.foldl([1, 2, 3], 10, fn x, y -> x + y end) == 16 assert List.foldl([1, 2, 3, 4], 0, fn x, y -> x - y end) == 2 end - test :foldr do + test "foldr/3" do assert List.foldr([1, 2, 3], 0, fn x, y -> x + y end) == 6 assert List.foldr([1, 2, 3], 10, fn x, y -> x + y end) == 16 assert List.foldr([1, 2, 3, 4], 0, fn x, y -> x - y end) == -2 end - test :reverse do - assert Enum.reverse([1, 2, 3]) == [3, 2, 1] - end - - test :duplicate do + test "duplicate/2" do assert List.duplicate(1, 3) == [1, 1, 1] assert List.duplicate([1], 1) == [[1]] end - test :last do + test "last/1" do assert List.last([]) == nil assert List.last([1]) == 1 assert List.last([1, 2, 3]) == 3 end - test :zip do + test "zip/1" do assert List.zip([[1, 4], [2, 5], [3, 6]]) == [{1, 2, 3}, {4, 5, 6}] assert List.zip([[1, 4], [2, 5, 0], [3, 6]]) == [{1, 2, 3}, {4, 5, 6}] assert List.zip([[1], [2, 5], [3, 6]]) == [{1, 2, 3}] assert List.zip([[1, 4], [2, 5], []]) == [] end - test :unzip do - assert List.unzip([{1, 2, 3}, {4, 5, 6}]) == [[1, 4], [2, 5], [3, 6]] - assert List.unzip([{1, 2, 3}, {4, 5}]) == [[1, 4], [2, 5]] - assert List.unzip([[1, 2, 3], [4, 5]]) == [[1, 4], [2, 5]] - assert List.unzip([]) == [] - end - - test :keyfind do + test "keyfind/4" do assert List.keyfind([a: 1, b: 2], :a, 0) == {:a, 1} assert List.keyfind([a: 1, b: 2], 2, 1) == {:b, 2} assert List.keyfind([a: 1, b: 2], :c, 0) == nil end - test :keyreplace do + test "keyreplace/4" do assert List.keyreplace([a: 1, b: 2], :a, 0, {:a, 3}) == [a: 3, b: 2] assert List.keyreplace([a: 1], :b, 0, {:b, 2}) == [a: 1] end - test :keysort do + test "keysort/2" do assert List.keysort([a: 4, b: 3, c: 5], 1) == [b: 3, a: 4, c: 5] assert List.keysort([a: 4, c: 1, b: 2], 0) == [a: 4, b: 2, c: 1] end - test :keystore do + test "keystore/4" do assert List.keystore([a: 1, b: 2], :a, 0, {:a, 3}) == [a: 3, b: 2] assert List.keystore([a: 1], :b, 0, {:b, 2}) == [a: 1, b: 2] end - test :keymember? 
do + test "keymember?/3" do assert List.keymember?([a: 1, b: 2], :a, 0) == true assert List.keymember?([a: 1, b: 2], 2, 1) == true assert List.keymember?([a: 1, b: 2], :c, 0) == false end - test :keydelete do + test "keydelete/3" do assert List.keydelete([a: 1, b: 2], :a, 0) == [{:b, 2}] assert List.keydelete([a: 1, b: 2], 2, 1) == [{:a, 1}] assert List.keydelete([a: 1, b: 2], :c, 0) == [{:a, 1}, {:b, 2}] end - test :insert_at do + test "insert_at/3" do assert List.insert_at([1, 2, 3], 0, 0) == [0, 1, 2, 3] assert List.insert_at([1, 2, 3], 3, 0) == [1, 2, 3, 0] assert List.insert_at([1, 2, 3], 2, 0) == [1, 2, 0, 3] @@ -125,7 +116,7 @@ defmodule ListTest do assert List.insert_at([1, 2, 3], -10, 0) == [0, 1, 2, 3] end - test :replace_at do + test "replace_at/3" do assert List.replace_at([1, 2, 3], 0, 0) == [0, 2, 3] assert List.replace_at([1, 2, 3], 1, 0) == [1, 0, 3] assert List.replace_at([1, 2, 3], 2, 0) == [1, 2, 0] @@ -134,7 +125,7 @@ defmodule ListTest do assert List.replace_at([1, 2, 3], -4, 0) == [1, 2, 3] end - test :update_at do + test "update_at/3" do assert List.update_at([1, 2, 3], 0, &(&1 + 1)) == [2, 2, 3] assert List.update_at([1, 2, 3], 1, &(&1 + 1)) == [1, 3, 3] assert List.update_at([1, 2, 3], 2, &(&1 + 1)) == [1, 2, 4] @@ -143,9 +134,9 @@ defmodule ListTest do assert List.update_at([1, 2, 3], -4, &(&1 + 1)) == [1, 2, 3] end - test :delete_at do - Enum.each [-1, 0, 1], fn i -> - assert [] = List.delete_at([], i) + test "delete_at/2" do + for index <- [-1, 0, 1] do + assert List.delete_at([], index) == [] end assert List.delete_at([1, 2, 3], 0) == [2, 3] assert List.delete_at([1, 2, 3], 2) == [1, 2] @@ -155,7 +146,55 @@ defmodule ListTest do assert List.delete_at([1, 2, 3], -4) == [1, 2, 3] end - test :to_string do + test "pop_at/3" do + for index <- [-1, 0, 1] do + assert List.pop_at([], index) == {nil, []} + end + assert List.pop_at([1], 1, 2) == {2, [1]} + assert List.pop_at([1, 2, 3], 0) == {1, [2, 3]} + assert List.pop_at([1, 2, 3], 2) == {3, [1, 2]} + assert List.pop_at([1, 2, 3], 3) == {nil, [1, 2, 3]} + assert List.pop_at([1, 2, 3], -1) == {3, [1, 2]} + assert List.pop_at([1, 2, 3], -3) == {1, [2, 3]} + assert List.pop_at([1, 2, 3], -4) == {nil, [1, 2, 3]} + end + + describe "starts_with?/2" do + test "list and prefix are equal" do + assert List.starts_with?([], []) + assert List.starts_with?([1], [1]) + assert List.starts_with?([1, 2, 3], [1, 2, 3]) + end + + test "proper lists" do + refute List.starts_with?([1], [1, 2]) + assert List.starts_with?([1, 2, 3], [1, 2]) + refute List.starts_with?([1, 2, 3], [1, 2, 3, 4]) + end + + test "list is empty" do + refute List.starts_with?([], [1]) + refute List.starts_with?([], [1, 2]) + end + + test "prefix is empty" do + assert List.starts_with?([1], []) + assert List.starts_with?([1, 2], []) + assert List.starts_with?([1, 2, 3], []) + end + + test "only accepts lists" do + assert_raise FunctionClauseError, "no function clause matching in List.starts_with?/2", fn -> + List.starts_with?([1 | 2], [1 | 2]) + end + + assert_raise FunctionClauseError, "no function clause matching in List.starts_with?/2", fn -> + List.starts_with?([1, 2], 1) + end + end + end + + test "to_string/1" do assert List.to_string([?æ, ?ß]) == "æß" assert List.to_string([?a, ?b, ?c]) == "abc" @@ -163,5 +202,24 @@ defmodule ListTest do "invalid code point 57343", fn -> List.to_string([0xDFFF]) end + assert_raise UnicodeConversionError, + "invalid encoding starting at <<216, 0>>", fn -> + List.to_string(["a", "b", <<0xD800 :: size(16)>>]) + end + + 
assert_raise ArgumentError, ~r"cannot convert the given list to a string", fn -> + List.to_string([:a, :b]) + end + end + + test "myers_difference/2" do + assert List.myers_difference([], []) == [] + assert List.myers_difference([], [1, 2, 3]) == [ins: [1, 2, 3]] + assert List.myers_difference([1, 2, 3], []) == [del: [1, 2, 3]] + assert List.myers_difference([1, 2, 3], [1, 2, 3]) == [eq: [1, 2, 3]] + assert List.myers_difference([1, 2, 3], [1, 4, 2, 3]) == [eq: [1], ins: [4], eq: [2, 3]] + assert List.myers_difference([1, 4, 2, 3], [1, 2, 3]) == [eq: [1], del: [4], eq: [2, 3]] + assert List.myers_difference([1], [[1]]) == [del: [1], ins: [[1]]] + assert List.myers_difference([[1]], [1]) == [del: [[1]], ins: [1]] end end diff --git a/lib/elixir/test/elixir/macro_test.exs b/lib/elixir/test/elixir/macro_test.exs index 9a1de1c6130..9410796eebb 100644 --- a/lib/elixir/test/elixir/macro_test.exs +++ b/lib/elixir/test/elixir/macro_test.exs @@ -22,197 +22,197 @@ defmodule MacroTest do # fail since we are asserting on the caller lines import Macro.ExternalTest - ## Escape + describe "escape/2" do + test "handles tuples with size different than two" do + assert Macro.escape({:a}) == {:{}, [], [:a]} + assert Macro.escape({:a, :b, :c}) == {:{}, [], [:a, :b, :c]} + assert Macro.escape({:a, {1, 2, 3}, :c}) == {:{}, [], [:a, {:{}, [], [1, 2, 3]}, :c]} + end - test :escape_handle_tuples_with_size_different_than_two do - assert {:{}, [], [:a]} == Macro.escape({:a}) - assert {:{}, [], [:a, :b, :c]} == Macro.escape({:a, :b, :c}) - assert {:{}, [], [:a, {:{}, [], [1,2,3]}, :c]} == Macro.escape({:a, {1, 2, 3}, :c}) - end + test "simply returns tuples with size equal to two" do + assert Macro.escape({:a, :b}) == {:a, :b} + end - test :escape_simply_returns_tuples_with_size_equal_to_two do - assert {:a, :b} == Macro.escape({:a, :b}) - end + test "simply returns any other structure" do + assert Macro.escape([1, 2, 3]) == [1, 2, 3] + end - test :escape_simply_returns_any_other_structure do - assert [1, 2, 3] == Macro.escape([1, 2, 3]) - end + test "handles maps" do + assert Macro.escape(%{a: 1}) == {:%{}, [], [a: 1]} + end - test :escape_handles_maps do - assert {:%{}, [], [a: 1]} = Macro.escape(%{a: 1}) - end + test "handles bitstring" do + assert Macro.escape(<<300::12>>) == {:<<>>, [], [{:::, [], [1, {:size, [], [4]}]}, {:::, [], [",", {:binary, [], []}]}]} + end - test :escape_works_recursively do - assert [1,{:{}, [], [:a,:b,:c]}, 3] == Macro.escape([1, {:a, :b, :c}, 3]) - end + test "works recursively" do + assert Macro.escape([1, {:a, :b, :c}, 3]) == [1, {:{}, [], [:a, :b, :c]}, 3] + end - test :escape_improper do - assert [{:|, [], [1,2]}] == Macro.escape([1|2]) - assert [1,{:|, [], [2,3]}] == Macro.escape([1,2|3]) - end + test "with improper lists" do + assert Macro.escape([1 | 2]) == [{:|, [], [1, 2]}] + assert Macro.escape([1, 2 | 3]) == [1, {:|, [], [2, 3]}] + end - test :escape_with_unquote do - contents = quote unquote: false, do: unquote(1) - assert Macro.escape(contents, unquote: true) == 1 + test "with unquote" do + contents = quote(unquote: false, do: unquote(1)) + assert Macro.escape(contents, unquote: true) == 1 - contents = quote unquote: false, do: unquote(x) - assert Macro.escape(contents, unquote: true) == {:x, [], MacroTest} - end + contents = quote(unquote: false, do: unquote(x)) + assert Macro.escape(contents, unquote: true) == {:x, [], MacroTest} + end - defp eval_escaped(contents) do - {eval, []} = Code.eval_quoted(Macro.escape(contents, unquote: true)) - eval - end + defp 
eval_escaped(contents) do + {eval, []} = Code.eval_quoted(Macro.escape(contents, unquote: true)) + eval + end - test :escape_with_remote_unquote do - contents = quote unquote: false, do: Kernel.unquote(:is_atom)(:ok) - assert eval_escaped(contents) == quote(do: Kernel.is_atom(:ok)) - end + test "with remote unquote" do + contents = quote(unquote: false, do: Kernel.unquote(:is_atom)(:ok)) + assert eval_escaped(contents) == quote(do: Kernel.is_atom(:ok)) + end - test :escape_with_nested_unquote do - contents = quote do - quote do: unquote(x) + test "with nested unquote" do + contents = quote do + quote(do: unquote(x)) + end + assert eval_escaped(contents) == quote(do: quote(do: unquote(x))) end - assert eval_escaped(contents) == quote do: (quote do: unquote(x)) - end - test :escape_with_alias_or_no_args_remote_unquote do - contents = quote unquote: false, do: Kernel.unquote(:self) - assert eval_escaped(contents) == quote(do: Kernel.self()) + test "with alias or no arguments remote unquote" do + contents = quote(unquote: false, do: Kernel.unquote(:self)) + assert eval_escaped(contents) == quote(do: Kernel.self()) - contents = quote unquote: false, do: x.unquote(Foo) - assert eval_escaped(contents) == quote(do: x.unquote(Foo)) - end + contents = quote(unquote: false, do: x.unquote(Foo)) + assert eval_escaped(contents) == quote(do: x.unquote(Foo)) + end - test :escape_with_splicing do - contents = quote unquote: false, do: [1, 2, 3, 4, 5] - assert Macro.escape(contents, unquote: true) == [1, 2, 3, 4, 5] + test "with splicing" do + contents = quote(unquote: false, do: [1, 2, 3, 4, 5]) + assert Macro.escape(contents, unquote: true) == [1, 2, 3, 4, 5] - contents = quote unquote: false, do: [1, 2, unquote_splicing([3, 4, 5])] - assert eval_escaped(contents) == [1, 2, 3, 4, 5] + contents = quote(unquote: false, do: [1, 2, unquote_splicing([3, 4, 5])]) + assert eval_escaped(contents) == [1, 2, 3, 4, 5] - contents = quote unquote: false, do: [unquote_splicing([1, 2, 3]), 4, 5] - assert eval_escaped(contents) == [1, 2, 3, 4, 5] + contents = quote(unquote: false, do: [unquote_splicing([1, 2, 3]), 4, 5]) + assert eval_escaped(contents) == [1, 2, 3, 4, 5] - contents = quote unquote: false, do: [unquote_splicing([1, 2, 3]), unquote_splicing([4, 5])] - assert eval_escaped(contents) == [1, 2, 3, 4, 5] + contents = quote(unquote: false, do: [unquote_splicing([1, 2, 3]), unquote_splicing([4, 5])]) + assert eval_escaped(contents) == [1, 2, 3, 4, 5] - contents = quote unquote: false, do: [1, unquote_splicing([2]), 3, unquote_splicing([4]), 5] - assert eval_escaped(contents) == [1, 2, 3, 4, 5] + contents = quote(unquote: false, do: [1, unquote_splicing([2]), 3, unquote_splicing([4]), 5]) + assert eval_escaped(contents) == [1, 2, 3, 4, 5] - contents = quote unquote: false, do: [1, unquote_splicing([2]), 3, unquote_splicing([4])|[5]] - assert eval_escaped(contents) == [1, 2, 3, 4, 5] + contents = quote(unquote: false, do: [1, unquote_splicing([2]), 3, unquote_splicing([4]) | [5]]) + assert eval_escaped(contents) == [1, 2, 3, 4, 5] + end end - ## Expansion + describe "expand_once/2" do + test "with external macro" do + assert {:||, _, [1, false]} = Macro.expand_once(quote(do: oror(1, false)), __ENV__) + end - test :expand_once do - assert {:||, _, _} = Macro.expand_once(quote(do: oror(1, false)), __ENV__) - end + test "with raw atom" do + assert Macro.expand_once(quote(do: :foo), __ENV__) == :foo + end - test :expand_once_with_raw_atom do - assert Macro.expand_once(quote(do: :foo), __ENV__) == :foo - end + test 
"with current module" do + assert Macro.expand_once(quote(do: __MODULE__), __ENV__) == __MODULE__ + end - test :expand_once_with_current_module do - assert Macro.expand_once(quote(do: __MODULE__), __ENV__) == __MODULE__ - end + test "with main" do + assert Macro.expand_once(quote(do: Elixir), __ENV__) == Elixir + end - test :expand_once_with_main do - assert Macro.expand_once(quote(do: Elixir), __ENV__) == Elixir - end + test "with simple alias" do + assert Macro.expand_once(quote(do: Foo), __ENV__) == Foo + end - test :expand_once_with_simple_alias do - assert Macro.expand_once(quote(do: Foo), __ENV__) == Foo - end + test "with current module plus alias" do + assert Macro.expand_once(quote(do: __MODULE__.Foo), __ENV__) == __MODULE__.Foo + end - test :expand_once_with_current_module_plus_alias do - assert Macro.expand_once(quote(do: __MODULE__.Foo), __ENV__) == __MODULE__.Foo - end + test "with main plus alias" do + assert Macro.expand_once(quote(do: Elixir.Foo), __ENV__) == Foo + end - test :expand_once_with_main_plus_alias do - assert Macro.expand_once(quote(do: Elixir.Foo), __ENV__) == Foo - end + test "with custom alias" do + alias Foo, as: Bar + assert Macro.expand_once(quote(do: Bar.Baz), __ENV__) == Foo.Baz + end - test :expand_once_with_custom_alias do - alias Foo, as: Bar - assert Macro.expand_once(quote(do: Bar.Baz), __ENV__) == Foo.Baz - end + test "with main plus custom alias" do + alias Foo, as: Bar, warn: false + assert Macro.expand_once(quote(do: Elixir.Bar.Baz), __ENV__) == Elixir.Bar.Baz + end - test :expand_once_with_main_plus_custom_alias do - alias Foo, as: Bar, warn: false - assert Macro.expand_once(quote(do: Elixir.Bar.Baz), __ENV__) == Elixir.Bar.Baz - end + test "with call in alias" do + assert Macro.expand_once(quote(do: Foo.bar.Baz), __ENV__) == quote(do: Foo.bar.Baz) + end - test :expand_once_with_op do - assert Macro.expand_once(quote(do: Foo.bar.Baz), __ENV__) == (quote do - Foo.bar.Baz - end) - end + test "env" do + env = %{__ENV__ | line: 0} + assert Macro.expand_once(quote(do: __ENV__), env) == {:%{}, [], Map.to_list(env)} + assert Macro.expand_once(quote(do: __ENV__.file), env) == env.file + assert Macro.expand_once(quote(do: __ENV__.unknown), env) == quote(do: __ENV__.unknown) + end - test :expand_once_with_erlang do - assert Macro.expand_once(quote(do: :foo), __ENV__) == :foo - end + defmacro local_macro() do + :local_macro + end - test :expand_once_env do - env = %{__ENV__ | line: 0} - assert Macro.expand_once(quote(do: __ENV__), env) == {:%{}, [], Map.to_list(env)} - assert Macro.expand_once(quote(do: __ENV__.file), env) == env.file - assert Macro.expand_once(quote(do: __ENV__.unknown), env) == quote(do: __ENV__.unknown) - end + test "local macro" do + assert Macro.expand_once(quote(do: local_macro), __ENV__) == :local_macro + end - defmacro local_macro do - :local_macro - end + test "checks vars" do + local_macro = 1 + assert local_macro == 1 + expr = {:local_macro, [], nil} + assert Macro.expand_once(expr, __ENV__) == expr + end - test :expand_once_local_macro do - assert Macro.expand_once(quote(do: local_macro), __ENV__) == :local_macro - end + defp expand_once_and_clean(quoted, env) do + cleaner = &Keyword.drop(&1, [:counter]) + quoted + |> Macro.expand_once(env) + |> Macro.prewalk(&Macro.update_meta(&1, cleaner)) + end - test :expand_once_checks_vars do - local_macro = 1 - assert local_macro == 1 - quote = {:local_macro, [], nil} - assert Macro.expand_once(quote, __ENV__) == quote - end + test "with imported macro" do + temp_var = {:x, [], Kernel} 
+ assert expand_once_and_clean(quote(do: 1 || false), __ENV__) == (quote context: Kernel do + case 1 do + unquote(temp_var) when unquote(temp_var) in [false, nil] -> false + unquote(temp_var) -> unquote(temp_var) + end + end) + end - defp expand_once_and_clean(quoted, env) do - cleaner = &Keyword.drop(&1, [:counter]) - quoted - |> Macro.expand_once(env) - |> Macro.prewalk(&Macro.update_meta(&1, cleaner)) - end + test "with require macro" do + temp_var = {:x, [], Kernel} + assert expand_once_and_clean(quote(do: Kernel.||(1, false)), __ENV__) == (quote context: Kernel do + case 1 do + unquote(temp_var) when unquote(temp_var) in [false, nil] -> false + unquote(temp_var) -> unquote(temp_var) + end + end) + end - test :expand_once_with_imported_macro do - temp_var = {:x, [], Kernel} - assert expand_once_and_clean(quote(do: 1 || false), __ENV__) == (quote context: Kernel do - case 1 do - unquote(temp_var) when unquote(temp_var) in [false, nil] -> false - unquote(temp_var) -> unquote(temp_var) - end - end) - end + test "with not expandable expression" do + expr = quote(do: other(1, 2, 3)) + assert Macro.expand_once(expr, __ENV__) == expr + end - test :expand_once_with_require_macro do - temp_var = {:x, [], Kernel} - assert expand_once_and_clean(quote(do: Kernel.||(1, false)), __ENV__) == (quote context: Kernel do - case 1 do - unquote(temp_var) when unquote(temp_var) in [false, nil] -> false - unquote(temp_var) -> unquote(temp_var) + test "does not expand module attributes" do + message = + "could not call get_attribute with argument #{inspect(__MODULE__)} " <> + "because the module is already compiled" + assert_raise ArgumentError, message, fn -> + Macro.expand_once(quote(do: @foo), __ENV__) end - end) - end - - test :expand_once_with_not_expandable_expression do - expr = quote(do: other(1, 2, 3)) - assert Macro.expand_once(expr, __ENV__) == expr - end - - @foo 1 - @bar Macro.expand_once(quote(do: @foo), __ENV__) - - test :expand_once_with_module_at do - assert @bar == 1 + end end defp expand_and_clean(quoted, env) do @@ -222,7 +222,7 @@ defmodule MacroTest do |> Macro.prewalk(&Macro.update_meta(&1, cleaner)) end - test :expand do + test "expand/2" do temp_var = {:x, [], Kernel} assert expand_and_clean(quote(do: oror(1, false)), __ENV__) == (quote context: Kernel do case 1 do @@ -232,267 +232,408 @@ defmodule MacroTest do end) end - test :var do + test "var/2" do assert Macro.var(:foo, nil) == {:foo, [], nil} assert Macro.var(:foo, Other) == {:foo, [], Other} end - ## to_string + describe "to_string/1" do + test "variable" do + assert Macro.to_string(quote do: foo) == "foo" + end - test :var_to_string do - assert Macro.to_string(quote do: foo) == "foo" - end + test "local call" do + assert Macro.to_string(quote do: foo(1, 2, 3)) == "foo(1, 2, 3)" + assert Macro.to_string(quote do: foo([1, 2, 3])) == "foo([1, 2, 3])" + end - test :local_call_to_string do - assert Macro.to_string(quote do: foo(1, 2, 3)) == "foo(1, 2, 3)" - assert Macro.to_string(quote do: foo([1, 2, 3])) == "foo([1, 2, 3])" - end + test "remote call" do + assert Macro.to_string(quote do: foo.bar(1, 2, 3)) == "foo.bar(1, 2, 3)" + assert Macro.to_string(quote do: foo.bar([1, 2, 3])) == "foo.bar([1, 2, 3])" + end - test :remote_call_to_string do - assert Macro.to_string(quote do: foo.bar(1, 2, 3)) == "foo.bar(1, 2, 3)" - assert Macro.to_string(quote do: foo.bar([1, 2, 3])) == "foo.bar([1, 2, 3])" - end + test "atom remote call" do + assert Macro.to_string(quote do: :foo.bar(1, 2, 3)) == ":foo.bar(1, 2, 3)" + end - test 
:low_atom_remote_call_to_string do - assert Macro.to_string(quote do: :foo.bar(1, 2, 3)) == ":foo.bar(1, 2, 3)" - end + test "remote and fun call" do + assert Macro.to_string(quote do: foo.bar.(1, 2, 3)) == "foo.bar().(1, 2, 3)" + assert Macro.to_string(quote do: foo.bar.([1, 2, 3])) == "foo.bar().([1, 2, 3])" + end - test :big_atom_remote_call_to_string do - assert Macro.to_string(quote do: Foo.Bar.bar(1, 2, 3)) == "Foo.Bar.bar(1, 2, 3)" - end + test "unusual remote atom fun call" do + assert Macro.to_string(quote do: Foo."42") == ~s/Foo."42"()/ + assert Macro.to_string(quote do: Foo.'Bar') == ~s/Foo."Bar"()/ + assert Macro.to_string(quote do: Foo."bar baz"."") == ~s/Foo."bar baz"().""()/ + assert Macro.to_string(quote do: Foo."%{}") == ~s/Foo."%{}"()/ + assert Macro.to_string(quote do: Foo."...") == ~s/Foo."..."()/ + end - test :remote_and_fun_call_to_string do - assert Macro.to_string(quote do: foo.bar.(1, 2, 3)) == "foo.bar().(1, 2, 3)" - assert Macro.to_string(quote do: foo.bar.([1, 2, 3])) == "foo.bar().([1, 2, 3])" - end + test "atom fun call" do + assert Macro.to_string(quote do: :foo.(1, 2, 3)) == ":foo.(1, 2, 3)" + end - test :atom_call_to_string do - assert Macro.to_string(quote do: :foo.(1, 2, 3)) == ":foo.(1, 2, 3)" - end + test "aliases call" do + assert Macro.to_string(quote do: Foo.Bar.baz(1, 2, 3)) == "Foo.Bar.baz(1, 2, 3)" + assert Macro.to_string(quote do: Foo.Bar.baz([1, 2, 3])) == "Foo.Bar.baz([1, 2, 3])" + assert Macro.to_string(quote do: Foo.bar(<<>>, [])) == "Foo.bar(<<>>, [])" + end - test :aliases_call_to_string do - assert Macro.to_string(quote do: Foo.Bar.baz(1, 2, 3)) == "Foo.Bar.baz(1, 2, 3)" - assert Macro.to_string(quote do: Foo.Bar.baz([1, 2, 3])) == "Foo.Bar.baz([1, 2, 3])" - end + test "keyword call" do + assert Macro.to_string(quote do: Foo.bar(foo: :bar)) == "Foo.bar(foo: :bar)" + assert Macro.to_string(quote do: Foo.bar(["Elixir.Foo": :bar])) == "Foo.bar([{Foo, :bar}])" + end - test :arrow_to_string do - assert Macro.to_string(quote do: foo(1, (2 -> 3))) == "foo(1, (2 -> 3))" - end + test "sigil call" do + assert Macro.to_string(quote do: ~r"123") == ~s/~r"123"/ + assert Macro.to_string(quote do: ~r"123"u) == ~s/~r"123"u/ + assert Macro.to_string(quote do: ~r"\n123") == ~s/~r"\\\\n123"/ - test :blocks_to_string do - assert Macro.to_string(quote do: (1; 2; (:foo; :bar); 3)) <> "\n" == """ - ( - 1 - 2 - ( - :foo - :bar - ) - 3 - ) - """ - end + assert Macro.to_string(quote do: ~r"1#{two}3") == ~S/~r"1#{two}3"/ + assert Macro.to_string(quote do: ~r"1#{two}3"u) == ~S/~r"1#{two}3"u/ - test :if_else_to_string do - assert Macro.to_string(quote do: (if foo, do: bar, else: baz)) <> "\n" == """ - if(foo) do - bar - else - baz + assert Macro.to_string(quote do: ~R"123") == ~s/~R"123"/ + assert Macro.to_string(quote do: ~R"123"u) == ~s/~R"123"u/ + assert Macro.to_string(quote do: ~R"\n123") == ~s/~R"\\\\n123"/ end - """ - end - test :case_to_string do - assert Macro.to_string(quote do: (case foo do true -> 0; false -> (1; 2) end)) <> "\n" == """ - case(foo) do - true -> - 0 - false -> + test "arrow" do + assert Macro.to_string(quote do: foo(1, (2 -> 3))) == "foo(1, (2 -> 3))" + end + + test "block" do + assert Macro.to_string(quote do: (1; 2; (:foo; :bar); 3)) <> "\n" == """ + ( 1 2 + ( + :foo + :bar + ) + 3 + ) + """ end - """ - end - test :fn_to_string do - assert Macro.to_string(quote do: (fn -> 1 + 2 end)) == "fn -> 1 + 2 end" - assert Macro.to_string(quote do: (fn(x) -> x + 1 end)) == "fn x -> x + 1 end" + test "not in" do + assert Macro.to_string(quote do: 
(false not in [])) == "false not in []" + end + + test "if else" do + assert Macro.to_string(quote do: (if foo, do: bar, else: baz)) <> "\n" == """ + if(foo) do + bar + else + baz + end + """ + end - assert Macro.to_string(quote do: (fn(x) -> y = x + 1; y end)) <> "\n" == """ - fn x -> - y = x + 1 - y + test "case" do + assert Macro.to_string(quote do: (case foo do true -> 0; false -> (1; 2) end)) <> "\n" == """ + case(foo) do + true -> + 0 + false -> + 1 + 2 + end + """ end - """ - assert Macro.to_string(quote do: (fn(x) -> y = x + 1; y; (z) -> z end)) <> "\n" == """ - fn - x -> + test "fn" do + assert Macro.to_string(quote do: (fn -> 1 + 2 end)) == "fn -> 1 + 2 end" + assert Macro.to_string(quote do: (fn(x) -> x + 1 end)) == "fn x -> x + 1 end" + + assert Macro.to_string(quote do: (fn(x) -> y = x + 1; y end)) <> "\n" == """ + fn x -> y = x + 1 y - z -> - z + end + """ + + assert Macro.to_string(quote do: (fn(x) -> y = x + 1; y; (z) -> z end)) <> "\n" == """ + fn + x -> + y = x + 1 + y + z -> + z + end + """ + + assert Macro.to_string(quote do: (fn(x) -> x end).(1)) == "(fn x -> x end).(1)" + + assert Macro.to_string(quote do: (fn %{} -> :map; _ -> :other end).(1)) <> "\n" == """ + (fn + %{} -> + :map + _ -> + :other + end).(1) + """ end - """ - end - test :when do - assert Macro.to_string(quote do: (() -> x)) == "(() -> x)" - assert Macro.to_string(quote do: (x when y -> z)) == "(x when y -> z)" - assert Macro.to_string(quote do: (x, y when z -> w)) == "((x, y) when z -> w)" - assert Macro.to_string(quote do: ((x, y) when z -> w)) == "((x, y) when z -> w)" - end + test "range" do + assert Macro.to_string(quote do: unquote(-1..+2)) == "-1..2" + assert Macro.to_string(quote do: Foo.integer..3) == "Foo.integer()..3" + end - test :nested_to_string do - assert Macro.to_string(quote do: (defmodule Foo do def foo do 1 + 1 end end)) <> "\n" == """ - defmodule(Foo) do - def(foo) do - 1 + 1 + test "when" do + assert Macro.to_string(quote do: (() -> x)) == "(() -> x)" + assert Macro.to_string(quote do: (x when y -> z)) == "(x when y -> z)" + assert Macro.to_string(quote do: (x, y when z -> w)) == "((x, y) when z -> w)" + assert Macro.to_string(quote do: ((x, y) when z -> w)) == "((x, y) when z -> w)" + end + + test "nested" do + assert Macro.to_string(quote do: (defmodule Foo do def foo do 1 + 1 end end)) <> "\n" == """ + defmodule(Foo) do + def(foo) do + 1 + 1 + end end + """ end - """ - end - test :op_precedence_to_string do - assert Macro.to_string(quote do: (1 + 2) * (3 - 4)) == "(1 + 2) * (3 - 4)" - assert Macro.to_string(quote do: ((1 + 2) * 3) - 4) == "(1 + 2) * 3 - 4" - assert Macro.to_string(quote do: (1 + 2 + 3) == "(1 + 2 + 3)") - assert Macro.to_string(quote do: (1 + 2 - 3) == "(1 + 2 - 3)") - end + test "operator precedence" do + assert Macro.to_string(quote do: (1 + 2) * (3 - 4)) == "(1 + 2) * (3 - 4)" + assert Macro.to_string(quote do: ((1 + 2) * 3) - 4) == "(1 + 2) * 3 - 4" + assert Macro.to_string(quote do: (1 + 2 + 3) == "(1 + 2 + 3)") + assert Macro.to_string(quote do: (1 + 2 - 3) == "(1 + 2 - 3)") + end - test :containers_to_string do - assert Macro.to_string(quote do: {}) == "{}" - assert Macro.to_string(quote do: []) == "[]" - assert Macro.to_string(quote do: {1, 2, 3}) == "{1, 2, 3}" - assert Macro.to_string(quote do: [ 1, 2, 3 ]) == "[1, 2, 3]" - assert Macro.to_string(quote do: %{}) == "%{}" - assert Macro.to_string(quote do: %{:foo => :bar}) == "%{foo: :bar}" - assert Macro.to_string(quote do: %{{1,2} => [1,2,3]}) == "%{{1, 2} => [1, 2, 3]}" - assert Macro.to_string(quote 
do: %{map | "a" => "b"}) == "%{map | \"a\" => \"b\"}" - assert Macro.to_string(quote do: [ 1, 2, 3 ]) == "[1, 2, 3]" - assert Macro.to_string(quote do: << 1, 2, 3 >>) == "<<1, 2, 3>>" - assert Macro.to_string(quote do: << <<1>> >>) == "<< <<1>> >>" - end + test "capture operator" do + assert Macro.to_string(quote do: &foo/0) == "&foo/0" + assert Macro.to_string(quote do: &Foo.foo/0) == "&Foo.foo/0" + assert Macro.to_string(quote do: & &1 + &2) == "&(&1 + &2)" + assert Macro.to_string(quote do: & &1) == "&(&1)" + assert Macro.to_string(quote do: &(&1).(:x)) == "&(&1.(:x))" + assert Macro.to_string(quote do: (&(&1)).(:x)) == "(&(&1)).(:x)" + end - test :struct_to_string do - assert Macro.to_string(quote do: %Test{}) == "%Test{}" - assert Macro.to_string(quote do: %Test{foo: 1, bar: 1}) == "%Test{foo: 1, bar: 1}" - assert Macro.to_string(quote do: %Test{struct | foo: 2}) == "%Test{struct | foo: 2}" - assert Macro.to_string(quote do: %Test{} + 1) == "%Test{} + 1" - end + test "containers" do + assert Macro.to_string(quote do: {}) == "{}" + assert Macro.to_string(quote do: []) == "[]" + assert Macro.to_string(quote do: {1, 2, 3}) == "{1, 2, 3}" + assert Macro.to_string(quote do: [ 1, 2, 3 ]) == "[1, 2, 3]" + assert Macro.to_string(quote do: ["Elixir.Foo": :bar]) == "[{Foo, :bar}]" + assert Macro.to_string(quote do: %{}) == "%{}" + assert Macro.to_string(quote do: %{:foo => :bar}) == "%{foo: :bar}" + assert Macro.to_string(quote do: %{:"Elixir.Foo" => :bar}) == "%{Foo => :bar}" + assert Macro.to_string(quote do: %{{1, 2} => [1, 2, 3]}) == "%{{1, 2} => [1, 2, 3]}" + assert Macro.to_string(quote do: %{map | "a" => "b"}) == "%{map | \"a\" => \"b\"}" + assert Macro.to_string(quote do: [ 1, 2, 3 ]) == "[1, 2, 3]" + end - test :binary_ops_to_string do - assert Macro.to_string(quote do: 1 + 2) == "1 + 2" - assert Macro.to_string(quote do: [ 1, 2 | 3 ]) == "[1, 2 | 3]" - assert Macro.to_string(quote do: [h|t] = [1, 2, 3]) == "[h | t] = [1, 2, 3]" - assert Macro.to_string(quote do: (x ++ y) ++ z) == "(x ++ y) ++ z" - end + test "struct" do + assert Macro.to_string(quote do: %Test{}) == "%Test{}" + assert Macro.to_string(quote do: %Test{foo: 1, bar: 1}) == "%Test{foo: 1, bar: 1}" + assert Macro.to_string(quote do: %Test{struct | foo: 2}) == "%Test{struct | foo: 2}" + assert Macro.to_string(quote do: %Test{} + 1) == "%Test{} + 1" + end - test :unary_ops_to_string do - assert Macro.to_string(quote do: not 1) == "not 1" - assert Macro.to_string(quote do: not foo) == "not foo" - assert Macro.to_string(quote do: -1) == "-1" - assert Macro.to_string(quote do: !(foo > bar)) == "!(foo > bar)" - assert Macro.to_string(quote do: @foo(bar)) == "@foo(bar)" - assert Macro.to_string(quote do: identity(&1)) == "identity(&1)" - assert Macro.to_string(quote do: identity(&foo)) == "identity(&foo)" - end + test "binary operators" do + assert Macro.to_string(quote do: 1 + 2) == "1 + 2" + assert Macro.to_string(quote do: [1, 2 | 3]) == "[1, 2 | 3]" + assert Macro.to_string(quote do: [h | t] = [1, 2, 3]) == "[h | t] = [1, 2, 3]" + assert Macro.to_string(quote do: (x ++ y) ++ z) == "(x ++ y) ++ z" + end - test :access_to_string do - assert Macro.to_string(quote do: a[b]) == "a[b]" - assert Macro.to_string(quote do: a[1 + 2]) == "a[1 + 2]" - end + test "unary operators" do + assert Macro.to_string(quote do: not 1) == "not 1" + assert Macro.to_string(quote do: not foo) == "not foo" + assert Macro.to_string(quote do: -1) == "-1" + assert Macro.to_string(quote do: !(foo > bar)) == "!(foo > bar)" + assert Macro.to_string(quote do: 
@foo(bar)) == "@foo(bar)" + assert Macro.to_string(quote do: identity(&1)) == "identity(&1)" + end - test :kw_list do - assert Macro.to_string(quote do: [a: a, b: b]) == "[a: a, b: b]" - assert Macro.to_string(quote do: [a: 1, b: 1 + 2]) == "[a: 1, b: 1 + 2]" - assert Macro.to_string(quote do: ["a.b": 1, c: 1 + 2]) == "[\"a.b\": 1, c: 1 + 2]" - end + test "access" do + assert Macro.to_string(quote do: a[b]) == "a[b]" + assert Macro.to_string(quote do: a[1 + 2]) == "a[1 + 2]" + assert Macro.to_string(quote do: (a || [a: 1])[:a]) == "(a || [a: 1])[:a]" + assert Macro.to_string(quote do: Map.put(%{}, :a, 1)[:a]) == "Map.put(%{}, :a, 1)[:a]" + end - test :string_list do - assert Macro.to_string(quote do: []) == "[]" - assert Macro.to_string(quote do: 'abc') == "'abc'" - end + test "keyword list" do + assert Macro.to_string(quote do: [a: a, b: b]) == "[a: a, b: b]" + assert Macro.to_string(quote do: [a: 1, b: 1 + 2]) == "[a: 1, b: 1 + 2]" + assert Macro.to_string(quote do: ["a.b": 1, c: 1 + 2]) == "[\"a.b\": 1, c: 1 + 2]" + end - test :last_arg_kw_list do - assert Macro.to_string(quote do: foo([])) == "foo([])" - assert Macro.to_string(quote do: foo(x: y)) == "foo(x: y)" - assert Macro.to_string(quote do: foo(x: 1 + 2)) == "foo(x: 1 + 2)" - assert Macro.to_string(quote do: foo(x: y, p: q)) == "foo(x: y, p: q)" - assert Macro.to_string(quote do: foo(a, x: y, p: q)) == "foo(a, x: y, p: q)" + test "interpolation" do + assert Macro.to_string(quote do: "foo#{bar}baz") == ~S["foo#{bar}baz"] + end - assert Macro.to_string(quote do: {[]}) == "{[]}" - assert Macro.to_string(quote do: {[a: b]}) == "{[a: b]}" - assert Macro.to_string(quote do: {x, a: b}) == "{x, [a: b]}" - end + test "bit syntax" do + ast = quote(do: <<1::8*4>>) + assert Macro.to_string(ast) == "<<1::8*4>>" + + ast = quote(do: @type foo :: <<_::8, _::_*4>>) + assert Macro.to_string(ast) == "@type(foo :: <<_::8, _::_*4>>)" + + ast = quote(do: <<69 - 4::bits-size(8 - 4)-unit(1), 65>>) + assert Macro.to_string(ast) == "<<69 - 4::bits-size(8 - 4)-unit(1), 65>>" + + ast = quote(do: << <<65>>, 65>>) + assert Macro.to_string(ast) == "<<(<<65>>), 65>>" + + ast = quote(do: <<65, <<65>> >>) + assert Macro.to_string(ast) == "<<65, (<<65>>)>>" - test :to_string_with_fun do - assert Macro.to_string(quote(do: foo(1, 2, 3)), fn _, string -> ":#{string}:" end) == - ":foo(:1:, :2:, :3:):" + ast = quote(do: (for <> >>, do: a)) + assert Macro.to_string(ast) == "for(<<(a :: 4 <- <<1, 2>>)>>) do\n a\nend" + end + + test "charlist" do + assert Macro.to_string(quote do: []) == "[]" + assert Macro.to_string(quote do: 'abc') == "'abc'" + end + + test "last arg keyword list" do + assert Macro.to_string(quote do: foo([])) == "foo([])" + assert Macro.to_string(quote do: foo(x: y)) == "foo(x: y)" + assert Macro.to_string(quote do: foo(x: 1 + 2)) == "foo(x: 1 + 2)" + assert Macro.to_string(quote do: foo(x: y, p: q)) == "foo(x: y, p: q)" + assert Macro.to_string(quote do: foo(a, x: y, p: q)) == "foo(a, x: y, p: q)" + + assert Macro.to_string(quote do: {[]}) == "{[]}" + assert Macro.to_string(quote do: {[a: b]}) == "{[a: b]}" + assert Macro.to_string(quote do: {x, a: b}) == "{x, [a: b]}" + assert Macro.to_string(quote do: foo(else: a)) == "foo(else: a)" + assert Macro.to_string(quote do: foo(catch: a)) == "foo(catch: a)" + end + + test "with fun" do + assert Macro.to_string(quote(do: foo(1, 2, 3)), fn _, string -> ":#{string}:" end) == + ":foo(:1:, :2:, :3:):" - assert Macro.to_string(quote(do: Bar.foo(1, 2, 3)), fn _, string -> ":#{string}:" end) == - "::Bar:.foo(:1:, 
:2:, :3:):" + assert Macro.to_string(quote(do: Bar.foo(1, 2, 3)), fn _, string -> ":#{string}:" end) == + "::Bar:.foo(:1:, :2:, :3:):" + end end - ## decompose_call + test "validate/1" do + ref = make_ref() + + assert Macro.validate(1) == :ok + assert Macro.validate(1.0) == :ok + assert Macro.validate(:foo) == :ok + assert Macro.validate("bar") == :ok + assert Macro.validate(self()) == :ok + assert Macro.validate({1, 2}) == :ok + assert Macro.validate({:foo, [], :baz}) == :ok + assert Macro.validate({:foo, [], []}) == :ok + assert Macro.validate([1, 2, 3]) == :ok - test :decompose_call do - assert Macro.decompose_call(quote do: foo) == {:foo, []} - assert Macro.decompose_call(quote do: foo()) == {:foo, []} + assert Macro.validate(<<0::4>>) == {:error, <<0::4>>} + assert Macro.validate(ref) == {:error, ref} + assert Macro.validate({1, ref}) == {:error, ref} + assert Macro.validate({ref, 2}) == {:error, ref} + assert Macro.validate([1, ref, 3]) == {:error, ref} + assert Macro.validate({:foo, [], 0}) == {:error, {:foo, [], 0}} + assert Macro.validate({:foo, 0, []}) == {:error, {:foo, 0, []}} + end + + test "decompose_call/1" do + assert Macro.decompose_call(quote do: foo) == {:foo, []} + assert Macro.decompose_call(quote do: foo()) == {:foo, []} assert Macro.decompose_call(quote do: foo(1, 2, 3)) == {:foo, [1, 2, 3]} assert Macro.decompose_call(quote do: M.N.foo(1, 2, 3)) == {{:__aliases__, [alias: false], [:M, :N]}, :foo, [1, 2, 3]} assert Macro.decompose_call(quote do: :foo.foo(1, 2, 3)) == {:foo, :foo, [1, 2, 3]} - assert Macro.decompose_call(quote do: 1.(1, 2, 3)) == :error - assert Macro.decompose_call(quote do: "some string") == :error + assert Macro.decompose_call(quote do: 1.(1, 2, 3)) == :error + assert Macro.decompose_call(quote do: "some string") == :error end - ## env - - test :env_stacktrace do - env = %{__ENV__ | file: "foo", line: 12} - assert Macro.Env.stacktrace(env) == - [{__MODULE__, :"test env_stacktrace", 1, [file: "foo", line: 12]}] + describe "env" do + test "stacktrace" do + env = %{__ENV__ | file: "foo", line: 12} + assert Macro.Env.stacktrace(env) == + [{__MODULE__, :"test env stacktrace", 1, [file: "foo", line: 12]}] - env = %{env | function: nil} - assert Macro.Env.stacktrace(env) == - [{__MODULE__, :__MODULE__, 0, [file: "foo", line: 12]}] + env = %{env | function: nil} + assert Macro.Env.stacktrace(env) == + [{__MODULE__, :__MODULE__, 0, [file: "foo", line: 12]}] - env = %{env | module: nil} - assert Macro.Env.stacktrace(env) == - [{:elixir_compiler, :__FILE__, 1, [file: "foo", line: 12]}] - end + env = %{env | module: nil} + assert Macro.Env.stacktrace(env) == + [{:elixir_compiler, :__FILE__, 1, [file: "foo", line: 12]}] + end - test :context_modules do - defmodule Foo.Bar do - assert __MODULE__ in __ENV__.context_modules + test "context modules" do + defmodule Foo.Bar do + assert __MODULE__ in __ENV__.context_modules + end end end ## pipe/unpipe - test :pipe do + test "pipe/3" do assert Macro.pipe(1, quote(do: foo), 0) == quote(do: foo(1)) assert Macro.pipe(1, quote(do: foo(2)), 0) == quote(do: foo(1, 2)) assert Macro.pipe(1, quote(do: foo), -1) == quote(do: foo(1)) assert Macro.pipe(2, quote(do: foo(1)), -1) == quote(do: foo(1, 2)) - assert_raise ArgumentError, "cannot pipe 1 into 2", fn -> + assert_raise ArgumentError, ~r"cannot pipe 1 into 2", fn -> Macro.pipe(1, 2, 0) end + + assert_raise ArgumentError, ~r"cannot pipe 1 into {:ok}", fn -> + Macro.pipe(1, {:ok}, 0) + end + + assert_raise ArgumentError, ~r"cannot pipe 1 into 1 \+ 1", fn -> + Macro.pipe(1, 
quote(do: 1 + 1), 0) == quote(do: foo(1)) + end + + # TODO: restore this test when we drop unary operator support in pipes + # assert_raise ArgumentError, ~r"cannot pipe 1 into \+1", fn -> + # Macro.pipe(1, quote(do: + 1), 0) + # end + + assert_raise ArgumentError, ~r"cannot pipe Macro into Env", fn -> + Macro.pipe(Macro, quote(do: Env), 0) + end + + message = ~r"cannot pipe :foo into an anonymous function without calling" + assert_raise ArgumentError, message, fn -> + Macro.pipe(:foo, quote(do: fn x -> x end), 0) + end end - test :unpipe do + test "unpipe/1" do assert Macro.unpipe(quote(do: foo)) == quote(do: [{foo, 0}]) assert Macro.unpipe(quote(do: foo |> bar)) == quote(do: [{foo, 0}, {bar, 0}]) assert Macro.unpipe(quote(do: foo |> bar |> baz)) == quote(do: [{foo, 0}, {bar, 0}, {baz, 0}]) end - ## pre/postwalk + ## traverse/pre/postwalk + + test "traverse/4" do + assert traverse({:foo, [], nil}) == + [{:foo, [], nil}, {:foo, [], nil}] + + assert traverse({:foo, [], [1, 2, 3]}) == + [{:foo, [], [1, 2, 3]}, 1, 1, 2, 2, 3, 3, {:foo, [], [1, 2, 3]}] + + assert traverse({{:., [], [:foo, :bar]}, [], [1, 2, 3]}) == + [{{:., [], [:foo, :bar]}, [], [1, 2, 3]}, {:., [], [:foo, :bar]}, :foo, :foo, :bar, :bar, {:., [], [:foo, :bar]}, + 1, 1, 2, 2, 3, 3, {{:., [], [:foo, :bar]}, [], [1, 2, 3]}] + + assert traverse({[1, 2, 3], [4, 5, 6]}) == + [{[1, 2, 3], [4, 5, 6]}, [1, 2, 3], 1, 1, 2, 2, 3, 3, [1, 2, 3], + [4, 5, 6], 4, 4, 5, 5, 6, 6, [4, 5, 6], {[1, 2, 3], [4, 5, 6]}] + end + + defp traverse(ast) do + Macro.traverse(ast, [], &{&1, [&1 | &2]}, &{&1, [&1 | &2]}) |> elem(1) |> Enum.reverse + end - test :prewalk do + test "prewalk/3" do assert prewalk({:foo, [], nil}) == [{:foo, [], nil}] @@ -507,10 +648,10 @@ defmodule MacroTest do end defp prewalk(ast) do - Macro.prewalk(ast, [], &{&1, [&1|&2]}) |> elem(1) |> Enum.reverse + Macro.prewalk(ast, [], &{&1, [&1 | &2]}) |> elem(1) |> Enum.reverse end - test :postwalk do + test "postwalk/3" do assert postwalk({:foo, [], nil}) == [{:foo, [], nil}] @@ -524,7 +665,51 @@ defmodule MacroTest do [1, 2, 3, [1, 2, 3], 4, 5, 6, [4, 5, 6], {[1, 2, 3], [4, 5, 6]}] end + test "generate_arguments/2" do + assert Macro.generate_arguments(0, __MODULE__) == [] + assert Macro.generate_arguments(1, __MODULE__) == [{:var1, [], __MODULE__}] + assert Macro.generate_arguments(4, __MODULE__) |> length == 4 + end + defp postwalk(ast) do - Macro.postwalk(ast, [], &{&1, [&1|&2]}) |> elem(1) |> Enum.reverse + Macro.postwalk(ast, [], &{&1, [&1 | &2]}) |> elem(1) |> Enum.reverse + end + + test "underscore/1" do + assert Macro.underscore("foo") == "foo" + assert Macro.underscore("foo_bar") == "foo_bar" + assert Macro.underscore("Foo") == "foo" + assert Macro.underscore("FooBar") == "foo_bar" + assert Macro.underscore("FOOBar") == "foo_bar" + assert Macro.underscore("FooBAR") == "foo_bar" + assert Macro.underscore("FOO_BAR") == "foo_bar" + assert Macro.underscore("FoBaZa") == "fo_ba_za" + assert Macro.underscore("Foo10") == "foo10" + assert Macro.underscore("10Foo") == "10_foo" + assert Macro.underscore("FooBar10") == "foo_bar10" + assert Macro.underscore("Foo10Bar") == "foo10_bar" + assert Macro.underscore("Foo.Bar") == "foo/bar" + assert Macro.underscore(Foo.Bar) == "foo/bar" + assert Macro.underscore("API.V1.User") == "api/v1/user" + assert Macro.underscore("") == "" + end + + test "camelize/1" do + assert Macro.camelize("Foo") == "Foo" + assert Macro.camelize("FooBar") == "FooBar" + assert Macro.camelize("foo") == "Foo" + assert Macro.camelize("foo_bar") == "FooBar" + assert 
Macro.camelize("foo_") == "Foo" + assert Macro.camelize("_foo") == "Foo" + assert Macro.camelize("foo10") == "Foo10" + assert Macro.camelize("_10foo") == "10foo" + assert Macro.camelize("foo_10") == "Foo10" + assert Macro.camelize("foo__10") == "Foo10" + assert Macro.camelize("foo__bar") == "FooBar" + assert Macro.camelize("foo/bar") == "Foo.Bar" + assert Macro.camelize("Foo.Bar") == "Foo.Bar" + assert Macro.camelize("FOO_BAR") == "FOO_BAR" + assert Macro.camelize("FOO.BAR") == "FOO.BAR" + assert Macro.camelize("") == "" end end diff --git a/lib/elixir/test/elixir/map_set_test.exs b/lib/elixir/test/elixir/map_set_test.exs new file mode 100644 index 00000000000..3dd59692fae --- /dev/null +++ b/lib/elixir/test/elixir/map_set_test.exs @@ -0,0 +1,111 @@ +Code.require_file "test_helper.exs", __DIR__ + +defmodule MapSetTest do + use ExUnit.Case, async: true + + doctest MapSet + + test "new/1" do + result = MapSet.new(1..5) + assert MapSet.equal?(result, Enum.into(1..5, MapSet.new)) + end + + test "new/2" do + result = MapSet.new(1..5, &(&1 + 2)) + assert MapSet.equal?(result, Enum.into(3..7, MapSet.new)) + end + + test "put/2" do + result = MapSet.put(MapSet.new, 1) + assert MapSet.equal?(result, MapSet.new([1])) + + result = MapSet.put(MapSet.new([1, 3, 4]), 2) + assert MapSet.equal?(result, MapSet.new(1..4)) + + result = MapSet.put(MapSet.new(5..100), 10) + assert MapSet.equal?(result, MapSet.new(5..100)) + end + + test "union/2" do + result = MapSet.union(MapSet.new([1, 3, 4]), MapSet.new) + assert MapSet.equal?(result, MapSet.new([1, 3, 4])) + + result = MapSet.union(MapSet.new(5..15), MapSet.new(10..25)) + assert MapSet.equal?(result, MapSet.new(5..25)) + + result = MapSet.union(MapSet.new(1..120), MapSet.new(1..100)) + assert MapSet.equal?(result, MapSet.new(1..120)) + end + + test "intersection/2" do + result = MapSet.intersection(MapSet.new, MapSet.new(1..21)) + assert MapSet.equal?(result, MapSet.new) + + result = MapSet.intersection(MapSet.new(1..21), MapSet.new(4..24)) + assert MapSet.equal?(result, MapSet.new(4..21)) + + result = MapSet.intersection(MapSet.new(2..100), MapSet.new(1..120)) + assert MapSet.equal?(result, MapSet.new(2..100)) + end + + test "difference/2" do + result = MapSet.difference(MapSet.new(2..20), MapSet.new) + assert MapSet.equal?(result, MapSet.new(2..20)) + + result = MapSet.difference(MapSet.new(2..20), MapSet.new(1..21)) + assert MapSet.equal?(result, MapSet.new) + + result = MapSet.difference(MapSet.new(1..101), MapSet.new(2..100)) + assert MapSet.equal?(result, MapSet.new([1, 101])) + end + + test "disjoint?/2" do + assert MapSet.disjoint?(MapSet.new, MapSet.new) + assert MapSet.disjoint?(MapSet.new(1..6), MapSet.new(8..20)) + refute MapSet.disjoint?(MapSet.new(1..6), MapSet.new(5..15)) + refute MapSet.disjoint?(MapSet.new(1..120), MapSet.new(1..6)) + end + + test "subset?/2" do + assert MapSet.subset?(MapSet.new, MapSet.new) + assert MapSet.subset?(MapSet.new(1..6), MapSet.new(1..10)) + assert MapSet.subset?(MapSet.new(1..6), MapSet.new(1..120)) + refute MapSet.subset?(MapSet.new(1..120), MapSet.new(1..6)) + end + + test "equal?/2" do + assert MapSet.equal?(MapSet.new, MapSet.new) + refute MapSet.equal?(MapSet.new(1..20), MapSet.new(2..21)) + assert MapSet.equal?(MapSet.new(1..120), MapSet.new(1..120)) + end + + test "delete/2" do + result = MapSet.delete(MapSet.new, 1) + assert MapSet.equal?(result, MapSet.new) + + result = MapSet.delete(MapSet.new(1..4), 5) + assert MapSet.equal?(result, MapSet.new(1..4)) + + result = MapSet.delete(MapSet.new(1..4), 1) 
+ assert MapSet.equal?(result, MapSet.new(2..4)) + + result = MapSet.delete(MapSet.new(1..4), 2) + assert MapSet.equal?(result, MapSet.new([1, 3, 4])) + end + + test "size/1" do + assert MapSet.size(MapSet.new) == 0 + assert MapSet.size(MapSet.new(5..15)) == 11 + assert MapSet.size(MapSet.new(2..100)) == 99 + end + + test "to_list/1" do + assert MapSet.to_list(MapSet.new) == [] + + list = MapSet.to_list(MapSet.new(1..20)) + assert Enum.sort(list) == Enum.to_list(1..20) + + list = MapSet.to_list(MapSet.new(5..120)) + assert Enum.sort(list) == Enum.to_list(5..120) + end +end diff --git a/lib/elixir/test/elixir/map_test.exs b/lib/elixir/test/elixir/map_test.exs index a005bf0c5ba..0c1f1d0d010 100644 --- a/lib/elixir/test/elixir/map_test.exs +++ b/lib/elixir/test/elixir/map_test.exs @@ -3,50 +3,18 @@ Code.require_file "test_helper.exs", __DIR__ defmodule MapTest do use ExUnit.Case, async: true - defp empty_map do - %{} - end - - defp two_items_map do - %{a: 1, b: 2} - end + doctest Map - @map %{a: 1, b: 2} + @sample %{a: 1, b: 2} test "maps in attributes" do - assert @map == %{a: 1, b: 2} + assert @sample == %{a: 1, b: 2} end test "maps when quoted" do assert (quote do %{foo: 1} end) == {:%{}, [], [{:foo, 1}]} - - assert (quote do - % - {foo: 1} - end) == {:%{}, [], [{:foo, 1}]} - end - - test "structs when quoted" do - assert (quote do - %User{foo: 1} - end) == {:%, [], [ - {:__aliases__, [alias: false], [:User]}, - {:%{}, [], [{:foo, 1}]} - ]} - - assert (quote do - % - User{foo: 1} - end) == {:%, [], [ - {:__aliases__, [alias: false], [:User]}, - {:%{}, [], [{:foo, 1}]} - ]} - - assert (quote do - %unquote(User){foo: 1} - end) == {:%, [], [User, {:%{}, [], [{:foo, 1}]}]} end test "maps keywords and atoms" do @@ -60,14 +28,61 @@ defmodule MapTest do assert a == 1 end + test "maps with generated variables in key" do + assert %{"#{1}" => 1} == %{"1" => 1} + assert %{(for x <- 1..3, do: x) => 1} == %{[1, 2, 3] => 1} + assert %{(with x = 1, do: x) => 1} == %{1 => 1} + assert %{(with {:ok, x} <- {:ok, 1}, do: x) => 1} == %{1 => 1} + assert %{(try do raise "error" rescue _ -> 1 end) => 1} == %{1 => 1} + assert %{(try do throw 1 catch x -> x end) => 1} == %{1 => 1} + assert %{(try do a = 1; a rescue _ -> 2 end) => 1} == %{1 => 1} + assert %{(try do 1 else a -> a end) => 1} == %{1 => 1} + end + + test "matching with map as a key" do + assert %{%{1 => 2} => x} = %{%{1 => 2} => 3} + assert x == 3 + end + test "is_map/1" do - assert is_map empty_map - refute is_map(Enum.to_list(empty_map)) + assert is_map(Map.new) + refute is_map(Enum.to_list(%{})) end test "map_size/1" do - assert map_size(empty_map) == 0 - assert map_size(two_items_map) == 2 + assert map_size(%{}) == 0 + assert map_size(@sample) == 2 + end + + test "take/2" do + assert Map.take(%{a: 1, b: 2, c: 3}, [:b, :c]) == %{b: 2, c: 3} + assert Map.take(%{a: 1, b: 2, c: 3}, MapSet.new([:b, :c])) == %{b: 2, c: 3} + assert Map.take(%{a: 1, b: 2, c: 3}, []) == %{} + assert_raise BadMapError, fn -> Map.take(:foo, []) end + end + + test "drop/2" do + assert Map.drop(%{a: 1, b: 2, c: 3}, [:b, :c]) == %{a: 1} + assert Map.drop(%{a: 1, b: 2, c: 3}, MapSet.new([:b, :c])) == %{a: 1} + assert_raise BadMapError, fn -> Map.drop(:foo, []) end + end + + test "split/2" do + assert Map.split(%{a: 1, b: 2, c: 3}, [:b, :c]) == {%{b: 2, c: 3}, %{a: 1}} + assert Map.split(%{a: 1, b: 2, c: 3}, MapSet.new([:b, :c])) == {%{b: 2, c: 3}, %{a: 1}} + assert_raise BadMapError, fn -> Map.split(:foo, []) end + end + + test "get_and_update/3" do + assert_raise RuntimeError, 
"the given function must return a two-element tuple or :pop, got: 1", fn -> + Map.get_and_update(%{a: 1}, :a, fn value -> value end) + end + end + + test "get_and_update!/3" do + assert_raise RuntimeError, "the given function must return a two-element tuple or :pop, got: 1", fn -> + Map.get_and_update!(%{a: 1}, :a, fn value -> value end) + end end test "maps with optional comma" do @@ -76,51 +91,144 @@ defmodule MapTest do assert %{1 => 2, a: :b,} == %{1 => 2, a: :b} end - test "maps with duplicate keys" do - assert %{a: :b, a: :c} == %{a: :c} - assert %{1 => 2, 1 => 3} == %{1 => 3} - assert %{:a => :b, a: :c} == %{a: :c} - end - test "update maps" do - assert %{two_items_map | a: 3} == %{a: 3, b: 2} + assert %{@sample | a: 3} == %{a: 3, b: 2} - assert_raise ArgumentError, fn -> - %{two_items_map | c: 3} + assert_raise KeyError, fn -> + %{@sample | c: 3} end end test "map access" do - assert two_items_map.a == 1 + assert @sample.a == 1 assert_raise KeyError, fn -> - two_items_map.c + @sample.c end end + test "merge/3" do + # When first map is bigger + assert Map.merge(%{a: 1, b: 2, c: 3}, %{c: 4, d: 5}, fn :c, 3, 4 -> :x end) == + %{a: 1, b: 2, c: :x, d: 5} + + # When second map is bigger + assert Map.merge(%{b: 2, c: 3}, %{a: 1, c: 4, d: 5}, fn :c, 3, 4 -> :x end) == + %{a: 1, b: 2, c: :x, d: 5} + end + + test "implements (almost) all functions in Keyword" do + assert Keyword.__info__(:functions) -- Map.__info__(:functions) == + [delete: 3, delete_first: 2, get_values: 2, keyword?: 1, pop_first: 2, pop_first: 3] + end + + test "variable keys" do + x = :key + %{^x => :value} = %{x => :value} + assert %{x => :value} == %{key: :value} + assert (fn %{^x => :value} -> true end).(%{key: :value}) + + map = %{x => :value} + assert %{map | x => :new_value} == %{x => :new_value} + end + defmodule ExternalUser do def __struct__ do - %{__struct__: ThisDoesNotLeak, name: "josé", age: 27} + %{__struct__: ThisDoesNotLeak, name: "john", age: 27} + end + + def __struct__(kv) do + Enum.reduce kv, __struct__(), fn {k, v}, acc -> :maps.update(k, v, acc) end end end test "structs" do assert %ExternalUser{} == - %{__struct__: ExternalUser, name: "josé", age: 27} + %{__struct__: ExternalUser, name: "john", age: 27} - assert %ExternalUser{name: "valim"} == - %{__struct__: ExternalUser, name: "valim", age: 27} + assert %ExternalUser{name: "meg"} == + %{__struct__: ExternalUser, name: "meg", age: 27} user = %ExternalUser{} - assert %ExternalUser{user | name: "valim"} == - %{__struct__: ExternalUser, name: "valim", age: 27} + assert %ExternalUser{user | name: "meg"} == + %{__struct__: ExternalUser, name: "meg", age: 27} %ExternalUser{name: name} = %ExternalUser{} - assert name == "josé" + assert name == "john" map = %{} assert_raise BadStructError, "expected a struct named MapTest.ExternalUser, got: %{}", fn -> - %ExternalUser{map | name: "valim"} + %ExternalUser{map | name: "meg"} + end + end + + test "structs when matching" do + %struct{name: "john"} = %ExternalUser{name: "john", age: 27} + assert struct == ExternalUser + user = %ExternalUser{name: "john", age: 27} + %^struct{name: "john"} = user + end + + test "structs when using dynamic modules" do + defmodule Module.concat(MapTest, DynamicUser) do + defstruct [:name, :age] + + def sample do + %__MODULE__{} + end + end + end + + test "structs when quoted" do + assert (quote do + %User{foo: 1} + end) == {:%, [], [ + {:__aliases__, [alias: false], [:User]}, + {:%{}, [], [{:foo, 1}]} + ]} + + assert (quote do + %unquote(User){foo: 1} + end) == {:%, [], [User, 
{:%{}, [], [{:foo, 1}]}]} + end + + test "defstruct can only be used once in a module" do + message = "defstruct has already been called for TestMod, " <> + "defstruct can only be called once per module" + assert_raise ArgumentError, message, fn -> + Code.eval_string(""" + defmodule TestMod do + defstruct [:foo] + defstruct [:foo] + end + """) + end + end + + test "defstruct allow keys to be enforced" do + message = "the following keys must also be given when building struct TestMod: [:foo]" + assert_raise ArgumentError, message, fn -> + Code.eval_string(""" + defmodule TestMod do + @enforce_keys :foo + defstruct [:foo] + def foo do + %TestMod{} + end + end + """) + end + end + + test "defstruct raises on invalid enforce_keys" do + message = "keys given to @enforce_keys must be atoms, got: \"foo\"" + assert_raise ArgumentError, message, fn -> + Code.eval_string(""" + defmodule TestMod do + @enforce_keys "foo" + defstruct [:foo] + end + """) end end @@ -129,7 +237,7 @@ defmodule MapTest do defstruct [] end - defstruct name: "josé", nested: struct(NestedUser) + defstruct name: "john", nested: struct(NestedUser), context: %{} def new do %LocalUser{} @@ -142,16 +250,18 @@ defmodule MapTest do end end - test "local user" do - assert LocalUser.new == %LocalUser{name: "josé", nested: %LocalUser.NestedUser{}} - assert LocalUser.Context.new == %LocalUser{name: "josé", nested: %LocalUser.NestedUser{}} + test "local and nested structs" do + assert LocalUser.new == %LocalUser{name: "john", nested: %LocalUser.NestedUser{}} + assert LocalUser.Context.new == %LocalUser{name: "john", nested: %LocalUser.NestedUser{}} end - defmodule NilUser do - defstruct name: nil, contents: %{} + defmodule :elixir_struct_from_erlang_module do + defstruct [:hello] + def world(%:elixir_struct_from_erlang_module{} = struct), do: struct end - test "nil user" do - assert %NilUser{} == %{__struct__: NilUser, name: nil, contents: %{}} + test "struct from erlang module" do + struct = %:elixir_struct_from_erlang_module{} + assert :elixir_struct_from_erlang_module.world(struct) == struct end end diff --git a/lib/elixir/test/elixir/module/locals_tracker_test.exs b/lib/elixir/test/elixir/module/locals_tracker_test.exs index d24f7eb497d..17994d871e7 100644 --- a/lib/elixir/test/elixir/module/locals_tracker_test.exs +++ b/lib/elixir/test/elixir/module/locals_tracker_test.exs @@ -6,7 +6,8 @@ defmodule Module.LocalsTrackerTest do alias Module.LocalsTracker, as: D setup do - {:ok, [pid: D.start_link]} + {:ok, pid} = D.start_link + {:ok, [pid: pid]} end ## Locals @@ -54,48 +55,61 @@ defmodule Module.LocalsTrackerTest do assert {:private, 1} in D.reachable(config[:pid]) end - @unused [ - {{:private, 1}, :defp, 0} - ] + test "can yank and reattach nodes", config do + D.add_definition(config[:pid], :def, {:foo, 1}) + D.add_local(config[:pid], {:foo, 1}, {:bar, 1}) + D.add_definition(config[:pid], :defp, {:bar, 1}) + + {infoo, outfoo} = D.yank(config[:pid], {:foo, 1}) + {inbar, outbar} = D.yank(config[:pid], {:bar, 1}) + + D.reattach(config[:pid], :defp, {:bar, 1}, {inbar, outbar}) + D.reattach(config[:pid], :def, {:foo, 1}, {infoo, outfoo}) + assert {:bar, 1} in D.reachable(config[:pid]) + end test "unused private definitions are marked as so", config do D.add_definition(config[:pid], :def, {:public, 1}) + D.add_local(config[:pid], {:public, 1}, {:private, 1}) + D.add_local(config[:pid], {:private, 2}) - unused = D.collect_unused_locals(config[:pid], @unused) - assert unused == [{:unused_def, {:private, 1}, :defp}] + unused = 
D.collect_unused_locals(config[:pid], [{{:private, 0}, :defp, [], 0}]) + assert unused == {[private: 0], [{[], {:unused_def, {:private, 0}, :defp}}]} - D.add_local(config[:pid], {:public, 1}, {:private, 1}) - unused = D.collect_unused_locals(config[:pid], @unused) - refute unused == [{:unused_def, {:private, 1}, :defp}] + unused = D.collect_unused_locals(config[:pid], [{{:private, 1}, :defp, [], 0}]) + assert unused == {[], []} + + unused = D.collect_unused_locals(config[:pid], [{{:private, 2}, :defp, [], 0}]) + assert unused == {[], []} end @unused [ - {{:private, 3}, :defp, 3} + {{:private, 3}, :defp, [], 3} ] test "private definitions with unused default arguments", config do D.add_definition(config[:pid], :def, {:public, 1}) unused = D.collect_unused_locals(config[:pid], @unused) - assert unused == [{:unused_def, {:private, 3}, :defp}] + assert unused == {[private: 3], [{[], {:unused_def, {:private, 3}, :defp}}]} D.add_local(config[:pid], {:public, 1}, {:private, 3}) unused = D.collect_unused_locals(config[:pid], @unused) - assert unused == [{:unused_args, {:private, 3}}] + assert unused == {[], [{[], {:unused_args, {:private, 3}}}]} end test "private definitions with some unused default arguments", config do D.add_definition(config[:pid], :def, {:public, 1}) D.add_local(config[:pid], {:public, 1}, {:private, 1}) unused = D.collect_unused_locals(config[:pid], @unused) - assert unused == [{:unused_args, {:private, 3}, 1}] + assert unused == {[private: 3], [{[], {:unused_args, {:private, 3}, 1}}]} end test "private definitions with all used default arguments", config do D.add_definition(config[:pid], :def, {:public, 1}) D.add_local(config[:pid], {:public, 1}, {:private, 0}) unused = D.collect_unused_locals(config[:pid], @unused) - assert unused == [] + assert unused == {[private: 3], []} end ## Defaults @@ -119,12 +133,12 @@ defmodule Module.LocalsTrackerTest do refute {:foo, 3} in D.reachable(config[:pid]) end - test "defaults are connected", config do + test "defaults are connected to last clause only", config do D.add_definition(config[:pid], :defp, {:foo, 4}) D.add_defaults(config[:pid], :defp, {:foo, 4}, 2) D.add_local(config[:pid], {:foo, 2}) assert {:foo, 2} in D.reachable(config[:pid]) - assert {:foo, 3} in D.reachable(config[:pid]) + refute {:foo, 3} in D.reachable(config[:pid]) assert {:foo, 4} in D.reachable(config[:pid]) end @@ -137,14 +151,27 @@ defmodule Module.LocalsTrackerTest do end test "find import conflicts", config do - refute {[Module], :conflict, 1} in D.collect_imports_conflicts(config[:pid], [conflict: 1]) + entries = [{{:conflict, 1}, :def, [], []}] + + refute {[], {[Module], :conflict, 1}} in D.collect_imports_conflicts(config[:pid], entries) # Calls outside local functions are not triggered D.add_import(config[:pid], nil, Module, {:conflict, 1}) - refute {[Module], :conflict, 1} in D.collect_imports_conflicts(config[:pid], [conflict: 1]) + refute {[], {[Module], :conflict, 1}} in D.collect_imports_conflicts(config[:pid], entries) D.add_local(config[:pid], {:foo, 2}) D.add_import(config[:pid], {:foo, 2}, Module, {:conflict, 1}) - assert {[Module], :conflict, 1} in D.collect_imports_conflicts(config[:pid], [conflict: 1]) + assert {[], {[Module], :conflict, 1}} in D.collect_imports_conflicts(config[:pid], entries) + end + + defmodule NoPrivate do + defmacrop foo(), do: bar() + defp bar(), do: :baz + def baz(), do: foo() + end + + test "does not include unreachable locals" do + assert NoPrivate.module_info(:functions) == + [__info__: 1, baz: 0, module_info: 0, 
module_info: 1] end end diff --git a/lib/elixir/test/elixir/module_test.exs b/lib/elixir/test/elixir/module_test.exs index a750e93becd..e8f457147d4 100644 --- a/lib/elixir/test/elixir/module_test.exs +++ b/lib/elixir/test/elixir/module_test.exs @@ -31,7 +31,7 @@ end defmodule ModuleTest.ToUse do 32 = __ENV__.line # Moving the next line around can make tests fail var = 1 - var # Not available in callbacks + _ = var # Not available in callbacks def callback_value(false), do: false use ModuleTest.ToBeUsed end @@ -39,77 +39,81 @@ end defmodule ModuleTest do use ExUnit.Case, async: true + doctest Module + Module.register_attribute __MODULE__, :register_example, accumulate: true, persist: true @register_example :it_works @register_example :still_works - contents = quote do: (def eval_quoted_info, do: {__MODULE__, __ENV__.file, __ENV__.line}) + contents = quote do + def eval_quoted_info, do: {__MODULE__, __ENV__.file, __ENV__.line} + end Module.eval_quoted __MODULE__, contents, [], file: "sample.ex", line: 13 + defp purge(module) do + :code.purge(module) + :code.delete(module) + end + defmacrop in_module(block) do quote do defmodule Temp, unquote(block) - :code.purge(Temp) - :code.delete(Temp) + purge Temp + end + end + + test "module attributes returns value" do + in_module do + assert (@return [:foo, :bar]) == :ok + _ = @return end end + test "in memory modules are tagged as so" do + assert :code.which(__MODULE__) == :in_memory + end + ## Eval - test :eval_quoted do + test "executes eval_quoted definitions" do assert eval_quoted_info() == {ModuleTest, "sample.ex", 13} end - test :line_from_macro do + test "retrieves line from macros" do assert ModuleTest.ToUse.line == 36 end ## Callbacks - test :compile_callback_hook do + test "executes custom before_compile callback" do assert ModuleTest.ToUse.callback_value(true) == true assert ModuleTest.ToUse.callback_value(false) == false end - test :before_compile_callback_hook do + test "executes default before_compile callback" do assert ModuleTest.ToUse.before_compile == [] end - test :on_definition do - defmodule OnDefinition do - @on_definition ModuleTest - - def hello(foo, bar) do - foo + bar - end - end - - assert Process.get(ModuleTest.OnDefinition) == :called - end - def __on_definition__(env, kind, name, args, guards, expr) do Process.put(env.module, :called) assert env.module == ModuleTest.OnDefinition assert kind == :def assert name == :hello - assert [{:foo, _, _}, {:bar, _ , _}] = args + assert [{:foo, _, _}, {:bar, _, _}] = args assert [] = guards - assert {{:., _, [:erlang, :+]}, _, [{:foo, _, nil}, {:bar, _, nil}]} = expr + assert [do: {:+, _, [{:foo, _, nil}, {:bar, _, nil}]}] = expr end - test :overridable_inside_before_compile do - defmodule OverridableWithBeforeCompile do - @before_compile ModuleTest - end - assert OverridableWithBeforeCompile.constant == 1 - end + test "executes on definition callback" do + defmodule OnDefinition do + @on_definition ModuleTest - test :alias_with_raw_atom do - defmodule :"Elixir.ModuleTest.RawModule" do - def hello, do: :world + def hello(foo, bar) do + foo + bar + end end - assert RawModule.hello == :world + assert Process.get(ModuleTest.OnDefinition) == :called end defmacro __before_compile__(_) do @@ -119,28 +123,42 @@ defmodule ModuleTest do end end + test "may set overridable inside before_compile callback" do + defmodule OverridableWithBeforeCompile do + @before_compile ModuleTest + end + assert OverridableWithBeforeCompile.constant == 1 + end + ## Attributes - test :reserved_attributes do - 
assert List.keyfind(ExUnit.Server.__info__(:attributes), :behaviour, 0) == {:behaviour, [:gen_server]} + test "reserved attributes" do + assert List.keyfind(ExUnit.Server.__info__(:attributes), :behaviour, 0) == {:behaviour, [GenServer]} end - test :registered_attributes do - assert [{:register_example, [:it_works]}, {:register_example, [:still_works]}] == - Enum.filter __MODULE__.__info__(:attributes), &match?({:register_example, _}, &1) + test "registered attributes" do + assert Enum.filter __MODULE__.__info__(:attributes), &match?({:register_example, _}, &1) == + [{:register_example, [:it_works]}, {:register_example, [:still_works]}] end - @some_attribute [1] + @some_attribute [1] @other_attribute [3, 2, 1] - test :inside_function_attributes do - assert [1] = @some_attribute - assert [3, 2, 1] = @other_attribute + test "inside function attributes" do + assert @some_attribute == [1] + assert @other_attribute == [3, 2, 1] + end + + test "@compile autoload attribute" do + defmodule NoAutoload do + @compile {:autoload, false} + end + refute :code.is_loaded(NoAutoload) end ## Naming - test :concat do + test "concat" do assert Module.concat(Foo, Bar) == Foo.Bar assert Module.concat(Foo, :Bar) == Foo.Bar assert Module.concat(Foo, "Bar") == Foo.Bar @@ -149,39 +167,62 @@ defmodule ModuleTest do assert Module.concat(Bar, nil) == Elixir.Bar end - test :safe_concat do + test "safe concat" do assert Module.safe_concat(Foo, :Bar) == Foo.Bar assert_raise ArgumentError, fn -> Module.safe_concat SafeConcat, Doesnt.Exist end end - test :split do + test "split" do module = Very.Long.Module.Name.And.Even.Longer assert Module.split(module) == ["Very", "Long", "Module", "Name", "And", "Even", "Longer"] assert Module.split("Elixir.Very.Long") == ["Very", "Long"] + assert_raise ArgumentError, "expected an Elixir module, got: :just_an_atom", fn -> + Module.split(:just_an_atom) + end + assert_raise ArgumentError, "expected an Elixir module, got: \"Foo\"", fn -> + Module.split("Foo") + end assert Module.concat(Module.split(module)) == module end - test :__MODULE__ do + test "__MODULE__" do assert Code.eval_string("__MODULE__.Foo") |> elem(0) == Foo end + test "__ENV__.file" do + assert Path.basename(__ENV__.file) == "module_test.exs" + end + + @file "sample.ex" + test "__ENV__.file with module attribute" do + assert __ENV__.file == "sample.ex" + end + ## Creation - test :defmodule do + test "defmodule" do assert match?({:module, Defmodule, binary, 3} when is_binary(binary), defmodule Defmodule do 1 + 2 end) end - test :defmodule_with_atom do + test "defmodule with atom" do assert match?({:module, :root_defmodule, _, _}, defmodule :root_defmodule do :ok end) end - test :create do + test "defmodule with alias as atom" do + defmodule :"Elixir.ModuleTest.RawModule" do + def hello, do: :world + end + + assert RawModule.hello == :world + end + + test "create" do contents = quote do def world, do: true @@ -191,7 +232,7 @@ defmodule ModuleTest do assert ModuleCreateSample.world end - test :create_with_elixir_as_a_name do + test "create with Elixir as a name" do contents = quote do def world, do: true @@ -202,7 +243,65 @@ defmodule ModuleTest do end end - test :no_function_in_module_body do + test "create with aliases/var hygiene" do + contents = + quote do + alias List, as: L + def test do + L.flatten([1, [2], 3]) + end + end + + Module.create ModuleHygiene, contents, __ENV__ + assert ModuleHygiene.test == [1, 2, 3] + end + + test "ensure function clauses are ordered" do + {_, _, binary, _} = + defmodule Ordered do + def 
foo(:foo), do: :bar + def baz(:baz), do: :bat + end + atoms = :beam_lib.chunks(binary, [:atoms]) + assert :erlang.phash2(atoms) == 53987778 + end + + # TODO: Remove this check once we depend only on 19 + if :erlang.system_info(:otp_release) >= '19' do + test "create with generated true does not emit warnings" do + contents = + quote generated: true do + def world, do: true + def world, do: false + end + {:module, ModuleCreateGenerated, _, _} = + Module.create(ModuleCreateGenerated, contents, __ENV__) + assert ModuleCreateGenerated.world + end + end + + # TODO: Remove this check once we depend only on 20 + if :erlang.system_info(:otp_release) >= '20' do + test "uses the new debug_info chunk" do + {:module, ModuleCreateDebugInfo, binary, _} = + Module.create(ModuleCreateDebugInfo, :ok, __ENV__) + {:ok, {_, [debug_info: {:debug_info_v1, backend, data}]}} = + :beam_lib.chunks(binary, [:debug_info]) + {:ok, map} = backend.debug_info(:elixir_v1, ModuleCreateDebugInfo, data, []) + assert map.module == ModuleCreateDebugInfo + end + + test "uses the new debug_info chunk even if debug_info is set to false" do + {:module, ModuleCreateNoDebugInfo, binary, _} = + Module.create(ModuleCreateNoDebugInfo, quote(do: @compile {:debug_info, false}), __ENV__) + {:ok, {_, [debug_info: {:debug_info_v1, backend, data}]}} = + :beam_lib.chunks(binary, [:debug_info]) + assert backend.debug_info(:elixir_v1, ModuleCreateNoDebugInfo, data, []) == + {:error, :missing} + end + end + + test "no function in module body" do in_module do assert __ENV__.function == nil end @@ -210,7 +309,7 @@ defmodule ModuleTest do ## Definitions - test :defines? do + test "defines?" do in_module do refute Module.defines? __MODULE__, {:foo, 0} def foo(), do: bar() @@ -227,7 +326,7 @@ defmodule ModuleTest do end end - test :definitions_in do + test "definitions in" do in_module do def foo(1, 2, 3), do: 4 @@ -237,8 +336,18 @@ defmodule ModuleTest do end end - test :function do - assert Module.function(:erlang, :atom_to_list, 1).(:hello) == 'hello' - assert is_function Module.function(This, :also_works, 0) + test "make_overridable/2 with invalid arguments" do + contents = + quote do + Module.make_overridable(__MODULE__, [{:foo, 256}]) + end + + assert_raise ArgumentError, + "each element in tuple list has to be a {function_name :: atom, arity :: 0..255} tuple, got: {:foo, 256}", + fn -> + Module.create(Foo, contents, __ENV__) + end + after + purge Foo end end diff --git a/lib/elixir/test/elixir/node_test.exs b/lib/elixir/test/elixir/node_test.exs index ff612e4a8cb..d1f1fe60a74 100644 --- a/lib/elixir/test/elixir/node_test.exs +++ b/lib/elixir/test/elixir/node_test.exs @@ -3,6 +3,8 @@ Code.require_file "test_helper.exs", __DIR__ defmodule NodeTest do use ExUnit.Case + doctest Node + test "start/3 and stop/0" do assert Node.stop == {:error, :not_found} assert {:ok, _} = Node.start(:hello, :shortnames, 15000) diff --git a/lib/elixir/test/elixir/option_parser_test.exs b/lib/elixir/test/elixir/option_parser_test.exs index e4f648f0d97..ad3f2a07481 100644 --- a/lib/elixir/test/elixir/option_parser_test.exs +++ b/lib/elixir/test/elixir/option_parser_test.exs @@ -3,291 +3,397 @@ Code.require_file "test_helper.exs", __DIR__ defmodule OptionParserTest do use ExUnit.Case, async: true + doctest OptionParser + test "parses boolean option" do assert OptionParser.parse(["--docs"]) == {[docs: true], [], []} end test "parses alias boolean option as the alias key" do - assert OptionParser.parse(["-d"], aliases: [d: :docs]) - == {[docs: true], [], []} + assert 
OptionParser.parse(["-d"], aliases: [d: :docs]) == + {[docs: true], [], []} end test "parses more than one boolean option" do - assert OptionParser.parse(["--docs", "--compile"]) - == {[docs: true, compile: true], [], []} + assert OptionParser.parse(["--docs", "--compile"]) == + {[docs: true, compile: true], [], []} end test "parses more than one boolean options as the alias" do - assert OptionParser.parse(["-d", "--compile"], aliases: [d: :docs]) - == {[docs: true, compile: true], [], []} + assert OptionParser.parse(["-d", "--compile"], aliases: [d: :docs]) == + {[docs: true, compile: true], [], []} end test "parses --key value option" do - assert OptionParser.parse(["--source", "form_docs/"]) - == {[source: "form_docs/"], [], []} + assert OptionParser.parse(["--source", "form_docs/"]) == + {[source: "form_docs/"], [], []} + end + + test "parses only to existing atoms" do + assert OptionParser.parse(["--option-key-does-not-exist"]) == + {[], [], [{"--option-key-does-not-exist", nil}]} end test "parses --key=value option" do - assert OptionParser.parse(["--source=form_docs/", "other"]) - == {[source: "form_docs/"], ["other"], []} + assert OptionParser.parse(["--source=form_docs/", "other"]) == + {[source: "form_docs/"], ["other"], []} end test "parses alias --key value option as the alias" do - assert OptionParser.parse(["-s", "from_docs/"], aliases: [s: :source]) - == {[source: "from_docs/"], [], []} + assert OptionParser.parse(["-s", "from_docs/"], aliases: [s: :source]) == + {[source: "from_docs/"], [], []} end test "parses alias --key=value option as the alias" do - assert OptionParser.parse(["-s=from_docs/", "other"], aliases: [s: :source]) - == {[source: "from_docs/"], ["other"], []} + assert OptionParser.parse(["-s=from_docs/", "other"], aliases: [s: :source]) == + {[source: "from_docs/"], ["other"], []} end test "does not interpret undefined options with value as boolean" do - assert OptionParser.parse(["--no-bool"]) - == {[no_bool: true], [], []} - assert OptionParser.parse(["--no-bool"], strict: []) - == {[], [], [{"--no-bool", nil}]} - assert OptionParser.parse(["--no-bool=...", "other"]) - == {[], ["other"], [{"--no-bool", "..."}]} + assert OptionParser.parse(["--no-bool"]) == + {[no_bool: true], [], []} + assert OptionParser.parse(["--no-bool"], strict: []) == + {[], [], [{"--no-bool", nil}]} + assert OptionParser.parse(["--no-bool=...", "other"]) == + {[no_bool: "..."], ["other"], []} end test "does not parse -- as an alias" do - assert OptionParser.parse(["--s=from_docs/"], aliases: [s: :source]) - == {[s: "from_docs/"], [], []} + assert OptionParser.parse(["--s=from_docs/"], aliases: [s: :source]) == + {[s: "from_docs/"], [], []} end - test "does not parse - as a switch" do - assert OptionParser.parse(["-source=from_docs/"], aliases: [s: :source]) - == {[], [], [{"-source", "from_docs/"}]} + test "parses -ab as -a -b" do + aliases = [a: :first, b: :second] + + assert OptionParser.parse(["-ab"], aliases: aliases) == + {[first: true, second: true], [], []} + + assert OptionParser.parse(["-ab=1"], aliases: aliases, switches: [second: :integer]) == + {[first: true, second: 1], [], []} + + assert OptionParser.parse(["-ab", "1"], aliases: aliases, switches: [second: :integer]) == + {[first: true, second: 1], [], []} end test "parses configured booleans" do - assert OptionParser.parse(["--docs=false"], switches: [docs: :boolean]) - == {[docs: false], [], []} - assert OptionParser.parse(["--docs=true"], switches: [docs: :boolean]) - == {[docs: true], [], []} - assert 
OptionParser.parse(["--docs=other"], switches: [docs: :boolean]) - == {[], [], [{"--docs", "other"}]} - assert OptionParser.parse(["--docs="], switches: [docs: :boolean]) - == {[], [], [{"--docs", ""}]} - - assert OptionParser.parse(["--docs", "foo"], switches: [docs: :boolean]) - == {[docs: true], ["foo"], []} - assert OptionParser.parse(["--no-docs", "foo"], switches: [docs: :boolean]) - == {[docs: false], ["foo"], []} - assert OptionParser.parse(["--no-docs=foo", "bar"], switches: [docs: :boolean]) - == {[], ["bar"], [{"--no-docs", "foo"}]} - assert OptionParser.parse(["--no-docs=", "bar"], switches: [docs: :boolean]) - == {[], ["bar"], [{"--no-docs", ""}]} + assert OptionParser.parse(["--docs=false"], switches: [docs: :boolean]) == + {[docs: false], [], []} + assert OptionParser.parse(["--docs=true"], switches: [docs: :boolean]) == + {[docs: true], [], []} + assert OptionParser.parse(["--docs=other"], switches: [docs: :boolean]) == + {[], [], [{"--docs", "other"}]} + assert OptionParser.parse(["--docs="], switches: [docs: :boolean]) == + {[], [], [{"--docs", ""}]} + + assert OptionParser.parse(["--docs", "foo"], switches: [docs: :boolean]) == + {[docs: true], ["foo"], []} + assert OptionParser.parse(["--no-docs", "foo"], switches: [docs: :boolean]) == + {[docs: false], ["foo"], []} + assert OptionParser.parse(["--no-docs=foo", "bar"], switches: [docs: :boolean]) == + {[], ["bar"], [{"--no-docs", "foo"}]} + assert OptionParser.parse(["--no-docs=", "bar"], switches: [docs: :boolean]) == + {[], ["bar"], [{"--no-docs", ""}]} end test "does not set unparsed booleans" do - assert OptionParser.parse(["foo"], switches: [docs: :boolean]) - == {[], ["foo"], []} + assert OptionParser.parse(["foo"], switches: [docs: :boolean]) == + {[], ["foo"], []} end test "keeps options on configured keep" do - args = ["--require", "foo", "--require", "bar", "baz"] - assert OptionParser.parse(args, switches: [require: :keep]) - == {[require: "foo", require: "bar"], ["baz"], []} + argv = ["--require", "foo", "--require", "bar", "baz"] + assert OptionParser.parse(argv, switches: [require: :keep]) == + {[require: "foo", require: "bar"], ["baz"], []} - assert OptionParser.parse(["--require"], switches: [require: :keep]) - == {[], [], [{"--require", nil}]} + assert OptionParser.parse(["--require"], switches: [require: :keep]) == + {[], [], [{"--require", nil}]} end test "parses configured strings" do - assert OptionParser.parse(["--value", "1", "foo"], switches: [value: :string]) - == {[value: "1"], ["foo"], []} - assert OptionParser.parse(["--value=1", "foo"], switches: [value: :string]) - == {[value: "1"], ["foo"], []} - assert OptionParser.parse(["--value"], switches: [value: :string]) - == {[], [], [{"--value", nil}]} - assert OptionParser.parse(["--no-value"], switches: [value: :string]) - == {[], [], [{"--no-value", nil}]} + assert OptionParser.parse(["--value", "1", "foo"], switches: [value: :string]) == + {[value: "1"], ["foo"], []} + assert OptionParser.parse(["--value=1", "foo"], switches: [value: :string]) == + {[value: "1"], ["foo"], []} + assert OptionParser.parse(["--value"], switches: [value: :string]) == + {[], [], [{"--value", nil}]} + assert OptionParser.parse(["--no-value"], switches: [value: :string]) == + {[no_value: true], [], []} + end + + test "parses configured counters" do + assert OptionParser.parse(["--verbose"], switches: [verbose: :count]) == + {[verbose: 1], [], []} + assert OptionParser.parse(["--verbose", "--verbose"], switches: [verbose: :count]) == + {[verbose: 2], [], []} + assert 
OptionParser.parse(["--verbose", "-v", "-v", "--", "bar"], + aliases: [v: :verbose], strict: [verbose: :count]) == + {[verbose: 3], ["bar"], []} end test "parses configured integers" do - assert OptionParser.parse(["--value", "1", "foo"], switches: [value: :integer]) - == {[value: 1], ["foo"], []} - assert OptionParser.parse(["--value=1", "foo"], switches: [value: :integer]) - == {[value: 1], ["foo"], []} - assert OptionParser.parse(["--value", "WAT", "foo"], switches: [value: :integer]) - == {[], ["foo"], [{"--value", "WAT"}]} + assert OptionParser.parse(["--value", "1", "foo"], switches: [value: :integer]) == + {[value: 1], ["foo"], []} + assert OptionParser.parse(["--value=1", "foo"], switches: [value: :integer]) == + {[value: 1], ["foo"], []} + assert OptionParser.parse(["--value", "WAT", "foo"], switches: [value: :integer]) == + {[], ["foo"], [{"--value", "WAT"}]} end test "parses configured integers with keep" do - args = ["--value", "1", "--value", "2", "foo"] - assert OptionParser.parse(args, switches: [value: [:integer, :keep]]) - == {[value: 1, value: 2], ["foo"], []} + argv = ["--value", "1", "--value", "2", "foo"] + assert OptionParser.parse(argv, switches: [value: [:integer, :keep]]) == + {[value: 1, value: 2], ["foo"], []} - args = ["--value=1", "foo", "--value=2", "bar"] - assert OptionParser.parse(args, switches: [value: [:integer, :keep]]) - == {[value: 1, value: 2], ["foo", "bar"], []} + argv = ["--value=1", "foo", "--value=2", "bar"] + assert OptionParser.parse(argv, switches: [value: [:integer, :keep]]) == + {[value: 1, value: 2], ["foo", "bar"], []} end test "parses configured floats" do - assert OptionParser.parse(["--value", "1.0", "foo"], switches: [value: :float]) - == {[value: 1.0], ["foo"], []} - assert OptionParser.parse(["--value=1.0", "foo"], switches: [value: :float]) - == {[value: 1.0], ["foo"], []} - assert OptionParser.parse(["--value", "WAT", "foo"], switches: [value: :float]) - == {[], ["foo"], [{"--value", "WAT"}]} - end - - test "parses no switches as flags" do - assert OptionParser.parse(["--no-docs", "foo"]) - == {[no_docs: true], ["foo"], []} + assert OptionParser.parse(["--value", "1.0", "foo"], switches: [value: :float]) == + {[value: 1.0], ["foo"], []} + assert OptionParser.parse(["--value=1.0", "foo"], switches: [value: :float]) == + {[value: 1.0], ["foo"], []} + assert OptionParser.parse(["--value", "WAT", "foo"], switches: [value: :float]) == + {[], ["foo"], [{"--value", "WAT"}]} end test "overrides options by default" do - assert OptionParser.parse(["--require", "foo", "--require", "bar", "baz"]) - == {[require: "bar"], ["baz"], []} + assert OptionParser.parse(["--require", "foo", "--require", "bar", "baz"]) == + {[require: "bar"], ["baz"], []} end test "parses mixed options" do - args = ["--source", "from_docs/", "--compile", "-x"] - assert OptionParser.parse(args, aliases: [x: :x]) - == {[source: "from_docs/", compile: true, x: true], [], []} + argv = ["--source", "from_docs/", "--compile", "-x"] + assert OptionParser.parse(argv, aliases: [x: :x]) == + {[source: "from_docs/", compile: true, x: true], [], []} end - test "stops on first non option arguments" do - args = ["--source", "from_docs/", "test/enum_test.exs", "--verbose"] - assert OptionParser.parse_head(args) - == {[source: "from_docs/"], ["test/enum_test.exs", "--verbose"], []} + test "stops on first non-option arguments" do + argv = ["--source", "from_docs/", "test/enum_test.exs", "--verbose"] + assert OptionParser.parse_head(argv) == + {[source: "from_docs/"], 
["test/enum_test.exs", "--verbose"], []} end test "stops on --" do - options = OptionParser.parse(["--source", "from_docs/", "--", "1", "2", "3"]) - assert options == {[source: "from_docs/"], ["1", "2", "3"], []} + options = OptionParser.parse(["--source", "foo", "--", "1", "2", "3"]) + assert options == {[source: "foo"], ["1", "2", "3"], []} - options = OptionParser.parse_head(["--source", "from_docs/", "--", "1", "2", "3"]) - assert options == {[source: "from_docs/"], ["1", "2", "3"], []} + options = OptionParser.parse_head(["--source", "foo", "--", "1", "2", "3"]) + assert options == {[source: "foo"], ["1", "2", "3"], []} - options = OptionParser.parse(["--no-dash", "foo", "bar", "--", "-x"]) - assert options == {[no_dash: true], ["foo", "bar", "-x"], []} + options = OptionParser.parse(["--source", "foo", "bar", "--", "-x"]) + assert options == {[source: "foo"], ["bar", "-x"], []} - options = OptionParser.parse_head(["--no-dash", "foo", "bar", "--", "-x"]) - assert options == {[no_dash: true], ["foo", "bar", "--", "-x"], []} + options = OptionParser.parse_head(["--source", "foo", "bar", "--", "-x"]) + assert options == {[source: "foo"], ["bar", "--", "-x"], []} end - test "goes beyond the first non option arguments" do - args = ["--source", "from_docs/", "test/enum_test.exs", "--verbose"] - assert OptionParser.parse(args) - == {[source: "from_docs/", verbose: true], ["test/enum_test.exs"], []} + test "goes beyond the first non-option arguments" do + argv = ["--source", "from_docs/", "test/enum_test.exs", "--verbose"] + assert OptionParser.parse(argv) == + {[source: "from_docs/", verbose: true], ["test/enum_test.exs"], []} end test "parses more than one key/value options" do - assert OptionParser.parse(["--source", "from_docs/", "--docs", "show"]) - == {[source: "from_docs/", docs: "show"], [], []} + assert OptionParser.parse(["--source", "from_docs/", "--docs", "show"]) == + {[source: "from_docs/", docs: "show"], [], []} end test "collects multiple invalid options" do - args = ["--bad", "opt", "foo", "-o", "bad", "bar"] - assert OptionParser.parse(args, switches: [bad: :integer]) - == {[], ["foo", "bar"], [{"--bad", "opt"}, {"-o", "bad"}]} + argv = ["--bad", "opt", "foo", "-o", "bad", "bar"] + assert OptionParser.parse(argv, switches: [bad: :integer]) == + {[], ["foo", "bar"], [{"--bad", "opt"}, {"-o", "bad"}]} end test "parses more than one key/value options using strict" do assert OptionParser.parse(["--source", "from_docs/", "--docs", "show"], - strict: [source: :string, docs: :string]) - == {[source: "from_docs/", docs: "show"], [], []} + strict: [source: :string, docs: :string]) == + {[source: "from_docs/", docs: "show"], [], []} assert OptionParser.parse(["--source", "from_docs/", "--doc", "show"], - strict: [source: :string, docs: :string]) - == {[source: "from_docs/"], ["show"], [{"--doc", nil}]} + strict: [source: :string, docs: :string]) == + {[source: "from_docs/"], ["show"], [{"--doc", nil}]} assert OptionParser.parse(["--source", "from_docs/", "--doc=show"], - strict: [source: :string, docs: :string]) - == {[source: "from_docs/"], [], [{"--doc", nil}]} + strict: [source: :string, docs: :string]) == + {[source: "from_docs/"], [], [{"--doc", nil}]} + end + + test "parse/2 raises when using both options: switches and strict" do + assert_raise ArgumentError, ":switches and :strict cannot be given together", fn -> + OptionParser.parse(["--elixir"], switches: [ex: :string], strict: [elixir: :string]) + end + end + + test "parse!/2 raise an exception for an unknown option using 
strict" do + assert_raise OptionParser.ParseError, "1 error found!\n--doc : Unknown option", fn -> + argv = ["--source", "from_docs/", "--doc", "show"] + OptionParser.parse!(argv, strict: [source: :string, docs: :string]) + end + end + + test "parse!/2 raise an exception when an option is of the wrong type" do + assert_raise OptionParser.ParseError, fn -> + argv = ["--bad", "opt", "foo", "-o", "bad", "bar"] + OptionParser.parse!(argv, switches: [bad: :integer]) + end + end + + test "parse_head!/2 raise an exception when an option is of the wrong type" do + assert_raise OptionParser.ParseError, "1 error found!\n--number : Expected type integer, got \"lib\"", fn -> + argv = ["--number", "lib", "test/enum_test.exs"] + OptionParser.parse_head!(argv, strict: [number: :integer]) + end + end + + test ":switches with :strict raises" do + assert_raise ArgumentError, ":switches and :strict cannot be given together", fn -> + OptionParser.parse([], strict: [], switches: []) + end end test "parses - as argument" do - assert OptionParser.parse(["-a", "-", "-", "-b", "-"], aliases: [b: :boo]) - == {[boo: "-"], ["-"], [{"-a", "-"}]} + assert OptionParser.parse(["-a", "-", "-", "-b", "-"], aliases: [b: :boo]) == + {[boo: "-"], ["-"], [{"-a", "-"}]} + + assert OptionParser.parse(["--foo", "-", "-b", "-"], strict: [foo: :boolean, boo: :string], aliases: [b: :boo]) == + {[foo: true, boo: "-"], ["-"], []} + end + + test "allow nonexistent atoms" do + assert OptionParser.parse(["--option-key-creates-atom"], allow_nonexistent_atoms: true) == + {[{String.to_atom("option_key_creates_atom"), true}], [], []} + end + + test "correctly handles negative integers" do + assert OptionParser.parse(["arg1", "-43"]) == + {[], ["arg1", "-43"], []} + + assert OptionParser.parse(["arg1", "-o", "-43"], switches: [option: :integer], aliases: [o: :option]) == + {[option: -43], ["arg1"], []} + + assert OptionParser.parse(["arg1", "--option=-43"], switches: [option: :integer]) == + {[option: -43], ["arg1"], []} + end + + test "correctly handles negative floating-point numbers" do + assert OptionParser.parse(["arg1", "-43.2"]) == + {[], ["arg1", "-43.2"], []} + + assert OptionParser.parse(["arg1", "-o", "-43.2"], switches: [option: :float], aliases: [o: :option]) == + {[option: -43.2], ["arg1"], []} - assert OptionParser.parse(["--foo", "-", "-b", "-"], strict: [foo: :boolean, boo: :string], aliases: [b: :boo]) - == {[foo: true, boo: "-"], ["-"], []} + assert OptionParser.parse(["arg1", "--option=-43.2"], switches: [option: :float]) == + {[option: -43.2], ["arg1"], []} end test "multi-word option" do config = [switches: [hello_world: :boolean]] - assert OptionParser.next(["--hello-world"], config) - == {:ok, :hello_world, true, []} - assert OptionParser.next(["--no-hello-world"], config) - == {:ok, :hello_world, false, []} - - assert OptionParser.next(["--hello-world"], []) - == {:ok, :hello_world, true, []} - assert OptionParser.next(["--no-hello-world"], []) - == {:ok, :no_hello_world, true, []} - assert OptionParser.next(["--hello_world"], []) - == {:invalid, "--hello_world", nil, []} - assert OptionParser.next(["--no-hello_world"], []) - == {:invalid, "--no-hello_world", nil, []} - - assert OptionParser.next(["--no-hello-world"], strict: []) - == {:undefined, "--no-hello-world", nil, []} - assert OptionParser.next(["--no-hello_world"], strict: []) - == {:undefined, "--no-hello_world", nil, []} + assert OptionParser.next(["--hello-world"], config) == + {:ok, :hello_world, true, []} + assert OptionParser.next(["--no-hello-world"], 
config) == + {:ok, :hello_world, false, []} + + assert OptionParser.next(["--hello-world"], []) == + {:ok, :hello_world, true, []} + assert OptionParser.next(["--no-hello-world"], []) == + {:ok, :no_hello_world, true, []} + assert OptionParser.next(["--hello_world"], []) == + {:invalid, "--hello_world", nil, []} + assert OptionParser.next(["--no-hello_world"], []) == + {:invalid, "--no-hello_world", nil, []} + + assert OptionParser.next(["--no-hello-world"], strict: []) == + {:undefined, "--no-hello-world", nil, []} + assert OptionParser.next(["--no-hello_world"], strict: []) == + {:undefined, "--no-hello_world", nil, []} config = [strict: [hello_world: :boolean]] - assert OptionParser.next(["--hello-world"], config) - == {:ok, :hello_world, true, []} - assert OptionParser.next(["--no-hello-world"], config) - == {:ok, :hello_world, false, []} - assert OptionParser.next(["--hello_world"], config) - == {:undefined, "--hello_world", nil, []} - assert OptionParser.next(["--no-hello_world"], config) - == {:undefined, "--no-hello_world", nil, []} + assert OptionParser.next(["--hello-world"], config) == + {:ok, :hello_world, true, []} + assert OptionParser.next(["--no-hello-world"], config) == + {:ok, :hello_world, false, []} + assert OptionParser.next(["--hello_world"], config) == + {:undefined, "--hello_world", nil, []} + assert OptionParser.next(["--no-hello_world"], config) == + {:undefined, "--no-hello_world", nil, []} end test "next strict: good options" do config = [strict: [str: :string, int: :integer, bool: :boolean]] - assert OptionParser.next(["--str", "hello", "..."], config) - == {:ok, :str, "hello", ["..."]} - assert OptionParser.next(["--int=13", "..."], config) - == {:ok, :int, 13, ["..."]} - assert OptionParser.next(["--bool=false", "..."], config) - == {:ok, :bool, false, ["..."]} - assert OptionParser.next(["--no-bool", "..."], config) - == {:ok, :bool, false, ["..."]} - assert OptionParser.next(["--bool", "..."], config) - == {:ok, :bool, true, ["..."]} - assert OptionParser.next(["..."], config) - == {:error, ["..."]} + assert OptionParser.next(["--str", "hello", "..."], config) == + {:ok, :str, "hello", ["..."]} + assert OptionParser.next(["--int=13", "..."], config) == + {:ok, :int, 13, ["..."]} + assert OptionParser.next(["--bool=false", "..."], config) == + {:ok, :bool, false, ["..."]} + assert OptionParser.next(["--no-bool", "..."], config) == + {:ok, :bool, false, ["..."]} + assert OptionParser.next(["--bool", "..."], config) == + {:ok, :bool, true, ["..."]} + assert OptionParser.next(["..."], config) == + {:error, ["..."]} end test "next strict: unknown options" do config = [strict: [bool: :boolean]] - assert OptionParser.next(["--str", "13", "..."], config) - == {:undefined, "--str", nil, ["13", "..."]} - assert OptionParser.next(["--int=hello", "..."], config) - == {:undefined, "--int", "hello", ["..."]} - assert OptionParser.next(["-no-bool=other", "..."], config) - == {:undefined, "-no-bool", "other", ["..."]} + assert OptionParser.next(["--str", "13", "..."], config) == + {:undefined, "--str", nil, ["13", "..."]} + assert OptionParser.next(["--int=hello", "..."], config) == + {:undefined, "--int", "hello", ["..."]} + assert OptionParser.next(["-no-bool=other", "..."], config) == + {:undefined, "-no-bool", "other", ["..."]} end test "next strict: bad type" do config = [strict: [str: :string, int: :integer, bool: :boolean]] - assert OptionParser.next(["--str", "13", "..."], config) - == {:ok, :str, "13", ["..."]} - assert OptionParser.next(["--int=hello", "..."], 
config) - == {:invalid, "--int", "hello", ["..."]} - assert OptionParser.next(["--int", "hello", "..."], config) - == {:invalid, "--int", "hello", ["..."]} - assert OptionParser.next(["--bool=other", "..."], config) - == {:invalid, "--bool", "other", ["..."]} + assert OptionParser.next(["--str", "13", "..."], config) == + {:ok, :str, "13", ["..."]} + assert OptionParser.next(["--int=hello", "..."], config) == + {:invalid, "--int", "hello", ["..."]} + assert OptionParser.next(["--int", "hello", "..."], config) == + {:invalid, "--int", "hello", ["..."]} + assert OptionParser.next(["--bool=other", "..."], config) == + {:invalid, "--bool", "other", ["..."]} end test "next strict: missing value" do config = [strict: [str: :string, int: :integer, bool: :boolean]] - assert OptionParser.next(["--str"], config) - == {:invalid, "--str", nil, []} - assert OptionParser.next(["--int"], config) - == {:invalid, "--int", nil, []} - assert OptionParser.next(["--bool=", "..."], config) - == {:invalid, "--bool", "", ["..."]} - assert OptionParser.next(["--no-bool=", "..."], config) - == {:undefined, "--no-bool", "", ["..."]} + assert OptionParser.next(["--str"], config) == + {:invalid, "--str", nil, []} + assert OptionParser.next(["--int"], config) == + {:invalid, "--int", nil, []} + assert OptionParser.next(["--bool=", "..."], config) == + {:invalid, "--bool", "", ["..."]} + assert OptionParser.next(["--no-bool=", "..."], config) == + {:invalid, "--no-bool", "", ["..."]} + end + + test "split" do + assert OptionParser.split(~S[]) == [] + assert OptionParser.split(~S[foo]) == ["foo"] + assert OptionParser.split(~S[foo bar]) == ["foo", "bar"] + assert OptionParser.split(~S[ foo bar ]) == ["foo", "bar"] + assert OptionParser.split(~S[foo\ bar]) == ["foo bar"] + assert OptionParser.split(~S[foo" bar"]) == ["foo bar"] + assert OptionParser.split(~S[foo\" bar\"]) == ["foo\"", "bar\""] + assert OptionParser.split(~S[foo "\ bar\""]) == ["foo", "\\ bar\""] + assert OptionParser.split(~S[foo '\"bar"\'\ ']) == ["foo", "\\\"bar\"'\\ "] + end + + test "to_argv" do + assert OptionParser.to_argv([foo_bar: "baz"]) == + ["--foo-bar", "baz"] + + assert OptionParser.to_argv([bool: true, bool: false, discarded: nil]) == + ["--bool", "--no-bool"] + end + + test ":count switch type can be translated back" do + original = ["--counter", "--counter"] + {opts, [], []} = OptionParser.parse(original, [switches: [counter: :count]]) + assert original == OptionParser.to_argv(opts, [switches: [counter: :count]]) end end diff --git a/lib/elixir/test/elixir/path_test.exs b/lib/elixir/test/elixir/path_test.exs index 4fa577c9fa8..35bb4cd8af6 100644 --- a/lib/elixir/test/elixir/path_test.exs +++ b/lib/elixir/test/elixir/path_test.exs @@ -2,10 +2,13 @@ Code.require_file "test_helper.exs", __DIR__ defmodule PathTest do use ExUnit.Case, async: true + + doctest Path + import PathHelpers if :file.native_name_encoding == :utf8 do - test :wildcard_with_utf8 do + test "wildcard with UTF-8" do File.mkdir_p(tmp_path("héllò")) assert Path.wildcard(tmp_path("héllò")) == [tmp_path("héllò")] after @@ -13,7 +16,7 @@ defmodule PathTest do end end - test :wildcard do + test "wildcard/2" do hello = tmp_path("wildcard/.hello") world = tmp_path("wildcard/.hello/world") File.mkdir_p(world) @@ -29,60 +32,82 @@ defmodule PathTest do File.rm_rf tmp_path("wildcard") end - if is_win? 
do - test :relative do - assert Path.relative("C:/usr/local/bin") == "usr/local/bin" - assert Path.relative("C:\\usr\\local\\bin") == "usr\\local\\bin" - assert Path.relative("C:usr\\local\\bin") == "usr\\local\\bin" - - assert Path.relative("/usr/local/bin") == "usr/local/bin" - assert Path.relative("usr/local/bin") == "usr/local/bin" - assert Path.relative("../usr/local/bin") == "../usr/local/bin" - end - - test :type do - assert Path.type("C:/usr/local/bin") == :absolute - assert Path.type('C:\\usr\\local\\bin') == :absolute - assert Path.type("C:usr\\local\\bin") == :volumerelative - - assert Path.type("/usr/local/bin") == :volumerelative - assert Path.type('usr/local/bin') == :relative - assert Path.type("../usr/local/bin") == :relative + if windows?() do + describe "Windows" do + test "relative/1" do + assert Path.relative("C:/usr/local/bin") == "usr/local/bin" + assert Path.relative("C:\\usr\\local\\bin") == "usr\\local\\bin" + assert Path.relative("C:usr\\local\\bin") == "usr\\local\\bin" + + assert Path.relative("/usr/local/bin") == "usr/local/bin" + assert Path.relative("usr/local/bin") == "usr/local/bin" + assert Path.relative("../usr/local/bin") == "../usr/local/bin" + end + + test "relative_to/2" do + assert Path.relative_to("D:/usr/local/foo", "D:/usr/") == "local/foo" + assert Path.relative_to("D:/usr/local/foo", "d:/usr/") == "local/foo" + assert Path.relative_to("d:/usr/local/foo", "D:/usr/") == "local/foo" + assert Path.relative_to("D:/usr/local/foo", "d:/") == "usr/local/foo" + assert Path.relative_to("D:/usr/local/foo", "D:/") == "usr/local/foo" + assert Path.relative_to("D:/usr/local/foo", "d:") == "D:/usr/local/foo" + assert Path.relative_to("D:/usr/local/foo", "D:") == "D:/usr/local/foo" + end + + test "type/1" do + assert Path.type("C:/usr/local/bin") == :absolute + assert Path.type('C:\\usr\\local\\bin') == :absolute + assert Path.type("C:usr\\local\\bin") == :volumerelative + + assert Path.type("/usr/local/bin") == :volumerelative + assert Path.type('usr/local/bin') == :relative + assert Path.type("../usr/local/bin") == :relative + end + + test "split/1" do + assert Path.split("C:\\foo\\bar") == ["c:/", "foo", "bar"] + assert Path.split("C:/foo/bar") == ["c:/", "foo", "bar"] + end end else - test :relative do - assert Path.relative("/usr/local/bin") == "usr/local/bin" - assert Path.relative("usr/local/bin") == "usr/local/bin" - assert Path.relative("../usr/local/bin") == "../usr/local/bin" - assert Path.relative(['/usr', ?/, "local/bin"]) == "usr/local/bin" - end - - test :type do - assert Path.type("/usr/local/bin") == :absolute - assert Path.type("usr/local/bin") == :relative - assert Path.type("../usr/local/bin") == :relative - - assert Path.type('/usr/local/bin') == :absolute - assert Path.type('usr/local/bin') == :relative - assert Path.type('../usr/local/bin') == :relative - - assert Path.type(['/usr/', 'local/bin']) == :absolute - assert Path.type(['usr/', 'local/bin']) == :relative - assert Path.type(['../usr', '/local/bin']) == :relative + describe "Unix" do + test "relative/1" do + assert Path.relative("/usr/local/bin") == "usr/local/bin" + assert Path.relative("usr/local/bin") == "usr/local/bin" + assert Path.relative("../usr/local/bin") == "../usr/local/bin" + assert Path.relative("/") == "." + assert Path.relative('/') == "." 
+ assert Path.relative(['/usr', ?/, "local/bin"]) == "usr/local/bin" + end + + test "type/1" do + assert Path.type("/usr/local/bin") == :absolute + assert Path.type("usr/local/bin") == :relative + assert Path.type("../usr/local/bin") == :relative + + assert Path.type('/usr/local/bin') == :absolute + assert Path.type('usr/local/bin') == :relative + assert Path.type('../usr/local/bin') == :relative + + assert Path.type(['/usr/', 'local/bin']) == :absolute + assert Path.type(['usr/', 'local/bin']) == :relative + assert Path.type(['../usr', '/local/bin']) == :relative + end end end - test :relative_to_cwd do + test "relative_to_cwd/1" do assert Path.relative_to_cwd(__ENV__.file) == Path.relative_to(__ENV__.file, System.cwd!) - assert Path.relative_to_cwd(to_char_list(__ENV__.file)) == - Path.relative_to(to_char_list(__ENV__.file), to_char_list(System.cwd!)) + assert Path.relative_to_cwd(to_charlist(__ENV__.file)) == + Path.relative_to(to_charlist(__ENV__.file), to_charlist(System.cwd!)) end - test :absname do + test "absname/1,2" do assert (Path.absname("/") |> strip_drive_letter_if_windows) == "/" assert (Path.absname("/foo") |> strip_drive_letter_if_windows) == "/foo" + assert (Path.absname("/./foo") |> strip_drive_letter_if_windows) == "/foo" assert (Path.absname("/foo/bar") |> strip_drive_letter_if_windows) == "/foo/bar" assert (Path.absname("/foo/bar/") |> strip_drive_letter_if_windows) == "/foo/bar" assert (Path.absname("/foo/bar/../bar") |> strip_drive_letter_if_windows) == "/foo/bar/../bar" @@ -95,8 +120,8 @@ defmodule PathTest do assert Path.absname(["bar/", ?., ?., ["/bar"]], "/foo") == "/foo/bar/../bar" end - test :expand_path_with_user_home do - home = System.user_home! + test "expand/1,2 with user home" do + home = System.user_home! |> Path.absname assert home == Path.expand("~") assert home == Path.expand('~') @@ -107,31 +132,35 @@ defmodule PathTest do assert Path.expand("~/file", "whatever") == Path.join(home, "file") assert Path.expand("file", Path.expand("~")) == Path.expand("~/file") assert Path.expand("file", "~") == Path.join(home, "file") + assert Path.expand("~file") == Path.join(System.cwd!, "file") end - test :expand_path do + test "expand/1,2" do assert (Path.expand("/") |> strip_drive_letter_if_windows) == "/" + assert (Path.expand("/foo/../..") |> strip_drive_letter_if_windows) == "/" assert (Path.expand("/foo") |> strip_drive_letter_if_windows) == "/foo" + assert (Path.expand("/./foo") |> strip_drive_letter_if_windows) == "/foo" + assert (Path.expand("/../foo") |> strip_drive_letter_if_windows) == "/foo" assert (Path.expand("/foo/bar") |> strip_drive_letter_if_windows) == "/foo/bar" assert (Path.expand("/foo/bar/") |> strip_drive_letter_if_windows) == "/foo/bar" - assert (Path.expand("/foo/bar/.") |> strip_drive_letter_if_windows)== "/foo/bar" + assert (Path.expand("/foo/bar/.") |> strip_drive_letter_if_windows) == "/foo/bar" assert (Path.expand("/foo/bar/../bar") |> strip_drive_letter_if_windows) == "/foo/bar" - assert (Path.expand("bar", "/foo") |> strip_drive_letter_if_windows)== "/foo/bar" - assert (Path.expand("bar/", "/foo") |> strip_drive_letter_if_windows)== "/foo/bar" - assert (Path.expand("bar/.", "/foo") |> strip_drive_letter_if_windows)== "/foo/bar" - assert (Path.expand("bar/../bar", "/foo") |> strip_drive_letter_if_windows)== "/foo/bar" + assert (Path.expand("bar", "/foo") |> strip_drive_letter_if_windows) == "/foo/bar" + assert (Path.expand("bar/", "/foo") |> strip_drive_letter_if_windows) == "/foo/bar" + assert (Path.expand("bar/.", "/foo") |> 
strip_drive_letter_if_windows) == "/foo/bar" + assert (Path.expand("bar/../bar", "/foo") |> strip_drive_letter_if_windows) == "/foo/bar" assert (Path.expand("../bar/../bar", "/foo/../foo/../foo") |> strip_drive_letter_if_windows) == "/bar" - assert (Path.expand(['..', ?/, "bar/../bar"], '/foo/../foo/../foo') |> - strip_drive_letter_if_windows) == "/bar" - - assert Path.expand("bar/../bar", "foo") == Path.expand("foo/bar") + assert "/bar" == + (Path.expand(['..', ?/, "bar/../bar"], '/foo/../foo/../foo') |> strip_drive_letter_if_windows) assert (Path.expand("/..") |> strip_drive_letter_if_windows) == "/" + + assert Path.expand("bar/../bar", "foo") == Path.expand("foo/bar") end - test :relative_to do + test "relative_to/2" do assert Path.relative_to("/usr/local/foo", "/usr/local") == "foo" assert Path.relative_to("/usr/local/foo", "/") == "usr/local/foo" assert Path.relative_to("/usr/local/foo", "/etc") == "/usr/local/foo" @@ -145,14 +174,14 @@ defmodule PathTest do assert Path.relative_to(["usr", ?/, 'local/foo'], 'usr/local') == "foo" end - test :rootname do + test "rootname/2" do assert Path.rootname("~/foo/bar.ex", ".ex") == "~/foo/bar" assert Path.rootname("~/foo/bar.exs", ".ex") == "~/foo/bar.exs" assert Path.rootname("~/foo/bar.old.ex", ".ex") == "~/foo/bar.old" assert Path.rootname([?~, '/foo/bar', ".old.ex"], '.ex') == "~/foo/bar.old" end - test :extname do + test "extname/1" do assert Path.extname("foo.erl") == ".erl" assert Path.extname("~/foo/bar") == "" @@ -160,7 +189,7 @@ defmodule PathTest do assert Path.extname('~/foo/bar') == "" end - test :dirname do + test "dirname/1" do assert Path.dirname("/foo/bar.ex") == "/foo" assert Path.dirname("foo/bar.ex") == "foo" @@ -170,7 +199,7 @@ defmodule PathTest do assert Path.dirname([?~, "/foo", '/bar.ex']) == "~/foo" end - test :basename do + test "basename/1,2" do assert Path.basename("foo") == "foo" assert Path.basename("/foo/bar") == "bar" assert Path.basename("/") == "" @@ -182,35 +211,45 @@ defmodule PathTest do assert Path.basename([?~, "/for/bar", '.old.ex'], ".ex") == "bar.old" end - test :join do + test "join/1" do assert Path.join([""]) == "" assert Path.join(["foo"]) == "foo" assert Path.join(["/", "foo", "bar"]) == "/foo/bar" assert Path.join(["~", "foo", "bar"]) == "~/foo/bar" assert Path.join(['/foo/', "/bar/"]) == "/foo/bar" + assert Path.join(["/", ""]) == "/" + assert Path.join(["/", "", "bar"]) == "/bar" + assert Path.join(['foo', [?b, "a", ?r]]) == "foo/bar" + assert Path.join([[?f, 'o', "o"]]) == "foo" end - test :join_two do + test "join/2" do assert Path.join("/foo", "bar") == "/foo/bar" assert Path.join("~", "foo") == "~/foo" - assert Path.join("", "bar") == "/bar" + assert Path.join("", "bar") == "bar" + assert Path.join("bar", "") == "bar" + assert Path.join("", "/bar") == "bar" + assert Path.join("/bar", "") == "/bar" + + assert Path.join("foo", "/bar") == "foo/bar" + assert Path.join("/foo", "/bar") == "/foo/bar" assert Path.join("/foo", "/bar") == "/foo/bar" - assert Path.join("/foo", "./bar") == "/foo/bar" + assert Path.join("/foo", "./bar") == "/foo/./bar" - assert Path.join([?/, "foo"], "./bar") == "/foo/bar" + assert Path.join([?/, "foo"], "./bar") == "/foo/./bar" end - test :split_with_binary do + test "split/1" do assert Path.split("") == [] assert Path.split("foo") == ["foo"] assert Path.split("/foo/bar") == ["/", "foo", "bar"] assert Path.split([?/, "foo/bar"]) == ["/", "foo", "bar"] end - if is_win? 
do - defp strip_drive_letter_if_windows([_d,?:|rest]), do: rest - defp strip_drive_letter_if_windows(<<_d,?:,rest::binary>>), do: rest + if windows?() do + defp strip_drive_letter_if_windows([_d, ?: | rest]), do: rest + defp strip_drive_letter_if_windows(<<_d, ?:, rest::binary>>), do: rest else defp strip_drive_letter_if_windows(path), do: path end diff --git a/lib/elixir/test/elixir/port_test.exs b/lib/elixir/test/elixir/port_test.exs new file mode 100644 index 00000000000..d2bae5275ce --- /dev/null +++ b/lib/elixir/test/elixir/port_test.exs @@ -0,0 +1,21 @@ +Code.require_file "test_helper.exs", __DIR__ + +defmodule PortTest do + use ExUnit.Case, async: true + + test "info/1,2 with registered name" do + {:ok, port} = :gen_udp.open(0) + + assert Port.info(port, :links) == {:links, [self()]} + assert Port.info(port, :registered_name) == {:registered_name, []} + + Process.register(port, __MODULE__) + + assert Port.info(port, :registered_name) == {:registered_name, __MODULE__} + + :ok = :gen_udp.close(port) + + assert Port.info(port, :registered_name) == nil + assert Port.info(port) == nil + end +end diff --git a/lib/elixir/test/elixir/process_test.exs b/lib/elixir/test/elixir/process_test.exs index be5b984f2f2..bd11503ca56 100644 --- a/lib/elixir/test/elixir/process_test.exs +++ b/lib/elixir/test/elixir/process_test.exs @@ -3,9 +3,24 @@ Code.require_file "test_helper.exs", __DIR__ defmodule ProcessTest do use ExUnit.Case, async: true + doctest Process + + test "dictionary" do + assert Process.put(:foo, :bar) == nil + assert Process.put(:foo, :baz) == :bar + + assert Process.get_keys() == [:foo] + assert Process.get_keys(:bar) == [] + assert Process.get_keys(:baz) == [:foo] + + assert Process.get(:foo) == :baz + assert Process.delete(:foo) == :baz + assert Process.get(:foo) == nil + end + test "group_leader/2 and group_leader/0" do - another = spawn_link(fn -> :timer.sleep(1000) end) - assert Process.group_leader(self, another) + another = spawn_link(fn -> Process.sleep(1000) end) + assert Process.group_leader(self(), another) assert Process.group_leader == another end @@ -14,28 +29,117 @@ defmodule ProcessTest do quote(do: :erlang.monitor(:process, pid())) end + test "sleep/1" do + assert Process.sleep(0) == :ok + end + test "info/2" do - pid = spawn fn -> end + pid = spawn fn -> Process.sleep(1000) end + assert Process.info(pid, :priority) == {:priority, :normal} + assert Process.info(pid, [:priority]) == [priority: :normal] + Process.exit(pid, :kill) assert Process.info(pid, :backtrace) == nil + assert Process.info(pid, [:backtrace, :status]) == nil end test "info/2 with registered name" do - pid = spawn fn -> end + pid = spawn fn -> nil end Process.exit(pid, :kill) assert Process.info(pid, :registered_name) == nil + assert Process.info(pid, [:registered_name]) == + nil - assert Process.info(self, :registered_name) == + assert Process.info(self(), :registered_name) == {:registered_name, []} + assert Process.info(self(), [:registered_name]) == + [registered_name: []] - Process.register(self, __MODULE__) - assert Process.info(self, :registered_name) == + Process.register(self(), __MODULE__) + assert Process.info(self(), :registered_name) == {:registered_name, __MODULE__} + assert Process.info(self(), [:registered_name]) == + [registered_name: __MODULE__] + end + + test "send_after/3 sends messages once expired" do + Process.send_after(self(), :hello, 10) + assert_receive :hello + end + + test "send_after/4 with absolute time sends message once expired" do + time = 
System.monotonic_time(:millisecond) + 10 + Process.send_after(self(), :hello, time, abs: true) + assert_receive :hello + end + + test "send_after/3 returns a timer reference that can be read or cancelled" do + timer = Process.send_after(self(), :hello, 100_000) + refute_received :hello + assert is_integer(Process.read_timer(timer)) + assert is_integer(Process.cancel_timer(timer)) + + timer = Process.send_after(self(), :hello, 0) + assert_receive :hello + assert Process.read_timer(timer) == false + assert Process.cancel_timer(timer) == false + + timer = Process.send_after(self(), :hello, 100_000) + assert Process.cancel_timer(timer, async: true) + assert_receive {:cancel_timer, ^timer, result} + assert is_integer(result) + assert Process.cancel_timer(timer, info: false) == :ok + end + + test "exit(pid, :normal) does not cause the target process to exit" do + pid = spawn_link fn -> + receive do + :done -> nil + end + end + + trap = Process.flag(:trap_exit, true) + true = Process.exit(pid, :normal) + refute_receive {:EXIT, ^pid, :normal} + assert Process.alive?(pid) + + # now exit the process for real so it doesn't hang around + true = Process.exit(pid, :abnormal) + assert_receive {:EXIT, ^pid, :abnormal} + refute Process.alive?(pid) + + Process.flag(:trap_exit, trap) + end + + test "exit(pid, :normal) makes the process receive a message if it traps exits" do + parent = self() + pid = spawn_link fn -> + Process.flag(:trap_exit, true) + receive do + {:EXIT, ^parent, :normal} -> send(parent, {:ok, self()}) + end + end + + refute_receive _ + Process.exit(pid, :normal) + assert_receive {:ok, ^pid} + refute Process.alive?(pid) + end + + test "exit(self(), :normal) causes the calling process to exit" do + trap = Process.flag(:trap_exit, true) + + pid = spawn_link fn -> Process.exit(self(), :normal) end + + assert_receive {:EXIT, ^pid, :normal} + refute Process.alive?(pid) + + Process.flag(:trap_exit, trap) end defp expand(expr, env) do - {expr, _env} = :elixir_exp.expand(expr, env) + {expr, _env} = :elixir_expand.expand(expr, env) expr end end diff --git a/lib/elixir/test/elixir/protocol_test.exs b/lib/elixir/test/elixir/protocol_test.exs index 4392b189946..7b821144118 100644 --- a/lib/elixir/test/elixir/protocol_test.exs +++ b/lib/elixir/test/elixir/protocol_test.exs @@ -3,24 +3,32 @@ Code.require_file "test_helper.exs", __DIR__ defmodule ProtocolTest do use ExUnit.Case, async: true - defprotocol Sample do - @type t :: any - @doc "Ok" - @spec ok(t) :: boolean - def ok(thing) - end + doctest Protocol - defprotocol WithAny do - @fallback_to_any true - @doc "Ok" - def ok(thing) - end + {_, _, sample_binary, _} = + defprotocol Sample do + @type t :: any + @doc "Ok" + @spec ok(t) :: boolean + def ok(term) + end + + @sample_binary sample_binary + + {_, _, with_any_binary, _} = + defprotocol WithAny do + @fallback_to_any true + @doc "Ok" + def ok(term) + end + + @with_any_binary with_any_binary defprotocol Derivable do def ok(a) end - defimpl Derivable, for: Map do + defimpl Derivable, for: Any do defmacro __deriving__(module, struct, options) do quote do defimpl Derivable, for: unquote(module) do @@ -64,20 +72,20 @@ defmodule ProtocolTest do end test "protocol implementations without any" do - assert nil? Sample.impl_for(:foo) - assert nil? Sample.impl_for(fn(x) -> x end) - assert nil? Sample.impl_for(1) - assert nil? Sample.impl_for(1.1) - assert nil? Sample.impl_for([]) - assert nil? Sample.impl_for([1, 2, 3]) - assert nil? Sample.impl_for({}) - assert nil? Sample.impl_for({1, 2, 3}) - assert nil? 
Sample.impl_for("foo") - assert nil? Sample.impl_for(<<1>>) - assert nil? Sample.impl_for(%{}) - assert nil? Sample.impl_for(self) - assert nil? Sample.impl_for(hd(:erlang.ports)) - assert nil? Sample.impl_for(make_ref) + assert is_nil Sample.impl_for(:foo) + assert is_nil Sample.impl_for(fn(x) -> x end) + assert is_nil Sample.impl_for(1) + assert is_nil Sample.impl_for(1.1) + assert is_nil Sample.impl_for([]) + assert is_nil Sample.impl_for([1, 2, 3]) + assert is_nil Sample.impl_for({}) + assert is_nil Sample.impl_for({1, 2, 3}) + assert is_nil Sample.impl_for("foo") + assert is_nil Sample.impl_for(<<1>>) + assert is_nil Sample.impl_for(%{}) + assert is_nil Sample.impl_for(self()) + assert is_nil Sample.impl_for(hd(:erlang.ports)) + assert is_nil Sample.impl_for(make_ref()) assert Sample.impl_for(%ImplStruct{}) == Sample.ProtocolTest.ImplStruct @@ -86,11 +94,11 @@ defmodule ProtocolTest do end test "protocol implementation with any and structs fallback" do - assert WithAny.impl_for(%NoImplStruct{}) == WithAny.Any - assert WithAny.impl_for(%ImplStruct{}) == WithAny.Map # Derived + assert WithAny.impl_for(%NoImplStruct{}) == WithAny.Any + assert WithAny.impl_for(%ImplStruct{}) == WithAny.Any # Derived assert WithAny.impl_for(%{__struct__: "foo"}) == WithAny.Map - assert WithAny.impl_for(%{}) == WithAny.Map - assert WithAny.impl_for(self) == WithAny.Any + assert WithAny.impl_for(%{}) == WithAny.Map + assert WithAny.impl_for(self()) == WithAny.Any end test "protocol not implemented" do @@ -106,11 +114,11 @@ defmodule ProtocolTest do @type t :: any @doc "Ok" @spec ok(t) :: boolean - def ok(thing) + def ok(term) end) docs = Code.get_docs(SampleDocsProto, :docs) - assert {{:ok, 1}, _, :def, [{:thing, _, nil}], "Ok"} = + assert {{:ok, 1}, _, :def, [{:term, _, nil}], "Ok"} = List.keyfind(docs, {:ok, 1}, 0) end @@ -121,34 +129,50 @@ defmodule ProtocolTest do end test "protocol defines callbacks" do - assert get_callbacks(Sample, :ok, 1) == - [{:type, 9, :fun, [{:type, 9, :product, [{:type, 9, :t, []}]}, {:type, 9, :boolean, []}]}] + assert get_callbacks(@sample_binary, :ok, 1) == + [{:type, 12, :fun, [{:type, 12, :product, [{:user_type, 12, :t, []}]}, {:type, 12, :boolean, []}]}] - assert get_callbacks(WithAny, :ok, 1) == - [{:type, 16, :fun, [{:type, 16, :product, [{:type, 16, :t, []}]}, {:type, 16, :term, []}]}] + assert get_callbacks(@with_any_binary, :ok, 1) == + [{:type, 22, :fun, [{:type, 22, :product, [{:user_type, 22, :t, []}]}, {:type, 22, :term, []}]}] end - test "protocol defines attributes" do - assert Sample.__info__(:attributes)[:protocol] == [fallback_to_any: false, consolidated: false] - assert WithAny.__info__(:attributes)[:protocol] == [fallback_to_any: true, consolidated: false] + test "protocol defines functions and attributes" do + assert Sample.__protocol__(:module) == Sample + assert Sample.__protocol__(:functions) == [ok: 1] + refute Sample.__protocol__(:consolidated?) + assert Sample.__info__(:attributes)[:protocol] == [fallback_to_any: false] + + assert WithAny.__protocol__(:module) == WithAny + assert WithAny.__protocol__(:functions) == [ok: 1] + refute WithAny.__protocol__(:consolidated?) 
+ assert WithAny.__info__(:attributes)[:protocol] == [fallback_to_any: true] end test "defimpl" do - defprotocol Attribute do - def test(thing) - end + module = Module.concat(Sample, ImplStruct) + assert module.__impl__(:for) == ImplStruct + assert module.__impl__(:target) == module + assert module.__impl__(:protocol) == Sample + assert module.__info__(:attributes)[:protocol_impl] == + [protocol: Sample, for: ImplStruct] + end - defimpl Attribute, for: ImplStruct do - def test(_) do - {@protocol, @for} - end - end + test "defimpl with implicit derive" do + module = Module.concat(WithAny, ImplStruct) + assert module.__impl__(:for) == ImplStruct + assert module.__impl__(:target) == WithAny.Any + assert module.__impl__(:protocol) == WithAny + assert module.__info__(:attributes)[:protocol_impl] == + [protocol: WithAny, for: ImplStruct] + end - assert Attribute.test(%ImplStruct{}) == {Attribute, ImplStruct} - assert Attribute.ProtocolTest.ImplStruct.__impl__(:protocol) == Attribute - assert Attribute.ProtocolTest.ImplStruct.__impl__(:for) == ImplStruct - assert Attribute.ProtocolTest.ImplStruct.__info__(:attributes)[:impl] == - [protocol: Attribute, for: ImplStruct] + test "defimpl with explicit derive" do + module = Module.concat(Derivable, ImplStruct) + assert module.__impl__(:for) == ImplStruct + assert module.__impl__(:target) == module + assert module.__impl__(:protocol) == Derivable + assert module.__info__(:attributes)[:protocol_impl] == + [protocol: Derivable, for: ImplStruct] end test "defimpl with multiple for" do @@ -164,25 +188,20 @@ defmodule ProtocolTest do assert Multi.test(:a) == :a end - defp get_callbacks(module, name, arity) do - callbacks = for {:callback, info} <- module.__info__(:attributes), do: hd(info) + defp get_callbacks(beam, name, arity) do + callbacks = Kernel.Typespec.beam_callbacks(beam) List.keyfind(callbacks, {name, arity}, 0) |> elem(1) end - test "derives protocol" do + test "derives protocol implicitly" do struct = %ImplStruct{a: 1, b: 1} assert WithAny.ok(struct) == {:ok, struct} - end - test "derived protocol keeps local file/line info" do - assert ProtocolTest.WithAny.ProtocolTest.ImplStruct.__info__(:compile)[:source] == - String.to_char_list(__ENV__.file) + struct = %NoImplStruct{a: 1, b: 1} + assert WithAny.ok(struct) == {:ok, struct} end - test "custom derive implementation" do - struct = %ImplStruct{a: 1, b: 1} - assert Derivable.ok(struct) == {:ok, struct, %ImplStruct{}, []} - + test "derives protocol explicitly" do struct = %ImplStruct{a: 1, b: 1} assert Derivable.ok(struct) == {:ok, struct, %ImplStruct{}, []} @@ -192,7 +211,7 @@ defmodule ProtocolTest do end end - test "custom derive implementation with options" do + test "derives protocol explicitly with options" do defmodule AnotherStruct do @derive [{Derivable, :ok}] @derive [WithAny] @@ -204,7 +223,7 @@ defmodule ProtocolTest do {:ok, struct, struct(AnotherStruct), :ok} end - test "custom derive implementation via API" do + test "derive protocol explicitly via API" do defmodule InlineStruct do defstruct a: 0, b: 0 end @@ -217,9 +236,14 @@ defmodule ProtocolTest do {:ok, struct, struct(InlineStruct), :oops} end - test "cannot derive without a map implementation" do + test "derived implementation keeps local file/line info" do + assert ProtocolTest.WithAny.ProtocolTest.ImplStruct.__info__(:compile)[:source] == + String.to_charlist(__ENV__.file) + end + + test "cannot derive without any implementation" do assert_raise ArgumentError, - ~r"#{inspect Sample.Map} is not available, cannot derive 
#{inspect Sample}", fn -> + ~r"#{inspect Sample.Any} is not available, cannot derive #{inspect Sample}", fn -> defmodule NotCompiled do @derive [Sample] defstruct hello: :world @@ -243,7 +267,7 @@ defmodule Protocol.ConsolidationTest do @type t :: any @doc "Ok" @spec ok(t) :: boolean - def ok(thing) + def ok(term) end ) @@ -251,7 +275,7 @@ defmodule Protocol.ConsolidationTest do defprotocol WithAny do @fallback_to_any true @doc "Ok" - def ok(thing) + def ok(term) end ) @@ -290,6 +314,8 @@ defmodule Protocol.ConsolidationTest do {:ok, binary} = Protocol.consolidate(Sample, [Any, ImplStruct]) :code.load_binary(Sample, 'protocol_test.exs', binary) + @sample_binary binary + # Any should be moved to the end :code.purge(WithAny) :code.delete(WithAny) @@ -301,21 +327,32 @@ defmodule Protocol.ConsolidationTest do refute Protocol.consolidated?(Enumerable) end + test "consolidation prevents new implementations" do + assert ExUnit.CaptureIO.capture_io(:stderr, fn -> + defimpl WithAny, for: Integer do + def ok(_any), do: :ok + end + end) =~ ~r"the .+WithAny protocol has already been consolidated" + after + :code.purge(WithAny.Atom) + :code.delete(WithAny.Atom) + end + test "consolidated implementations without any" do - assert nil? Sample.impl_for(:foo) - assert nil? Sample.impl_for(fn(x) -> x end) - assert nil? Sample.impl_for(1) - assert nil? Sample.impl_for(1.1) - assert nil? Sample.impl_for([]) - assert nil? Sample.impl_for([1, 2, 3]) - assert nil? Sample.impl_for({}) - assert nil? Sample.impl_for({1, 2, 3}) - assert nil? Sample.impl_for("foo") - assert nil? Sample.impl_for(<<1>>) - assert nil? Sample.impl_for(self) - assert nil? Sample.impl_for(%{}) - assert nil? Sample.impl_for(hd(:erlang.ports)) - assert nil? Sample.impl_for(make_ref) + assert is_nil Sample.impl_for(:foo) + assert is_nil Sample.impl_for(fn(x) -> x end) + assert is_nil Sample.impl_for(1) + assert is_nil Sample.impl_for(1.1) + assert is_nil Sample.impl_for([]) + assert is_nil Sample.impl_for([1, 2, 3]) + assert is_nil Sample.impl_for({}) + assert is_nil Sample.impl_for({1, 2, 3}) + assert is_nil Sample.impl_for("foo") + assert is_nil Sample.impl_for(<<1>>) + assert is_nil Sample.impl_for(self()) + assert is_nil Sample.impl_for(%{}) + assert is_nil Sample.impl_for(hd(:erlang.ports)) + assert is_nil Sample.impl_for(make_ref()) assert Sample.impl_for(%ImplStruct{}) == Sample.Protocol.ConsolidationTest.ImplStruct @@ -324,33 +361,33 @@ defmodule Protocol.ConsolidationTest do end test "consolidated implementations with any and tuple fallback" do - assert WithAny.impl_for(%NoImplStruct{}) == WithAny.Any - assert WithAny.impl_for(%ImplStruct{}) == WithAny.Map # Derived + assert WithAny.impl_for(%NoImplStruct{}) == WithAny.Any + assert WithAny.impl_for(%ImplStruct{}) == WithAny.Any # Derived assert WithAny.impl_for(%{__struct__: "foo"}) == WithAny.Map - assert WithAny.impl_for(%{}) == WithAny.Map - assert WithAny.impl_for(self) == WithAny.Any + assert WithAny.impl_for(%{}) == WithAny.Map + assert WithAny.impl_for(self()) == WithAny.Any end test "consolidation keeps docs" do docs = Code.get_docs(Sample, :docs) - assert {{:ok, 1}, _, :def, [{:thing, _, nil}], "Ok"} = + assert {{:ok, 1}, _, :def, [{:term, _, nil}], "Ok"} = List.keyfind(docs, {:ok, 1}, 0) end test "consolidated keeps callbacks" do - callbacks = for {:callback, info} <- Sample.__info__(:attributes), do: hd(info) + callbacks = Kernel.Typespec.beam_callbacks(@sample_binary) assert callbacks != [] end - test "consolidation errors on missing beams" do + test "consolidation errors 
on missing BEAM files" do defprotocol NoBeam, do: nil assert Protocol.consolidate(String, []) == {:error, :not_a_protocol} assert Protocol.consolidate(NoBeam, []) == {:error, :no_beam_info} end test "consolidation updates attributes" do - assert Sample.__info__(:attributes)[:protocol] == [fallback_to_any: false, consolidated: true] - assert WithAny.__info__(:attributes)[:protocol] == [fallback_to_any: true, consolidated: true] + assert Sample.__protocol__(:consolidated?) + assert WithAny.__protocol__(:consolidated?) end test "consolidation extracts protocols" do diff --git a/lib/elixir/test/elixir/range_test.exs b/lib/elixir/test/elixir/range_test.exs index cde88cc2fb5..6e35d340d7f 100644 --- a/lib/elixir/test/elixir/range_test.exs +++ b/lib/elixir/test/elixir/range_test.exs @@ -3,22 +3,28 @@ Code.require_file "test_helper.exs", __DIR__ defmodule RangeTest do use ExUnit.Case, async: true - test :precedence do + doctest Range + + test "precedence" do assert Enum.to_list(1..3+2) == [1, 2, 3, 4, 5] assert 1..3 |> Enum.to_list == [1, 2, 3] end - test :op do + test "op" do assert (1..3).first == 1 assert (1..3).last == 3 end - test :range? do + test "range?" do assert Range.range?(1..3) refute Range.range?(0) + assert %Range{first: -10, last: 10} |> Range.range? + refute %Range{first: nil, last: 10} |> Range.range? + refute %Range{first: -10, last: nil} |> Range.range? + refute %Range{} |> Range.range? end - test :enum do + test "enum" do refute Enum.empty?(1..1) assert Enum.member?(1..3, 2) @@ -34,8 +40,26 @@ defmodule RangeTest do assert Enum.map(3..1, &(&1 * 2)) == [6, 4, 2] end - test :inspect do + test "inspect" do assert inspect(1..3) == "1..3" assert inspect(3..1) == "3..1" end + + test "integer only" do + x = 1.0 + y = 3.0 + message = "ranges (first..last) expect both sides to be integers, got: 1.0..3.0" + assert_raise ArgumentError, message, fn -> + Enum.map(x..y, &(&1 * 2)) + end + + first = [] + last = [] + message = "ranges (first..last) expect both sides to be integers, got: []..[]" + assert_raise ArgumentError, message, fn -> + first..last + Enum.map(first..last, &(&1)) + end + + end end diff --git a/lib/elixir/test/elixir/record_test.exs b/lib/elixir/test/elixir/record_test.exs index 4916b01d56b..0cb3762f2b4 100644 --- a/lib/elixir/test/elixir/record_test.exs +++ b/lib/elixir/test/elixir/record_test.exs @@ -3,6 +3,8 @@ Code.require_file "test_helper.exs", __DIR__ defmodule RecordTest do use ExUnit.Case, async: true + doctest Record + require Record test "extract/2 extracts information from an Erlang file" do @@ -28,54 +30,195 @@ defmodule RecordTest do StructExtract.__struct__ end + test "extract_all/1 extracts all records information from an Erlang file" do + all_extract = Record.extract_all(from_lib: "kernel/include/file.hrl") + assert length(all_extract) == 2 # has been stable over the very long time + assert all_extract[:file_info] + assert all_extract[:file_descriptor] + end + # We need indirection to avoid warnings defp record?(data, kind) do - Record.record?(data, kind) + Record.is_record(data, kind) end - test "record?/2" do - assert record?({User, "jose", 27}, User) - refute record?({User, "jose", 27}, Author) + test "is_record/2" do + assert record?({User, "meg", 27}, User) + refute record?({User, "meg", 27}, Author) refute record?(13, Author) + refute record?({"user", "meg", 27}, "user") + refute record?({}, User) + refute record?([], User) end # We need indirection to avoid warnings defp record?(data) do - Record.record?(data) + Record.is_record(data) end - test 
"record?/1" do - assert record?({User, "jose", 27}) - refute record?({"jose", 27}) + test "is_record/1" do + assert record?({User, "john", 27}) + refute record?({"john", 27}) refute record?(13) + refute record?({}) + end + + def record_in_guard?(term) when Record.is_record(term), + do: true + def record_in_guard?(_), + do: false + + def record_in_guard?(term, kind) when Record.is_record(term, kind), + do: true + def record_in_guard?(_, _), + do: false + + test "is_record/1/2 (in guard)" do + assert record_in_guard?({User, "john", 27}) + refute record_in_guard?({"user", "john", 27}) + + assert record_in_guard?({User, "john", 27}, User) + refute record_in_guard?({"user", "john", 27}, "user") end - Record.defrecord :timestamp, [:date, :time] - Record.defrecord :user, __MODULE__, name: "José", age: 25 - Record.defrecordp :file_info, Record.extract(:file_info, from_lib: "kernel/include/file.hrl") + Record.defrecord :timestamp, [:date, :time] + Record.defrecord :user, __MODULE__, name: "john", age: 25 + + Record.defrecordp :file_info, + Record.extract(:file_info, from_lib: "kernel/include/file.hrl") + Record.defrecordp :certificate, :OTPCertificate, + Record.extract(:OTPCertificate, from_lib: "public_key/include/public_key.hrl") - test "records generates macros that generates tuples" do + test "records are tagged" do + assert elem(file_info(), 0) == :file_info + end + + test "records macros" do record = user() - assert user(record, :name) == "José" + assert user(record, :name) == "john" assert user(record, :age) == 25 - record = user(record, name: "Eric") - assert user(record, :name) == "Eric" + record = user(record, name: "meg") + assert user(record, :name) == "meg" - assert elem(record, user(:name)) == "Eric" + assert elem(record, user(:name)) == "meg" assert elem(record, 0) == RecordTest user(name: name) = record - assert name == "Eric" + assert name == "meg" + + assert user(:name) == 1 end - test "records with no tag" do - assert elem(file_info(), 0) == :file_info + test "records with default values" do + record = user(_: :_, name: "meg") + assert user(record, :name) == "meg" + assert user(record, :age) == :_ + + assert match?(user(_: _), user()) + refute match?(user(_: "other"), user()) + + record = user(user(), _: :_, name: "meg") + assert user(record, :name) == "meg" + assert user(record, :age) == :_ + end + + Record.defrecord :defaults, + struct: ~D[2016-01-01], + map: %{}, + tuple_zero: {}, + tuple_one: {1}, + tuple_two: {1, 2}, + tuple_three: {1, 2, 3}, + list: [1, 2, 3], + call: MapSet.new, + string: "abc", + binary: <<1, 2, 3>>, + charlist: 'abc' + + test "records with literal defaults and on-the-fly record" do + assert defaults(defaults()) == [ + struct: ~D[2016-01-01], + map: %{}, + tuple_zero: {}, + tuple_one: {1}, + tuple_two: {1, 2}, + tuple_three: {1, 2, 3}, + list: [1, 2, 3], + call: MapSet.new, + string: "abc", + binary: <<1, 2, 3>>, + charlist: 'abc' + ] + assert defaults(defaults(), :struct) == ~D[2016-01-01] + assert defaults(defaults(), :map) == %{} + assert defaults(defaults(), :tuple_zero) == {} + assert defaults(defaults(), :tuple_one) == {1} + assert defaults(defaults(), :tuple_two) == {1, 2} + assert defaults(defaults(), :tuple_three) == {1, 2, 3} + assert defaults(defaults(), :list) == [1, 2, 3] + assert defaults(defaults(), :call) == MapSet.new + assert defaults(defaults(), :string) == "abc" + assert defaults(defaults(), :binary) == <<1, 2, 3>> + assert defaults(defaults(), :charlist) == 'abc' + end + + test "records with literal defaults and record in a variable" 
do + defaults = defaults() + + assert defaults(defaults) == [ + struct: ~D[2016-01-01], + map: %{}, + tuple_zero: {}, + tuple_one: {1}, + tuple_two: {1, 2}, + tuple_three: {1, 2, 3}, + list: [1, 2, 3], + call: MapSet.new, + string: "abc", + binary: <<1, 2, 3>>, + charlist: 'abc' + ] + assert defaults(defaults, :struct) == ~D[2016-01-01] + assert defaults(defaults, :map) == %{} + assert defaults(defaults, :tuple_zero) == {} + assert defaults(defaults, :tuple_one) == {1} + assert defaults(defaults, :tuple_two) == {1, 2} + assert defaults(defaults, :tuple_three) == {1, 2, 3} + assert defaults(defaults, :list) == [1, 2, 3] + assert defaults(defaults, :call) == MapSet.new + assert defaults(defaults, :string) == "abc" + assert defaults(defaults, :binary) == <<1, 2, 3>> + assert defaults(defaults, :charlist) == 'abc' end test "records with dynamic arguments" do record = file_info() assert file_info(record, :size) == :undefined + + record = user() + assert user(record) == [name: "john", age: 25] + assert user(user()) == [name: "john", age: 25] + + msg = "expected argument to be a literal atom, literal keyword or a :file_info record, " <> + "got runtime: {RecordTest, \"john\", 25}" + assert_raise ArgumentError, msg, fn -> + file_info(record) + end + + pretender = {RecordTest, "john"} + msg = "expected argument to be a RecordTest record with 2 fields, " <> + "got: {RecordTest, \"john\"}" + assert_raise ArgumentError, msg, fn -> + user(pretender) + end + + pretender = {RecordTest, "john", 25, []} + msg = "expected argument to be a RecordTest record with 2 fields, " <> + "got: {RecordTest, \"john\", 25, []}" + assert_raise ArgumentError, msg, fn -> + user(pretender) + end end test "records visibility" do diff --git a/lib/elixir/test/elixir/regex_test.exs b/lib/elixir/test/elixir/regex_test.exs index 702c7594f66..df16567e168 100644 --- a/lib/elixir/test/elixir/regex_test.exs +++ b/lib/elixir/test/elixir/regex_test.exs @@ -3,45 +3,22 @@ Code.require_file "test_helper.exs", __DIR__ defmodule RegexTest do use ExUnit.Case, async: true - test :multiline do + doctest Regex + + test "multiline" do refute Regex.match?(~r/^b$/, "a\nb\nc") assert Regex.match?(~r/^b$/m, "a\nb\nc") end - test :precedence do + test "precedence" do assert {"aa", :unknown} |> elem(0) =~ ~r/(a)\1/ end - test :backreference do + test "backreference" do assert "aa" =~ ~r/(a)\1/ end - test :compile! do - assert Regex.regex?(Regex.compile!("foo")) - - assert_raise Regex.CompileError, ~r/position 0$/, fn -> - Regex.compile!("*foo") - end - end - - test :compile do - {:ok, regex} = Regex.compile("foo") - assert Regex.regex?(regex) - assert {:error, _} = Regex.compile("*foo") - assert {:error, _} = Regex.compile("foo", "y") - end - - test :compile_with_erl_opts do - {:ok, regex} = Regex.compile("foo\\sbar", [:dotall, {:newline, :anycrlf}]) - assert "foo\nbar" =~ regex - end - - test :regex? 
do - assert Regex.regex?(~r/foo/) - refute Regex.regex?(0) - end - - test :source do + test "source" do src = "foo" assert Regex.source(Regex.compile!(src)) == src assert Regex.source(~r/#{src}/) == src @@ -55,7 +32,7 @@ defmodule RegexTest do assert Regex.source(~r/#{src}/) == src end - test :literal_source do + test "literal source" do assert Regex.source(Regex.compile!("foo")) == "foo" assert Regex.source(~r"foo") == "foo" assert Regex.re_pattern(Regex.compile!("foo")) @@ -72,23 +49,71 @@ defmodule RegexTest do == Regex.re_pattern(~r"\a\b\d\e\f\n\r\s\t\v") end - test :opts do - assert Regex.opts(Regex.compile!("foo", "i")) == "i" - end - - test :unicode do - assert "josé" =~ ~r"\p{Latin}$"u + test "Unicode" do + assert "olá" =~ ~r"\p{Latin}$"u refute "£" =~ ~r/\p{Lu}/u + # Non breaking space matches [[:space:]] with Unicode + assert <<0xA0::utf8>> =~ ~r/[[:space:]]/u + assert <<0xA0::utf8>> =~ ~r/\s/u + assert <>> =~ ~r/<.>/ refute <>> =~ ~r/<.>/u end - test :names do + test "ungreedy" do + assert Regex.run(~r/[\d ]+/, "1 2 3 4 5"), ["1 2 3 4 5"] + assert Regex.run(~r/[\d ]?+/, "1 2 3 4 5"), ["1"] + assert Regex.run(~r/[\d ]+/U, "1 2 3 4 5"), ["1"] + end + + test "regex?/1" do + assert Regex.regex?(~r/foo/) + refute Regex.regex?(0) + end + + test "compile/1" do + {:ok, regex} = Regex.compile("foo") + assert Regex.regex?(regex) + assert {:error, _} = Regex.compile("*foo") + assert {:error, _} = Regex.compile("foo", "y") + assert {:error, _} = Regex.compile("foo", "uy") + end + + test "compile/1 with Erlang options" do + {:ok, regex} = Regex.compile("foo\\sbar", [:dotall, {:newline, :anycrlf}]) + assert "foo\nbar" =~ regex + end + + test "compile!/1" do + assert Regex.regex?(Regex.compile!("foo")) + + assert_raise Regex.CompileError, ~r/position 0$/, fn -> + Regex.compile!("*foo") + end + end + + test "recompile/1" do + new_regex = ~r/foo/ + {:ok, regex} = Regex.recompile(new_regex) + assert Regex.regex?(regex) + assert Regex.regex?(Regex.recompile!(new_regex)) + + old_regex = Map.delete(~r/foo/, :re_version) + {:ok, regex} = Regex.recompile(old_regex) + assert Regex.regex?(regex) + assert Regex.regex?(Regex.recompile!(old_regex)) + end + + test "opts/1" do + assert Regex.opts(Regex.compile!("foo", "i")) == "i" + end + + test "names/1" do assert Regex.names(~r/(?foo)/) == ["FOO"] end - test :match? 
do + test "match?/2" do assert Regex.match?(~r/foo/, "foo") refute Regex.match?(~r/foo/, "FOO") assert Regex.match?(~r/foo/i, "FOO") @@ -101,7 +126,7 @@ defmodule RegexTest do assert Regex.match?(~r/foo$/, "afoo") end - test :named_captures do + test "named_captures/2" do assert Regex.named_captures(~r/(?c)(?d)/, "abcd") == %{"bar" => "d", "foo" => "c"} assert Regex.named_captures(~r/c(?d)/, "abcd") == %{"foo" => "d"} assert Regex.named_captures(~r/c(?d)/, "no_match") == nil @@ -109,62 +134,102 @@ defmodule RegexTest do assert Regex.named_captures(~r/c(.)/, "cat") == %{} end - test :sigil_R do + test "sigil R" do assert Regex.match?(~R/f#{1,3}o/, "f#o") end - test :run do + test "run/2" do assert Regex.run(~r"c(d)", "abcd") == ["cd", "d"] assert Regex.run(~r"e", "abcd") == nil end - test :run_with_all_names do + test "run/3 with :all_names as the value of the :capture option" do assert Regex.run(~r/c(?d)/, "abcd", capture: :all_names) == ["d"] assert Regex.run(~r/c(?d)/, "no_match", capture: :all_names) == nil assert Regex.run(~r/c(?d|e)/, "abcd abce", capture: :all_names) == ["d"] end - test :run_with_indexes do + test "run/3 with :index as the value of the :return option" do assert Regex.run(~r"c(d)", "abcd", return: :index) == [{2, 2}, {3, 1}] assert Regex.run(~r"e", "abcd", return: :index) == nil end - test :scan do + test "scan/2" do assert Regex.scan(~r"c(d|e)", "abcd abce") == [["cd", "d"], ["ce", "e"]] assert Regex.scan(~r"c(?:d|e)", "abcd abce") == [["cd"], ["ce"]] assert Regex.scan(~r"e", "abcd") == [] end - test :scan_with_all_names do + test "scan/2 with :all_names as the value of the :capture option" do assert Regex.scan(~r/cd/, "abcd", capture: :all_names) == [] assert Regex.scan(~r/c(?d)/, "abcd", capture: :all_names) == [["d"]] assert Regex.scan(~r/c(?d)/, "no_match", capture: :all_names) == [] assert Regex.scan(~r/c(?d|e)/, "abcd abce", capture: :all_names) == [["d"], ["e"]] end - test :split do + test "split/2,3" do assert Regex.split(~r",", "") == [""] + assert Regex.split(~r",", "", trim: true) == [] + assert Regex.split(~r",", "", trim: true, parts: 2) == [] + + assert Regex.split(~r"=", "key=") == ["key", ""] + assert Regex.split(~r"=", "=value") == ["", "value"] + assert Regex.split(~r" ", "foo bar baz") == ["foo", "bar", "baz"] - assert Regex.split(~r" ", "foo bar baz", parts: 0) == ["foo", "bar", "baz"] assert Regex.split(~r" ", "foo bar baz", parts: :infinity) == ["foo", "bar", "baz"] assert Regex.split(~r" ", "foo bar baz", parts: 10) == ["foo", "bar", "baz"] assert Regex.split(~r" ", "foo bar baz", parts: 2) == ["foo", "bar baz"] - assert Regex.split(~r"\s", "foobar") == ["foobar"] + assert Regex.split(~r" ", " foo bar baz ") == ["", "foo", "bar", "baz", ""] assert Regex.split(~r" ", " foo bar baz ", trim: true) == ["foo", "bar", "baz"] - assert Regex.split(~r"=", "key=") == ["key", ""] - assert Regex.split(~r"=", "=value") == ["", "value"] + assert Regex.split(~r" ", " foo bar baz ", parts: 2) == ["", "foo bar baz "] + assert Regex.split(~r" ", " foo bar baz ", trim: true, parts: 2) == ["foo", "bar baz "] + end + + test "split/3 with the :on option" do + assert Regex.split(~r/()abc()/, "xabcxabcx", on: :none) == + ["xabcxabcx"] + assert Regex.split(~r/()abc()/, "xabcxabcx", on: :all_but_first) == + ["x", "abc", "x", "abc", "x"] + + assert Regex.split(~r/(?)abc(?)/, "xabcxabcx", on: [:first, :last]) == + ["x", "abc", "x", "abc", "x"] + assert Regex.split(~r/(?)abc(?)/, "xabcxabcx", on: [:last, :first]) == + ["xabc", "xabc", "x"] + + assert 
Regex.split(~r/a(?b)c/, "abc", on: [:second]) == + ["a", "c"] + assert Regex.split(~r/a(?b)c|a(?d)c/, "abc adc abc", on: [:second]) == + ["a", "c adc a", "c"] + assert Regex.split(~r/a(?b)c|a(?d)c/, "abc adc abc", on: [:second, :fourth]) == + ["a", "c a", "c a", "c"] + end + + test "split/3 with the :include_captures option" do + assert Regex.split(~r/([ln])/, "Erlang", include_captures: true) == ["Er", "l", "a", "n", "g"] + assert Regex.split(~r/([kw])/, "Elixir", include_captures: true) == ["Elixir"] + assert Regex.split(~r/([Ee]lixir)/, "Elixir", include_captures: true, trim: true) == ["Elixir"] + assert Regex.split(~r/([Ee]lixir)/, "Elixir", include_captures: true, trim: false) == ["", "Elixir", ""] end - test :replace do + test "replace/3,4" do assert Regex.replace(~r(d), "abc", "d") == "abc" assert Regex.replace(~r(b), "abc", "d") == "adc" assert Regex.replace(~r(b), "abc", "[\\0]") == "a[b]c" assert Regex.replace(~r[(b)], "abc", "[\\1]") == "a[b]c" + assert Regex.replace(~r[(b)], "abc", "[\\2]") == "a[]c" + assert Regex.replace(~r[(b)], "abc", "[\\3]") == "a[]c" + assert Regex.replace(~r(b), "abc", "[\\g{0}]") == "a[b]c" + assert Regex.replace(~r[(b)], "abc", "[\\g{1}]") == "a[b]c" assert Regex.replace(~r(b), "abcbe", "d") == "adcde" assert Regex.replace(~r(b), "abcbe", "d", global: false) == "adcbe" + assert Regex.replace(~r/ /, "first third", "\\second\\") == + "first\\second\\third" + assert Regex.replace(~r/ /, "first third", "\\\\second\\\\") == + "first\\second\\third" + assert Regex.replace(~r[a(b)c], "abcabc", fn -> "ac" end) == "acac" assert Regex.replace(~r[a(b)c], "abcabc", fn "abc" -> "ac" end) == "acac" assert Regex.replace(~r[a(b)c], "abcabc", fn "abc", "b" -> "ac" end) == "acac" @@ -172,7 +237,7 @@ defmodule RegexTest do assert Regex.replace(~r[a(b)c], "abcabc", fn "abc", "b" -> "ac" end, global: false) == "acabc" end - test :escape do + test "escape" do assert matches_escaped?(".") refute matches_escaped?(".", "x") @@ -190,10 +255,17 @@ defmodule RegexTest do assert matches_escaped?("\\A \\z") assert matches_escaped?(" x ") - assert matches_escaped?("  x    x ") # unicode spaces here + assert matches_escaped?("  x    x ") # Unicode spaces here assert matches_escaped?("# lol") - assert matches_escaped?("\\A.^$*+?()[{\\| \t\n\xff\\z #hello\x{202F}\x{205F}") + assert matches_escaped?("\\A.^$*+?()[{\\| \t\n\x20\\z #hello\u202F\u205F") + assert Regex.match? 
Regex.compile!("[" <> Regex.escape("!-#") <> "]"), "-" + + assert Regex.escape("{}") == "\\{\\}" + assert Regex.escape("[]") == "\\[\\]" + + assert Regex.escape("{foo}") == "\\{foo\\}" + assert Regex.escape("[foo]") == "\\[foo\\]" end defp matches_escaped?(string) do diff --git a/lib/elixir/test/elixir/registry_test.exs b/lib/elixir/test/elixir/registry_test.exs new file mode 100644 index 00000000000..8a59428520b --- /dev/null +++ b/lib/elixir/test/elixir/registry_test.exs @@ -0,0 +1,405 @@ +Code.require_file "test_helper.exs", __DIR__ + +defmodule RegistryTest do + use ExUnit.Case, async: true + doctest Registry, except: [:moduledoc] + + setup config do + kind = config[:kind] || :unique + partitions = config[:partitions] || 1 + listeners = List.wrap(config[:listener]) + {:ok, _} = Registry.start_link(kind, config.test, partitions: partitions, listeners: listeners) + {:ok, %{registry: config.test, partitions: partitions}} + end + + for {describe, partitions} <- ["with 1 partition": 1, "with 8 partitions": 8] do + describe "unique #{describe}" do + @describetag kind: :unique, partitions: partitions + + test "starts configured amount of partitions", %{registry: registry, partitions: partitions} do + assert length(Supervisor.which_children(registry)) == partitions + end + + test "has unique registrations", %{registry: registry} do + {:ok, pid} = Registry.register(registry, "hello", :value) + assert is_pid(pid) + assert Registry.keys(registry, self()) == ["hello"] + + assert {:error, {:already_registered, pid}} = + Registry.register(registry, "hello", :value) + assert pid == self() + assert Registry.keys(registry, self()) == ["hello"] + + {:ok, pid} = Registry.register(registry, "world", :value) + assert is_pid(pid) + assert Registry.keys(registry, self()) |> Enum.sort() == ["hello", "world"] + end + + test "has unique registrations across processes", %{registry: registry} do + {_, task} = register_task(registry, "hello", :value) + + assert {:error, {:already_registered, ^task}} = + Registry.register(registry, "hello", :recent) + assert Registry.keys(registry, self()) == [] + assert Process.info(self(), :links) == {:links, [Process.whereis(registry)]} + end + + test "has unique registrations even if partition is delayed", %{registry: registry} do + {owner, task} = register_task(registry, "hello", :value) + assert Registry.register(registry, "hello", :other) == + {:error, {:already_registered, task}} + + :sys.suspend(owner) + kill_and_assert_down(task) + Registry.register(registry, "hello", :other) + assert Registry.lookup(registry, "hello") == [{self(), :other}] + end + + test "supports match patterns", %{registry: registry} do + value = {1, :atom, 1} + {:ok, _} = Registry.register(registry, "hello", value) + assert Registry.match(registry, "hello", {1, :_, :_}) == + [{self(), value}] + assert Registry.match(registry, "hello", {1.0, :_, :_}) == + [] + assert Registry.match(registry, "hello", {:_, :atom, :_}) == + [{self(), value}] + assert Registry.match(registry, "hello", {:"$1", :_, :"$1"}) == + [{self(), value}] + end + + test "supports guard conditions", %{registry: registry} do + value = {1, :atom, 2} + {:ok, _} = Registry.register(registry, "hello", value) + assert Registry.match(registry, "hello", {:_, :_, :"$1"}, [{:>, :"$1", 1}]) == + [{self(), value}] + assert Registry.match(registry, "hello", {:_, :_, :"$1"}, [{:>, :"$1", 2}]) == + [] + assert Registry.match(registry, "hello", {:_, :"$1", :_,}, [{:is_atom, :"$1"}]) == + [{self(), value}] + end + + test "compares using ===", 
%{registry: registry} do + {:ok, _} = Registry.register(registry, 1.0, :value) + {:ok, _} = Registry.register(registry, 1, :value) + assert Registry.keys(registry, self()) |> Enum.sort() == [1, 1.0] + end + + test "updates current process value", %{registry: registry} do + assert Registry.update_value(registry, "hello", &raise/1) == :error + register_task(registry, "hello", :value) + assert Registry.update_value(registry, "hello", &raise/1) == :error + + Registry.register(registry, "world", 1) + assert Registry.lookup(registry, "world") == [{self(), 1}] + assert Registry.update_value(registry, "world", & &1 + 1) == {2, 1} + assert Registry.lookup(registry, "world") == [{self(), 2}] + end + + test "dispatches to a single key", %{registry: registry} do + assert Registry.dispatch(registry, "hello", fn _ -> + raise "will never be invoked" + end) == :ok + + {:ok, _} = Registry.register(registry, "hello", :value) + + assert Registry.dispatch(registry, "hello", fn [{pid, value}] -> + send(pid, {:dispatch, value}) + end) + + assert_received {:dispatch, :value} + end + + test "allows process unregistering", %{registry: registry} do + :ok = Registry.unregister(registry, "hello") + + {:ok, _} = Registry.register(registry, "hello", :value) + {:ok, _} = Registry.register(registry, "world", :value) + assert Registry.keys(registry, self()) |> Enum.sort() == ["hello", "world"] + + :ok = Registry.unregister(registry, "hello") + assert Registry.keys(registry, self()) == ["world"] + + :ok = Registry.unregister(registry, "world") + assert Registry.keys(registry, self()) == [] + end + + test "allows unregistering with no entries", %{registry: registry} do + assert Registry.unregister(registry, "hello") == :ok + end + + @tag listener: :"unique_listener_#{partitions}" + test "allows listeners", %{registry: registry, listener: listener} do + Process.register(self(), listener) + {_, task} = register_task(registry, "hello", :world) + assert_received {:register, ^registry, "hello", ^task, :world} + + self = self() + {:ok, _} = Registry.register(registry, "world", :value) + assert_received {:register, ^registry, "world", ^self, :value} + + :ok = Registry.unregister(registry, "world") + assert_received {:unregister, ^registry, "world", ^self} + end + + test "links and unlinks on register/unregister", %{registry: registry} do + {:ok, pid} = Registry.register(registry, "hello", :value) + {:links, links} = Process.info(self(), :links) + assert pid in links + + {:ok, pid} = Registry.register(registry, "world", :value) + {:links, links} = Process.info(self(), :links) + assert pid in links + + :ok = Registry.unregister(registry, "hello") + {:links, links} = Process.info(self(), :links) + assert pid in links + + :ok = Registry.unregister(registry, "world") + {:links, links} = Process.info(self(), :links) + refute pid in links + end + + test "raises on unknown registry name" do + assert_raise ArgumentError, ~r/unknown registry/, fn -> + Registry.register(:unknown, "hello", :value) + end + end + + test "via callbacks", %{registry: registry} do + name = {:via, Registry, {registry, "hello"}} + + # register_name + {:ok, pid} = Agent.start_link(fn -> 0 end, name: name) + + # send + assert Agent.update(name, & &1 + 1) == :ok + + # whereis_name + assert Agent.get(name, & &1) == 1 + + # unregister_name + assert {:error, _} = + Agent.start(fn -> raise "oops" end) + + # errors + assert {:error, {:already_started, ^pid}} = + Agent.start(fn -> 0 end, name: name) + end + end + end + + for {describe, partitions} <- ["with 1 partition": 1, 
"with 8 partitions": 8] do + describe "duplicate #{describe}" do + @describetag kind: :duplicate, partitions: partitions + + test "starts configured amount of partitions", %{registry: registry, partitions: partitions} do + assert length(Supervisor.which_children(registry)) == partitions + end + + test "has duplicate registrations", %{registry: registry} do + {:ok, pid} = Registry.register(registry, "hello", :value) + assert is_pid(pid) + assert Registry.keys(registry, self()) == ["hello"] + + assert {:ok, pid} = Registry.register(registry, "hello", :value) + assert is_pid(pid) + assert Registry.keys(registry, self()) == ["hello", "hello"] + + {:ok, pid} = Registry.register(registry, "world", :value) + assert is_pid(pid) + assert Registry.keys(registry, self()) |> Enum.sort() == ["hello", "hello", "world"] + end + + test "compares using matches", %{registry: registry} do + {:ok, _} = Registry.register(registry, 1.0, :value) + {:ok, _} = Registry.register(registry, 1, :value) + assert Registry.keys(registry, self()) |> Enum.sort() == [1, 1.0] + end + + test "dispatches to multiple keys", %{registry: registry} do + assert Registry.dispatch(registry, "hello", fn _ -> + raise "will never be invoked" + end) == :ok + + {:ok, _} = Registry.register(registry, "hello", :value1) + {:ok, _} = Registry.register(registry, "hello", :value2) + {:ok, _} = Registry.register(registry, "world", :value3) + + assert Registry.dispatch(registry, "hello", fn entries -> + for {pid, value} <- entries, do: send(pid, {:dispatch, value}) + end) + + assert_received {:dispatch, :value1} + assert_received {:dispatch, :value2} + refute_received {:dispatch, :value3} + + assert Registry.dispatch(registry, "world", fn entries -> + for {pid, value} <- entries, do: send(pid, {:dispatch, value}) + end) + + refute_received {:dispatch, :value1} + refute_received {:dispatch, :value2} + assert_received {:dispatch, :value3} + end + + test "allows process unregistering", %{registry: registry} do + {:ok, _} = Registry.register(registry, "hello", :value) + {:ok, _} = Registry.register(registry, "hello", :value) + {:ok, _} = Registry.register(registry, "world", :value) + assert Registry.keys(registry, self()) |> Enum.sort() == ["hello", "hello", "world"] + + :ok = Registry.unregister(registry, "hello") + assert Registry.keys(registry, self()) == ["world"] + + :ok = Registry.unregister(registry, "world") + assert Registry.keys(registry, self()) == [] + end + + test "allows unregistering with no entries", %{registry: registry} do + assert Registry.unregister(registry, "hello") == :ok + end + + test "supports match patterns", %{registry: registry} do + value1 = {1, :atom, 1} + {:ok, _} = Registry.register(registry, "hello", value1) + value2 = {2, :atom, 2} + {:ok, _} = Registry.register(registry, "hello", value2) + + assert Registry.match(registry, "hello", {1, :_, :_}) == + [{self(), value1}] + assert Registry.match(registry, "hello", {1.0, :_, :_}) == + [] + assert Registry.match(registry, "hello", {:_, :atom, :_}) |> Enum.sort() == + [{self(), value1}, {self(), value2}] + assert Registry.match(registry, "hello", {:"$1", :_, :"$1"}) |> Enum.sort() == + [{self(), value1}, {self(), value2}] + assert Registry.match(registry, "hello", {2, :_, :_}) == + [{self(), value2}] + assert Registry.match(registry, "hello", {2.0, :_, :_}) == + [] + end + + test "supports guards", %{registry: registry} do + value1 = {1, :atom, 1} + {:ok, _} = Registry.register(registry, "hello", value1) + value2 = {2, :atom, 2} + {:ok, _} = Registry.register(registry, 
"hello", value2) + + assert Registry.match(registry, "hello", {:"$1", :_, :_}, [{:<, :"$1", 2}]) == + [{self(), value1}] + assert Registry.match(registry, "hello", {:"$1", :_, :_}, [{:>, :"$1", 3}]) == + [] + assert Registry.match(registry, "hello", {:"$1", :_, :_}, [{:<, :"$1", 3}]) |> Enum.sort() == + [{self(), value1}, {self(), value2}] + assert Registry.match(registry, "hello", {:_, :"$1", :_}, [{:is_atom, :"$1"}]) |> Enum.sort() == + [{self(), value1}, {self(), value2}] + end + + @tag listener: :"duplicate_listener_#{partitions}" + test "allows listeners", %{registry: registry, listener: listener} do + Process.register(self(), listener) + {_, task} = register_task(registry, "hello", :world) + assert_received {:register, ^registry, "hello", ^task, :world} + + self = self() + {:ok, _} = Registry.register(registry, "hello", :value) + assert_received {:register, ^registry, "hello", ^self, :value} + + :ok = Registry.unregister(registry, "hello") + assert_received {:unregister, ^registry, "hello", ^self} + end + + test "links and unlinks on register/unregister", %{registry: registry} do + {:ok, pid} = Registry.register(registry, "hello", :value) + {:links, links} = Process.info(self(), :links) + assert pid in links + + {:ok, pid} = Registry.register(registry, "world", :value) + {:links, links} = Process.info(self(), :links) + assert pid in links + + :ok = Registry.unregister(registry, "hello") + {:links, links} = Process.info(self(), :links) + assert pid in links + + :ok = Registry.unregister(registry, "world") + {:links, links} = Process.info(self(), :links) + refute pid in links + end + + test "raises on unknown registry name" do + assert_raise ArgumentError, ~r/unknown registry/, fn -> + Registry.register(:unknown, "hello", :value) + end + end + + test "raises if attempt to be used on via", %{registry: registry} do + assert_raise ArgumentError, ":via is not supported for duplicate registries", fn -> + name = {:via, Registry, {registry, "hello"}} + Agent.start_link(fn -> 0 end, name: name) + end + end + end + end + + # Note: those tests relies on internals + for kind <- [:unique, :duplicate] do + describe "clean up #{kind} registry on process crash" do + @describetag kind: kind + + @tag partitions: 8 + test "with 8 partitions", %{registry: registry} do + {_, task1} = register_task(registry, "hello", :value) + {_, task2} = register_task(registry, "world", :value) + + kill_and_assert_down(task1) + kill_and_assert_down(task2) + + # pid might be in different parition to key so need to sync with all + # paritions before checking ets tables are empty. 
+ for i <- 0..7 do + [{_, _, {partition, _}}] = :ets.lookup(registry, i) + GenServer.call(partition, :sync) + end + + for i <- 0..7 do + [{_, key, {_, pid}}] = :ets.lookup(registry, i) + assert :ets.tab2list(key) == [] + assert :ets.tab2list(pid) == [] + end + end + + @tag partitions: 1 + test "with 1 partition", %{registry: registry} do + {_, task1} = register_task(registry, "hello", :value) + {_, task2} = register_task(registry, "world", :value) + + kill_and_assert_down(task1) + kill_and_assert_down(task2) + + [{-1, {_, _, key, {partition, pid}, _}}] = :ets.lookup(registry, -1) + GenServer.call(partition, :sync) + assert :ets.tab2list(key) == [] + assert :ets.tab2list(pid) == [] + end + end + end + + defp register_task(registry, key, value) do + parent = self() + {:ok, task} = + Task.start(fn -> + send(parent, Registry.register(registry, key, value)) + Process.sleep(:infinity) + end) + assert_receive {:ok, owner} + {owner, task} + end + + defp kill_and_assert_down(pid) do + ref = Process.monitor(pid) + Process.exit(pid, :kill) + assert_receive {:DOWN, ^ref, _, _, _} + end +end diff --git a/lib/elixir/test/elixir/set_test.exs b/lib/elixir/test/elixir/set_test.exs deleted file mode 100644 index 52545b8a6fd..00000000000 --- a/lib/elixir/test/elixir/set_test.exs +++ /dev/null @@ -1,189 +0,0 @@ -Code.require_file "test_helper.exs", __DIR__ - -# A TestSet implementation used only for testing. -defmodule TestSet do - defstruct list: [] - def new(list \\ []) when is_list(list) do - %TestSet{list: list} - end - - def reduce(%TestSet{list: list}, acc, fun) do - Enumerable.reduce(list, acc, fun) - end - - def member?(%TestSet{list: list}, v) do - v in list - end - - def size(%TestSet{list: list}) do - length(list) - end -end - -defmodule SetTest.Common do - defmacro __using__(_) do - quote location: :keep do - defp new_set(list \\ []) do - Enum.into list, set_impl.new - end - - defp new_set(list, fun) do - Enum.into list, set_impl.new, fun - end - - defp int_set() do - Enum.into [1, 2, 3], set_impl.new - end - - test "delete/2" do - result = Set.delete(new_set([1, 2, 3]), 2) - assert Set.equal?(result, new_set([1, 3])) - end - - test "delete/2 with match" do - refute Set.member?(Set.delete(int_set, 1), 1) - assert Set.member?(Set.delete(int_set, 1.0), 1) - end - - test "difference/2" do - result = Set.difference(new_set([1, 2, 3]), new_set([3])) - assert Set.equal?(result, new_set([1, 2])) - end - - test "difference/2 with match" do - refute Set.member?(Set.difference(int_set, new_set([1])), 1) - assert Set.member?(Set.difference(int_set, new_set([1.0])), 1) - end - - test "difference/2 with other set" do - result = Set.difference(new_set([1, 2, 3]), TestSet.new([3])) - assert Set.equal?(result, new_set([1, 2])) - end - - test "disjoint?/2" do - assert Set.disjoint?(new_set([1, 2, 3]), new_set([4, 5 ,6])) - refute Set.disjoint?(new_set([1, 2, 3]), new_set([3, 4 ,5])) - end - - test "disjoint/2 with other set" do - assert Set.disjoint?(new_set([1, 2, 3]), TestSet.new([4, 5 ,6])) - refute Set.disjoint?(new_set([1, 2, 3]), TestSet.new([3, 4 ,5])) - end - - test "equal?/2" do - assert Set.equal?(new_set([1, 2, 3]), new_set([3, 2, 1])) - refute Set.equal?(new_set([1, 2, 3]), new_set([3.0, 2.0, 1.0])) - end - - test "equal?/2 with other set" do - assert Set.equal?(new_set([1, 2, 3]), TestSet.new([3, 2, 1])) - refute Set.equal?(new_set([1, 2, 3]), TestSet.new([3.0, 2.0, 1.0])) - end - - test "intersection/2" do - result = Set.intersection(new_set([1, 2, 3]), new_set([2, 3, 4])) - assert Set.equal?(result, 
new_set([2, 3])) - end - - test "intersection/2 with match" do - assert Set.member?(Set.intersection(int_set, new_set([1])), 1) - refute Set.member?(Set.intersection(int_set, new_set([1.0])), 1) - end - - test "intersection/2 with other set" do - result = Set.intersection(new_set([1, 2, 3]), TestSet.new([2, 3, 4])) - assert Set.equal?(result, new_set([2, 3])) - end - - test "member?/2" do - assert Set.member?(new_set([1, 2, 3]), 2) - refute Set.member?(new_set([1, 2, 3]), 4) - refute Set.member?(new_set([1, 2, 3]), 1.0) - end - - test "put/2" do - result = Set.put(new_set([1, 2]), 3) - assert Set.equal?(result, new_set([1, 2, 3])) - end - - test "put/2 with match" do - assert Set.size(Set.put(int_set, 1)) == 3 - assert Set.size(Set.put(int_set, 1.0)) == 4 - end - - test "size/1" do - assert Set.size(new_set([1, 2, 3])) == 3 - end - - test "subset?/2" do - assert Set.subset?(new_set([1, 2]), new_set([1, 2, 3])) - refute Set.subset?(new_set([1, 2, 3]), new_set([1, 2])) - end - - test "subset/2 with match?" do - assert Set.subset?(new_set([1]), int_set) - refute Set.subset?(new_set([1.0]), int_set) - end - - test "subset?/2 with other set" do - assert Set.subset?(new_set([1, 2]), TestSet.new([1, 2, 3])) - refute Set.subset?(new_set([1, 2, 3]), TestSet.new([1, 2])) - end - - test "to_list/1" do - assert Set.to_list(new_set([1, 2, 3])) |> Enum.sort == [1, 2, 3] - end - - test "union/2" do - result = Set.union(new_set([1, 2, 3]), new_set([2, 3, 4])) - assert Set.equal?(result, new_set([1, 2, 3, 4])) - end - - test "union/2 with match" do - assert Set.size(Set.union(int_set, new_set([1]))) == 3 - assert Set.size(Set.union(int_set, new_set([1.0]))) == 4 - end - - test "union/2 with other set" do - result = Set.union(new_set([1, 2, 3]), TestSet.new([2, 3, 4])) - assert Set.equal?(result, new_set([1, 2, 3, 4])) - end - - test "is enumerable" do - assert Enum.member?(int_set, 1) - refute Enum.member?(int_set, 1.0) - assert Enum.sort(int_set) == [1,2,3] - end - - test "is collectable" do - assert Set.equal?(new_set([1, 1, 2, 3, 3, 3]), new_set([1, 2, 3])) - assert Set.equal?(new_set([1, 1, 2, 3, 3, 3], &(&1 * 2)), new_set([2, 4, 6])) - assert Collectable.empty(new_set([1, 2, 3])) == new_set - end - - test "is zippable" do - set = new_set(1..8) - list = Dict.to_list(set) - assert Enum.zip(list, list) == Enum.zip(set, set) - - set = new_set(1..100) - list = Dict.to_list(set) - assert Enum.zip(list, list) == Enum.zip(set, set) - end - - test "unsupported set" do - assert_raise ArgumentError, "unsupported set: :bad_set", fn -> - Set.to_list :bad_set - end - end - end - end -end - -defmodule Set.HashSetTest do - use ExUnit.Case, async: true - use SetTest.Common - - doctest Set - def set_impl, do: HashSet -end diff --git a/lib/elixir/test/elixir/stream_test.exs b/lib/elixir/test/elixir/stream_test.exs index bc1e3ad64b5..69d7f9efc2e 100644 --- a/lib/elixir/test/elixir/stream_test.exs +++ b/lib/elixir/test/elixir/stream_test.exs @@ -3,11 +3,28 @@ Code.require_file "test_helper.exs", __DIR__ defmodule StreamTest do use ExUnit.Case, async: true + doctest Stream + + defmodule Pdict do + defstruct [] + + defimpl Collectable do + def into(struct) do + {struct, + fn + _, {:cont, x} -> Process.put(:stream_cont, [x | Process.get(:stream_cont)]) + _, :done -> Process.put(:stream_done, true) + _, :halt -> Process.put(:stream_halt, true) + end} + end + end + end + test "streams as enumerables" do - stream = Stream.map([1,2,3], &(&1 * 2)) + stream = Stream.map([1, 2, 3], &(&1 * 2)) # Reduce - assert Enum.map(stream, &(&1 
+ 1)) == [3,5,7] + assert Enum.map(stream, &(&1 + 1)) == [3, 5, 7] # Member assert Enum.member?(stream, 4) refute Enum.member?(stream, 1) @@ -16,13 +33,13 @@ defmodule StreamTest do end test "streams are composable" do - stream = Stream.map([1,2,3], &(&1 * 2)) - assert is_lazy(stream) + stream = Stream.map([1, 2, 3], &(&1 * 2)) + assert lazy?(stream) stream = Stream.map(stream, &(&1 + 1)) - assert is_lazy(stream) + assert lazy?(stream) - assert Enum.to_list(stream) == [3,5,7] + assert Enum.to_list(stream) == [3, 5, 7] end test "chunk/2, chunk/3 and chunk/4" do @@ -48,14 +65,29 @@ defmodule StreamTest do assert Enum.zip(list, list) == Enum.zip(stream, stream) end + test "chunk/4 is haltable" do + assert 1..10 |> Stream.take(6) |> Stream.chunk(4, 4, [7, 8]) |> Enum.to_list == + [[1, 2, 3, 4], [5, 6, 7, 8]] + assert 1..10 |> Stream.take(6) |> Stream.chunk(4, 4, [7, 8]) |> Stream.take(3) |> Enum.to_list == + [[1, 2, 3, 4], [5, 6, 7, 8]] + assert 1..10 |> Stream.take(6) |> Stream.chunk(4, 4, [7, 8]) |> Stream.take(2) |> Enum.to_list == + [[1, 2, 3, 4], [5, 6, 7, 8]] + assert 1..10 |> Stream.take(6) |> Stream.chunk(4, 4, [7, 8]) |> Stream.take(1) |> Enum.to_list == + [[1, 2, 3, 4]] + assert 1..6 |> Stream.take(6) |> Stream.chunk(4, 4, [7, 8]) |> Enum.to_list == + [[1, 2, 3, 4], [5, 6, 7, 8]] + end + test "chunk_by/2" do stream = Stream.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1)) - assert is_lazy(stream) + assert lazy?(stream) assert Enum.to_list(stream) == [[1], [2, 2], [3], [4, 4, 6], [7, 7]] assert stream |> Stream.take(3) |> Enum.to_list == [[1], [2, 2], [3]] + assert 1..10 |> Stream.chunk(2) |> Enum.take(2) == + [[1, 2], [3, 4]] end test "chunk_by/2 is zippable" do @@ -64,35 +96,60 @@ defmodule StreamTest do assert Enum.zip(list, list) == Enum.zip(stream, stream) end + test "chunk_by/4" do + chunk_fun = fn i, acc -> + if rem(i, 2) == 0 do + {:cont, Enum.reverse([i | acc]), []} + else + {:cont, [i | acc]} + end + end + + after_fun = fn + [] -> {:cont, []} + acc -> {:cont, Enum.reverse(acc), []} + end + + stream = Stream.chunk_by(1..10, [], chunk_fun, after_fun) + assert lazy?(stream) + + assert 1..10 |> Stream.chunk_by([], chunk_fun, after_fun) |> Enum.to_list() == + [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]] + assert 0..10 |> Stream.chunk_by([], chunk_fun, after_fun) |> Enum.to_list() == + [[0], [1, 2], [3, 4], [5, 6], [7, 8], [9, 10]] + assert 0..11 |> Stream.chunk_by([], chunk_fun, after_fun) |> Enum.to_list() == + [[0], [1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11]] + end + test "concat/1" do stream = Stream.concat([1..3, [], [4, 5, 6], [], 7..9]) assert is_function(stream) - assert Enum.to_list(stream) == [1,2,3,4,5,6,7,8,9] - assert Enum.take(stream, 5) == [1,2,3,4,5] + assert Enum.to_list(stream) == [1, 2, 3, 4, 5, 6, 7, 8, 9] + assert Enum.take(stream, 5) == [1, 2, 3, 4, 5] stream = Stream.concat([1..3, [4, 5, 6], Stream.cycle(7..100)]) assert is_function(stream) - assert Enum.take(stream, 13) == [1,2,3,4,5,6,7,8,9,10,11,12,13] + assert Enum.take(stream, 13) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] end test "concat/2" do stream = Stream.concat(1..3, 4..6) assert is_function(stream) - assert Stream.cycle(stream) |> Enum.take(16) == [1,2,3,4,5,6,1,2,3,4,5,6,1,2,3,4] + assert Stream.cycle(stream) |> Enum.take(16) == [1, 2, 3, 4, 5, 6, 1, 2, 3, 4, 5, 6, 1, 2, 3, 4] stream = Stream.concat(1..3, []) assert is_function(stream) - assert Stream.cycle(stream) |> Enum.take(5) == [1,2,3,1,2] + assert Stream.cycle(stream) |> Enum.take(5) == [1, 2, 3, 1, 2] stream = 
Stream.concat(1..6, Stream.cycle(7..9)) assert is_function(stream) - assert Stream.drop(stream, 3) |> Enum.take(13) == [4,5,6,7,8,9,7,8,9,7,8,9,7] + assert Stream.drop(stream, 3) |> Enum.take(13) == [4, 5, 6, 7, 8, 9, 7, 8, 9, 7, 8, 9, 7] stream = Stream.concat(Stream.cycle(1..3), Stream.cycle(4..6)) assert is_function(stream) - assert Enum.take(stream, 13) == [1,2,3,1,2,3,1,2,3,1,2,3,1] + assert Enum.take(stream, 13) == [1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1] end test "concat/2 does not intercept wrapped lazy enumeration" do @@ -108,39 +165,59 @@ defmodule StreamTest do end test "cycle/1" do - stream = Stream.cycle([1,2,3]) + stream = Stream.cycle([1, 2, 3]) assert is_function(stream) - assert Stream.cycle([1,2,3]) |> Stream.take(5) |> Enum.to_list == [1,2,3,1,2] - assert Enum.take(stream, 5) == [1,2,3,1,2] + assert Stream.cycle([1, 2, 3]) |> Stream.take(5) |> Enum.to_list == [1, 2, 3, 1, 2] + assert Enum.take(stream, 5) == [1, 2, 3, 1, 2] end test "cycle/1 is zippable" do - stream = Stream.cycle([1,2,3]) - assert Enum.zip(1..6, [1,2,3,1,2,3]) == Enum.zip(1..6, stream) + stream = Stream.cycle([1, 2, 3]) + assert Enum.zip(1..6, [1, 2, 3, 1, 2, 3]) == Enum.zip(1..6, stream) end test "cycle/1 with inner stream" do - assert [1,2,3] |> Stream.take(2) |> Stream.cycle |> Enum.take(4) == - [1,2,1,2] + assert [1, 2, 3] |> Stream.take(2) |> Stream.cycle |> Enum.take(4) == + [1, 2, 1, 2] + end + + test "dedup/1 is lazy" do + assert lazy? Stream.dedup([1, 2, 3]) + end + + test "dedup/1" do + assert Stream.dedup([1, 1, 2, 1, 1, 2, 1]) |> Enum.to_list == [1, 2, 1, 2, 1] + assert Stream.dedup([2, 1, 1, 2, 1]) |> Enum.to_list == [2, 1, 2, 1] + assert Stream.dedup([1, 2, 3, 4]) |> Enum.to_list == [1, 2, 3, 4] + assert Stream.dedup([1, 1.0, 2.0, 2]) |> Enum.to_list == [1, 1.0, 2.0, 2] + assert Stream.dedup([]) |> Enum.to_list == [] + assert Stream.dedup([nil, nil, true, {:value, true}]) |> Enum.to_list + == [nil, true, {:value, true}] + assert Stream.dedup([nil]) |> Enum.to_list == [nil] + end + + test "dedup_by/2" do + assert Stream.dedup_by([{1, :x}, {2, :y}, {2, :z}, {1, :x}], fn {x, _} -> x end) |> Enum.to_list + == [{1, :x}, {2, :y}, {1, :x}] end test "drop/2" do stream = Stream.drop(1..10, 5) - assert is_lazy(stream) - assert Enum.to_list(stream) == [6,7,8,9,10] + assert lazy?(stream) + assert Enum.to_list(stream) == [6, 7, 8, 9, 10] - assert Enum.to_list(Stream.drop(1..5, 0)) == [1,2,3,4,5] + assert Enum.to_list(Stream.drop(1..5, 0)) == [1, 2, 3, 4, 5] assert Enum.to_list(Stream.drop(1..3, 5)) == [] nats = Stream.iterate(1, &(&1 + 1)) - assert Stream.drop(nats, 2) |> Enum.take(5) == [3,4,5,6,7] + assert Stream.drop(nats, 2) |> Enum.take(5) == [3, 4, 5, 6, 7] end test "drop/2 with negative count" do stream = Stream.drop(1..10, -5) - assert is_lazy(stream) - assert Enum.to_list(stream) == [1,2,3,4,5] + assert lazy?(stream) + assert Enum.to_list(stream) == [1, 2, 3, 4, 5] stream = Stream.drop(1..10, -5) list = Enum.to_list(stream) @@ -148,7 +225,7 @@ defmodule StreamTest do end test "drop/2 with negative count stream entries" do - par = self + par = self() pid = spawn_link fn -> Enum.each Stream.drop(&inbox_stream/2, -3), fn x -> send par, {:stream, x} end @@ -167,52 +244,76 @@ defmodule StreamTest do refute_receive {:stream, 3} end + test "drop_every/2" do + assert 1..10 + |> Stream.drop_every(2) + |> Enum.to_list == [2, 4, 6, 8, 10] + + assert 1..10 + |> Stream.drop_every(3) + |> Enum.to_list == [2, 3, 5, 6, 8, 9] + + assert 1..10 + |> Stream.drop(2) + |> Stream.drop_every(2) + |> Stream.drop(1) + |> 
Enum.to_list == [6, 8, 10] + + assert 1..10 + |> Stream.drop_every(0) + |> Enum.to_list == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + + assert [] + |> Stream.drop_every(10) + |> Enum.to_list == [] + end + + test "drop_every/2 without non-negative integer" do + assert_raise FunctionClauseError, fn -> + Stream.drop_every(1..10, -1) + end + + assert_raise FunctionClauseError, fn -> + Stream.drop_every(1..10, 3.33) + end + end + test "drop_while/2" do stream = Stream.drop_while(1..10, &(&1 <= 5)) - assert is_lazy(stream) - assert Enum.to_list(stream) == [6,7,8,9,10] + assert lazy?(stream) + assert Enum.to_list(stream) == [6, 7, 8, 9, 10] - assert Enum.to_list(Stream.drop_while(1..5, &(&1 <= 0))) == [1,2,3,4,5] + assert Enum.to_list(Stream.drop_while(1..5, &(&1 <= 0))) == [1, 2, 3, 4, 5] assert Enum.to_list(Stream.drop_while(1..3, &(&1 <= 5))) == [] nats = Stream.iterate(1, &(&1 + 1)) - assert Stream.drop_while(nats, &(&1 <= 5)) |> Enum.take(5) == [6,7,8,9,10] + assert Stream.drop_while(nats, &(&1 <= 5)) |> Enum.take(5) == [6, 7, 8, 9, 10] end test "each/2" do Process.put(:stream_each, []) - stream = Stream.each([1,2,3], fn x -> - Process.put(:stream_each, [x|Process.get(:stream_each)]) + stream = Stream.each([1, 2, 3], fn x -> + Process.put(:stream_each, [x | Process.get(:stream_each)]) end) - assert is_lazy(stream) - assert Enum.to_list(stream) == [1,2,3] - assert Process.get(:stream_each) == [3,2,1] + assert lazy?(stream) + assert Enum.to_list(stream) == [1, 2, 3] + assert Process.get(:stream_each) == [3, 2, 1] end test "filter/2" do - stream = Stream.filter([1,2,3], fn(x) -> rem(x, 2) == 0 end) - assert is_lazy(stream) + stream = Stream.filter([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) + assert lazy?(stream) assert Enum.to_list(stream) == [2] nats = Stream.iterate(1, &(&1 + 1)) - assert Stream.filter(nats, &(rem(&1, 2) == 0)) |> Enum.take(5) == [2,4,6,8,10] - end - - test "filter_map/3" do - stream = Stream.filter_map([1,2,3], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) - assert is_lazy(stream) - assert Enum.to_list(stream) == [4] - - nats = Stream.iterate(1, &(&1 + 1)) - assert Stream.filter_map(nats, &(rem(&1, 2) == 0), &(&1 * 2)) - |> Enum.take(5) == [4,8,12,16,20] + assert Stream.filter(nats, &(rem(&1, 2) == 0)) |> Enum.take(5) == [2, 4, 6, 8, 10] end test "flat_map/2" do stream = Stream.flat_map([1, 2, 3], &[&1, &1 * 2]) - assert is_lazy(stream) + assert lazy?(stream) assert Enum.to_list(stream) == [1, 2, 2, 4, 3, 6] nats = Stream.iterate(1, &(&1 + 1)) @@ -248,32 +349,32 @@ defmodule StreamTest do end test "flat_map/2 does not leave inner stream suspended" do - stream = Stream.flat_map [1,2,3], + stream = Stream.flat_map [1, 2, 3], fn i -> Stream.resource(fn -> i end, - fn acc -> {acc, acc + 1} end, + fn acc -> {[acc], acc + 1} end, fn _ -> Process.put(:stream_flat_map, true) end) end Process.put(:stream_flat_map, false) - assert stream |> Enum.take(3) == [1,2,3] + assert stream |> Enum.take(3) == [1, 2, 3] assert Process.get(:stream_flat_map) end test "flat_map/2 does not leave outer stream suspended" do stream = Stream.resource(fn -> 1 end, - fn acc -> {acc, acc + 1} end, + fn acc -> {[acc], acc + 1} end, fn _ -> Process.put(:stream_flat_map, true) end) stream = Stream.flat_map(stream, fn i -> [i, i + 1, i + 2] end) Process.put(:stream_flat_map, false) - assert stream |> Enum.take(3) == [1,2,3] + assert stream |> Enum.take(3) == [1, 2, 3] assert Process.get(:stream_flat_map) end test "flat_map/2 closes on error" do stream = Stream.resource(fn -> 1 end, - fn acc -> {acc, acc + 1} end, + fn acc -> 
{[acc], acc + 1} end, fn _ -> Process.put(:stream_flat_map, true) end) stream = Stream.flat_map(stream, fn _ -> throw(:error) end) @@ -282,20 +383,49 @@ defmodule StreamTest do assert Process.get(:stream_flat_map) end + test "flat_map/2 with inner flat_map/2" do + stream = Stream.flat_map(1..5, fn x -> + Stream.flat_map([x], fn x -> + x..x * x + end) |> Stream.map(& &1 * 1) + end) + + assert Enum.take(stream, 5) == [1, 2, 3, 4, 3] + end + + test "flat_map/2 properly halts both inner and outer stream when inner stream is halted" do + # Fixes a bug where, once the inner stream was done, + # sending it a halt would cause it to report that the + # inner stream was halted, forcing flat_map to get + # the next value from the outer stream, evaluate it, + # and get another inner stream, just to halt it. + assert [1, 2] # 2 should never be used + |> Stream.flat_map(fn 1 -> Stream.repeatedly(fn -> 1 end) end) + |> Stream.flat_map(fn 1 -> Stream.repeatedly(fn -> 1 end) end) + |> Enum.take(1) == [1] + end + + test "interval/1" do + stream = Stream.interval(10) + now = :os.timestamp + assert Enum.take(stream, 5) == [0, 1, 2, 3, 4] + assert :timer.now_diff(:os.timestamp, now) > 50000 + end + test "into/2 and run/1" do Process.put(:stream_cont, []) Process.put(:stream_done, false) Process.put(:stream_halt, false) - stream = Stream.into([1, 2, 3], collectable_pdict) + stream = Stream.into([1, 2, 3], %Pdict{}) - assert is_lazy(stream) + assert lazy?(stream) assert Stream.run(stream) == :ok - assert Process.get(:stream_cont) == [3,2,1] + assert Process.get(:stream_cont) == [3, 2, 1] assert Process.get(:stream_done) refute Process.get(:stream_halt) - stream = Stream.into(fn _, _ -> raise "error" end, collectable_pdict) + stream = Stream.into(fn _, _ -> raise "error" end, %Pdict{}) catch_error(Stream.run(stream)) assert Process.get(:stream_halt) end @@ -305,9 +435,9 @@ defmodule StreamTest do Process.put(:stream_done, false) Process.put(:stream_halt, false) - stream = Stream.into([1, 2, 3], collectable_pdict, fn x -> x*2 end) + stream = Stream.into([1, 2, 3], %Pdict{}, fn x -> x*2 end) - assert is_lazy(stream) + assert lazy?(stream) assert Enum.to_list(stream) == [1, 2, 3] assert Process.get(:stream_cont) == [6, 4, 2] assert Process.get(:stream_done) @@ -319,9 +449,9 @@ defmodule StreamTest do Process.put(:stream_done, false) Process.put(:stream_halt, false) - stream = Stream.into([1, 2, 3], collectable_pdict) + stream = Stream.into([1, 2, 3], %Pdict{}) - assert is_lazy(stream) + assert lazy?(stream) assert Enum.take(stream, 1) == [1] assert Process.get(:stream_cont) == [1] assert Process.get(:stream_done) @@ -330,29 +460,55 @@ defmodule StreamTest do test "transform/3" do stream = Stream.transform([1, 2, 3], 0, &{[&1, &2], &1 + &2}) - assert is_lazy(stream) + assert lazy?(stream) assert Enum.to_list(stream) == [1, 0, 2, 1, 3, 3] nats = Stream.iterate(1, &(&1 + 1)) assert Stream.transform(nats, 0, &{[&1, &2], &1 + &2}) |> Enum.take(6) == [1, 0, 2, 1, 3, 3] end + test "transform/3 with early halt" do + stream = Stream.repeatedly(fn -> throw(:error) end) + |> Stream.transform(nil, &{[&1, &2], &1}) + + assert {:halted, nil} = + Enumerable.reduce(stream, {:halt, nil}, fn _, _ -> throw(:error) end) + end + + test "transform/3 with early suspend" do + stream = Stream.repeatedly(fn -> throw(:error) end) + |> Stream.transform(nil, &{[&1, &2], &1}) + + assert {:suspended, nil, _} = + Enumerable.reduce(stream, {:suspend, nil}, fn _, _ -> throw(:error) end) + end + test "transform/3 with halt" do stream = Stream.resource(fn -> 1 
end, - fn acc -> {acc, acc + 1} end, + fn acc -> {[acc], acc + 1} end, fn _ -> Process.put(:stream_transform, true) end) - stream = Stream.transform(stream, 0, fn i, acc -> if acc < 3, do: {[i], acc + 1}, else: {:halt, acc} end) + stream = Stream.transform(stream, 0, fn i, acc -> + if acc < 3, do: {[i], acc + 1}, else: {:halt, acc} + end) Process.put(:stream_transform, false) - assert Enum.to_list(stream) == [1,2,3] + assert Enum.to_list(stream) == [1, 2, 3] assert Process.get(:stream_transform) end + test "transform/3 (via flat_map) handles multiple returns from suspension" do + assert [false] + |> Stream.take(1) + |> Stream.concat([true]) + |> Stream.flat_map(&[&1]) + |> Enum.to_list() == [false, true] + end + test "iterate/2" do stream = Stream.iterate(0, &(&1+2)) - assert Enum.take(stream, 5) == [0,2,4,6,8] + assert Enum.take(stream, 5) == [0, 2, 4, 6, 8] stream = Stream.iterate(5, &(&1+2)) - assert Enum.take(stream, 5) == [5,7,9,11,13] + assert Enum.take(stream, 5) == [5, 7, 9, 11, 13] # Only calculate values if needed stream = Stream.iterate("HELLO", &raise/1) @@ -360,35 +516,101 @@ defmodule StreamTest do end test "map/2" do - stream = Stream.map([1,2,3], &(&1 * 2)) - assert is_lazy(stream) - assert Enum.to_list(stream) == [2,4,6] + stream = Stream.map([1, 2, 3], &(&1 * 2)) + assert lazy?(stream) + assert Enum.to_list(stream) == [2, 4, 6] nats = Stream.iterate(1, &(&1 + 1)) - assert Stream.map(nats, &(&1 * 2)) |> Enum.take(5) == [2,4,6,8,10] + assert Stream.map(nats, &(&1 * 2)) |> Enum.take(5) == [2, 4, 6, 8, 10] assert Stream.map(nats, &(&1 - 2)) |> Stream.map(&(&1 * 2)) |> Enum.take(3) == [-2, 0, 2] end + test "map_every/3" do + assert 1..10 + |> Stream.map_every(2, &(&1 * 2)) + |> Enum.to_list == [2, 2, 6, 4, 10, 6, 14, 8, 18, 10] + + assert 1..10 + |> Stream.map_every(3, &(&1 * 2)) + |> Enum.to_list == [2, 2, 3, 8, 5, 6, 14, 8, 9, 20] + + assert 1..10 + |> Stream.drop(2) + |> Stream.map_every(2, &(&1 * 2)) + |> Stream.drop(1) + |> Enum.to_list == [4, 10, 6, 14, 8, 18, 10] + + assert 1..5 + |> Stream.map_every(0, &(&1 * 2)) + |> Enum.to_list == [1, 2, 3, 4, 5] + + assert [] + |> Stream.map_every(10, &(&1 * 2)) + |> Enum.to_list == [] + + assert_raise FunctionClauseError, fn -> + Stream.map_every(1..10, -1, &(&1 * 2)) + end + + assert_raise FunctionClauseError, fn -> + Stream.map_every(1..10, 3.33, &(&1 * 2)) + end + end + test "reject/2" do - stream = Stream.reject([1,2,3], fn(x) -> rem(x, 2) == 0 end) - assert is_lazy(stream) - assert Enum.to_list(stream) == [1,3] + stream = Stream.reject([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) + assert lazy?(stream) + assert Enum.to_list(stream) == [1, 3] nats = Stream.iterate(1, &(&1 + 1)) - assert Stream.reject(nats, &(rem(&1, 2) == 0)) |> Enum.take(5) == [1,3,5,7,9] + assert Stream.reject(nats, &(rem(&1, 2) == 0)) |> Enum.take(5) == [1, 3, 5, 7, 9] end test "repeatedly/1" do stream = Stream.repeatedly(fn -> 1 end) - assert Enum.take(stream, 5) == [1,1,1,1,1] - stream = Stream.repeatedly(&:random.uniform/0) - [r1,r2] = Enum.take(stream, 2) + assert Enum.take(stream, 5) == [1, 1, 1, 1, 1] + stream = Stream.repeatedly(&:rand.uniform/0) + [r1, r2] = Enum.take(stream, 2) assert r1 != r2 end - test "resource/3 closes on errors" do + test "resource/3 closes on outer errors" do + stream = Stream.resource(fn -> 1 end, + fn 2 -> throw(:error) + acc -> {[acc], acc + 1} end, + fn 2 -> Process.put(:stream_resource, true) end) + + Process.put(:stream_resource, false) + assert catch_throw(Enum.to_list(stream)) == :error + assert 
Process.get(:stream_resource) + end + + test "resource/3 is zippable" do + stream = Stream.resource(fn -> 1 end, + fn 10 -> {:halt, 10} + acc -> {[acc], acc + 1} + end, + fn _ -> Process.put(:stream_resource, true) end) + + list = Enum.to_list(stream) + Process.put(:stream_resource, false) + assert Enum.zip(list, list) == Enum.zip(stream, stream) + assert Process.get(:stream_resource) + end + + test "resource/3 halts with inner list" do + stream = Stream.resource(fn -> 1 end, + fn acc -> {[acc, acc+1, acc+2], acc + 1} end, + fn _ -> Process.put(:stream_resource, true) end) + + Process.put(:stream_resource, false) + assert Enum.take(stream, 5) == [1, 2, 3, 2, 3] + assert Process.get(:stream_resource) + end + + test "resource/3 closes on errors with inner list" do stream = Stream.resource(fn -> 1 end, - fn acc -> {acc, acc + 1} end, + fn acc -> {[acc, acc+1, acc+2], acc + 1} end, fn _ -> Process.put(:stream_resource, true) end) Process.put(:stream_resource, false) @@ -397,10 +619,10 @@ defmodule StreamTest do assert Process.get(:stream_resource) end - test "resource/3 is zippable" do + test "resource/3 is zippable with inner list" do stream = Stream.resource(fn -> 1 end, - fn 10 -> nil - acc -> {acc, acc + 1} + fn 10 -> {:halt, 10} + acc -> {[acc, acc+1, acc+2], acc + 1} end, fn _ -> Process.put(:stream_resource, true) end) @@ -410,50 +632,243 @@ defmodule StreamTest do assert Process.get(:stream_resource) end + test "resource/3 halts with inner enum" do + stream = Stream.resource(fn -> 1 end, + fn acc -> {acc..acc+2, acc + 1} end, + fn _ -> Process.put(:stream_resource, true) end) + + Process.put(:stream_resource, false) + assert Enum.take(stream, 5) == [1, 2, 3, 2, 3] + assert Process.get(:stream_resource) + end + + test "resource/3 closes on errors with inner enum" do + stream = Stream.resource(fn -> 1 end, + fn acc -> {acc..acc+2, acc + 1} end, + fn _ -> Process.put(:stream_resource, true) end) + + Process.put(:stream_resource, false) + stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end) + assert catch_throw(Enum.to_list(stream)) == :error + assert Process.get(:stream_resource) + end + + test "resource/3 is zippable with inner enum" do + stream = Stream.resource(fn -> 1 end, + fn 10 -> {:halt, 10} + acc -> {acc..acc+2, acc + 1} + end, + fn _ -> Process.put(:stream_resource, true) end) + + list = Enum.to_list(stream) + Process.put(:stream_resource, false) + assert Enum.zip(list, list) == Enum.zip(stream, stream) + assert Process.get(:stream_resource) + end + + test "transform/4" do + stream = Stream.transform(1..10, fn -> 0 end, + fn x, acc -> {[x, x + acc], x} end, + fn 10 -> Process.put(:stream_transform, true) end) + + Process.put(:stream_transform, false) + assert Enum.to_list(stream) == + [1, 1, 2, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19] + assert Process.get(:stream_transform) + end + + test "transform/4 with early halt" do + stream = Stream.repeatedly(fn -> throw(:error) end) + |> Stream.transform(fn -> nil end, &{[&1, &2], &1}, + fn nil -> Process.put(:stream_transform, true) end) + + Process.put(:stream_transform, false) + assert {:halted, nil} = + Enumerable.reduce(stream, {:halt, nil}, fn _, _ -> throw(:error) end) + assert Process.get(:stream_transform) + end + + test "transform/4 with early suspend" do + stream = Stream.repeatedly(fn -> throw(:error) end) + |> Stream.transform(fn -> nil end, &{[&1, &2], &1}, + fn nil -> Process.put(:stream_transform, true) end) + + refute Process.get(:stream_transform) + assert {:suspended, nil, _} = 
+ Enumerable.reduce(stream, {:suspend, nil}, fn _, _ -> throw(:error) end) + end + + test "transform/4 closes on outer errors" do + stream = Stream.transform(1..10, fn -> 0 end, + fn 3, _ -> throw(:error) + x, acc -> {[x + acc], x} end, + fn 2 -> Process.put(:stream_transform, true) end) + + Process.put(:stream_transform, false) + assert catch_throw(Enum.to_list(stream)) == :error + assert Process.get(:stream_transform) + end + +test "transform/4 closes on nested errors" do + stream = + 1..10 + |> Stream.transform(fn -> 0 end, + fn 3, _ -> throw(:error) + x, acc -> {[x + acc], x} end, + fn _ -> Process.put(:stream_transform_inner, true) end) + |> Stream.transform(fn -> 0 end, + fn x, acc -> {[x], acc} end, + fn 0 -> Process.put(:stream_transform_outer, true) end) + + Process.put(:stream_transform_inner, false) + Process.put(:stream_transform_outer, false) + assert catch_throw(Enum.to_list(stream)) == :error + assert Process.get(:stream_transform_inner) + assert Process.get(:stream_transform_outer) + end + + test "transform/4 is zippable" do + stream = Stream.transform(1..20, fn -> 0 end, + fn 10, acc -> {:halt, acc} + x, acc -> {[x + acc], x} + end, + fn 9 -> Process.put(:stream_transform, true) end) + + list = Enum.to_list(stream) + Process.put(:stream_transform, false) + assert Enum.zip(list, list) == Enum.zip(stream, stream) + assert Process.get(:stream_transform) + end + + test "transform/4 halts with inner list" do + stream = Stream.transform(1..10, fn -> :acc end, + fn x, acc -> {[x, x+1, x+2], acc} end, + fn :acc -> Process.put(:stream_transform, true) end) + + Process.put(:stream_transform, false) + assert Enum.take(stream, 5) == [1, 2, 3, 2, 3] + assert Process.get(:stream_transform) + end + + test "transform/4 closes on errors with inner list" do + stream = Stream.transform(1..10, fn -> :acc end, + fn x, acc -> {[x, x+1, x+2], acc} end, + fn :acc -> Process.put(:stream_transform, true) end) + + Process.put(:stream_transform, false) + stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end) + assert catch_throw(Enum.to_list(stream)) == :error + assert Process.get(:stream_transform) + end + + test "transform/4 is zippable with inner list" do + stream = Stream.transform(1..20, fn -> :inner end, + fn 10, acc -> {:halt, acc} + x, acc -> {[x, x+1, x+2], acc} + end, + fn :inner -> Process.put(:stream_transform, true) end) + + list = Enum.to_list(stream) + Process.put(:stream_transform, false) + assert Enum.zip(list, list) == Enum.zip(stream, stream) + assert Process.get(:stream_transform) + end + + test "transform/4 halts with inner enum" do + stream = Stream.transform(1..10, fn -> :acc end, + fn x, acc -> {x..x+2, acc} end, + fn :acc -> Process.put(:stream_transform, true) end) + + Process.put(:stream_transform, false) + assert Enum.take(stream, 5) == [1, 2, 3, 2, 3] + assert Process.get(:stream_transform) + end + + test "transform/4 closes on errors with inner enum" do + stream = Stream.transform(1..10, fn -> :acc end, + fn x, acc -> {x..x+2, acc} end, + fn :acc -> Process.put(:stream_transform, true) end) + + Process.put(:stream_transform, false) + stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end) + assert catch_throw(Enum.to_list(stream)) == :error + assert Process.get(:stream_transform) + end + + test "transform/4 is zippable with inner enum" do + stream = Stream.transform(1..20, fn -> :inner end, + fn 10, acc -> {:halt, acc} + x, acc -> {x..x+2, acc} + end, + fn :inner -> Process.put(:stream_transform, true) end) + + list = 
Enum.to_list(stream) + Process.put(:stream_transform, false) + assert Enum.zip(list, list) == Enum.zip(stream, stream) + assert Process.get(:stream_transform) + end + test "scan/2" do stream = Stream.scan(1..5, &(&1 + &2)) - assert is_lazy(stream) - assert Enum.to_list(stream) == [1,3,6,10,15] + assert lazy?(stream) + assert Enum.to_list(stream) == [1, 3, 6, 10, 15] assert Stream.scan([], &(&1 + &2)) |> Enum.to_list == [] end test "scan/3" do stream = Stream.scan(1..5, 0, &(&1 + &2)) - assert is_lazy(stream) - assert Enum.to_list(stream) == [1,3,6,10,15] + assert lazy?(stream) + assert Enum.to_list(stream) == [1, 3, 6, 10, 15] assert Stream.scan([], 0, &(&1 + &2)) |> Enum.to_list == [] end test "take/2" do stream = Stream.take(1..1000, 5) - assert is_lazy(stream) - assert Enum.to_list(stream) == [1,2,3,4,5] + assert lazy?(stream) + assert Enum.to_list(stream) == [1, 2, 3, 4, 5] assert Enum.to_list(Stream.take(1..1000, 0)) == [] - assert Enum.to_list(Stream.take(1..3, 5)) == [1,2,3] + assert Enum.to_list(Stream.take([], 5)) == [] + assert Enum.to_list(Stream.take(1..3, 5)) == [1, 2, 3] nats = Stream.iterate(1, &(&1 + 1)) - assert Enum.to_list(Stream.take(nats, 5)) == [1,2,3,4,5] + assert Enum.to_list(Stream.take(nats, 5)) == [1, 2, 3, 4, 5] stream = Stream.drop(1..100, 5) - assert Stream.take(stream, 5) |> Enum.to_list == [6,7,8,9,10] + assert Stream.take(stream, 5) |> Enum.to_list == [6, 7, 8, 9, 10] stream = 1..5 |> Stream.take(10) |> Stream.drop(15) assert {[], []} = Enum.split(stream, 5) stream = 1..20 |> Stream.take(10 + 5) |> Stream.drop(4) - assert Enum.to_list(stream) == [5,6,7,8,9,10,11,12,13,14,15] + assert Enum.to_list(stream) == [5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] + end + + test "take/2 does not consume next element on halt" do + assert [false, true] + |> Stream.each(& &1 && raise "oops") + |> Stream.take(1) + |> Stream.take_while(& &1) + |> Enum.to_list() == [] + end + + test "take/2 does not consume next element on suspend" do + assert [false, true] + |> Stream.each(& &1 && raise "oops") + |> Stream.take(1) + |> Stream.flat_map(&[&1]) + |> Enum.to_list() == [false] end test "take/2 with negative count" do Process.put(:stream_each, []) stream = Stream.take(1..100, -5) - assert is_lazy(stream) + assert lazy?(stream) - stream = Stream.each(stream, &Process.put(:stream_each, [&1|Process.get(:stream_each)])) - assert Enum.to_list(stream) == [96,97,98,99,100] - assert Process.get(:stream_each) == [100,99,98,97,96] + stream = Stream.each(stream, &Process.put(:stream_each, [&1 | Process.get(:stream_each)])) + assert Enum.to_list(stream) == [96, 97, 98, 99, 100] + assert Process.get(:stream_each) == [100, 99, 98, 97, 96] end test "take/2 is zippable" do @@ -467,32 +882,61 @@ defmodule StreamTest do |> Stream.take_every(2) |> Enum.to_list == [1, 3, 5, 7, 9] + assert 1..10 + |> Stream.take_every(3) + |> Enum.to_list == [1, 4, 7, 10] + assert 1..10 |> Stream.drop(2) |> Stream.take_every(2) |> Stream.drop(1) |> Enum.to_list == [5, 7, 9] + + assert 1..10 + |> Stream.take_every(0) + |> Enum.to_list == [] + + assert [] + |> Stream.take_every(10) + |> Enum.to_list == [] + end + + test "take_every/2 without non-negative integer" do + assert_raise FunctionClauseError, fn -> + Stream.take_every(1..10, -1) + end + + assert_raise FunctionClauseError, fn -> + Stream.take_every(1..10, 3.33) + end end test "take_while/2" do stream = Stream.take_while(1..1000, &(&1 <= 5)) - assert is_lazy(stream) - assert Enum.to_list(stream) == [1,2,3,4,5] + assert lazy?(stream) + assert Enum.to_list(stream) == 
[1, 2, 3, 4, 5] assert Enum.to_list(Stream.take_while(1..1000, &(&1 <= 0))) == [] - assert Enum.to_list(Stream.take_while(1..3, &(&1 <= 5))) == [1,2,3] + assert Enum.to_list(Stream.take_while(1..3, &(&1 <= 5))) == [1, 2, 3] nats = Stream.iterate(1, &(&1 + 1)) - assert Enum.to_list(Stream.take_while(nats, &(&1 <= 5))) == [1,2,3,4,5] + assert Enum.to_list(Stream.take_while(nats, &(&1 <= 5))) == [1, 2, 3, 4, 5] stream = Stream.drop(1..100, 5) - assert Stream.take_while(stream, &(&1 < 11)) |> Enum.to_list == [6,7,8,9,10] + assert Stream.take_while(stream, &(&1 < 11)) |> Enum.to_list == [6, 7, 8, 9, 10] + end + + test "timer/1" do + stream = Stream.timer(10) + now = :os.timestamp + assert Enum.to_list(stream) == [0] + assert :timer.now_diff(:os.timestamp, now) > 10000 end test "unfold/2" do - stream = Stream.unfold(10, fn x -> if x > 0, do: {x, x-1}, else: nil end) + stream = Stream.unfold(10, fn x -> if x > 0, do: {x, x-1} end) assert Enum.take(stream, 5) == [10, 9, 8, 7, 6] - stream = Stream.unfold(5, fn x -> if x > 0, do: {x, x-1}, else: nil end) + stream = Stream.unfold(5, fn x -> if x > 0, do: {x, x-1} end) assert Enum.to_list(stream) == [5, 4, 3, 2, 1] end @@ -500,68 +944,78 @@ defmodule StreamTest do stream = Stream.unfold(1, fn x -> if x > 0, do: {x, x-1}, else: throw(:boom) end) assert Enum.take(stream, 1) == [1] - stream = Stream.unfold(5, fn x -> if x > 0, do: {x, x-1}, else: nil end) + stream = Stream.unfold(5, fn x -> if x > 0, do: {x, x-1} end) assert Enum.to_list(Stream.take(stream, 2)) == [5, 4] end test "unfold/2 is zippable" do - stream = Stream.unfold(10, fn x -> if x > 0, do: {x, x-1}, else: nil end) + stream = Stream.unfold(10, fn x -> if x > 0, do: {x, x-1} end) list = Enum.to_list(stream) assert Enum.zip(list, list) == Enum.zip(stream, stream) end - test "uniq/1" do - assert Stream.uniq([1, 2, 3, 2, 1]) |> Enum.to_list == - [1, 2, 3] + test "uniq/1 & uniq/2" do + assert Stream.uniq([1, 2, 3, 2, 1]) |> Enum.to_list == [1, 2, 3] + end + + test "uniq_by/2" do + assert Stream.uniq_by([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end) |> Enum.to_list == + [{1, :x}, {2, :y}] - assert Stream.uniq([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end) |> Enum.to_list == - [{1,:x}, {2,:y}] + assert Stream.uniq_by([a: {:tea, 2}, b: {:tea, 2}, c: {:coffee, 1}], fn {_, y} -> y end) |> Enum.to_list == + [a: {:tea, 2}, c: {:coffee, 1}] end test "zip/2" do concat = Stream.concat(1..3, 4..6) cycle = Stream.cycle([:a, :b, :c]) assert Stream.zip(concat, cycle) |> Enum.to_list == - [{1,:a},{2,:b},{3,:c},{4,:a},{5,:b},{6,:c}] + [{1, :a}, {2, :b}, {3, :c}, {4, :a}, {5, :b}, {6, :c}] + end + + test "zip/1" do + concat = Stream.concat(1..3, 4..6) + cycle = Stream.cycle([:a, :b, :c]) + assert Stream.zip([concat, cycle]) |> Enum.to_list == + [{1, :a}, {2, :b}, {3, :c}, {4, :a}, {5, :b}, {6, :c}] end - test "zip/2 does not leave streams suspended" do + test "zip/1 does not leave streams suspended" do stream = Stream.resource(fn -> 1 end, - fn acc -> {acc, acc + 1} end, + fn acc -> {[acc], acc + 1} end, fn _ -> Process.put(:stream_zip, true) end) Process.put(:stream_zip, false) - assert Stream.zip([:a, :b, :c], stream) |> Enum.to_list == [a: 1, b: 2, c: 3] + assert Stream.zip([[:a, :b, :c], stream]) |> Enum.to_list == [a: 1, b: 2, c: 3] assert Process.get(:stream_zip) Process.put(:stream_zip, false) - assert Stream.zip(stream, [:a, :b, :c]) |> Enum.to_list == [{1, :a}, {2, :b}, {3, :c}] + assert Stream.zip([stream, [:a, :b, :c]]) |> Enum.to_list == [{1, :a}, {2, :b}, {3, :c}] assert 
Process.get(:stream_zip) end - test "zip/2 does not leave streams suspended on halt" do + test "zip/1 does not leave streams suspended on halt" do stream = Stream.resource(fn -> 1 end, - fn acc -> {acc, acc + 1} end, + fn acc -> {[acc], acc + 1} end, fn _ -> Process.put(:stream_zip, :done) end) - assert Stream.zip([:a, :b, :c, :d, :e], stream) |> Enum.take(3) == + assert Stream.zip([[:a, :b, :c, :d, :e], stream]) |> Enum.take(3) == [a: 1, b: 2, c: 3] assert Process.get(:stream_zip) == :done end - test "zip/2 closes on inner error" do - stream = Stream.into([1, 2, 3], collectable_pdict) - stream = Stream.zip(stream, Stream.map([:a, :b, :c], fn _ -> throw(:error) end)) + test "zip/1 closes on inner error" do + stream = Stream.into([1, 2, 3], %Pdict{}) + stream = Stream.zip([stream, Stream.map([:a, :b, :c], fn _ -> throw(:error) end)]) Process.put(:stream_done, false) assert catch_throw(Enum.to_list(stream)) == :error assert Process.get(:stream_done) end - test "zip/2 closes on outer error" do - stream = Stream.into([1, 2, 3], collectable_pdict) - |> Stream.zip([:a, :b, :c]) + test "zip/1 closes on outer error" do + stream = Stream.zip([Stream.into([1, 2, 3], %Pdict{}), [:a, :b, :c]]) |> Stream.map(fn _ -> throw(:error) end) Process.put(:stream_done, false) @@ -570,26 +1024,21 @@ defmodule StreamTest do end test "with_index/2" do - stream = Stream.with_index([1,2,3]) - assert is_lazy(stream) - assert Enum.to_list(stream) == [{1,0},{2,1},{3,2}] + stream = Stream.with_index([1, 2, 3]) + assert lazy?(stream) + assert Enum.to_list(stream) == [{1, 0}, {2, 1}, {3, 2}] + + stream = Stream.with_index([1, 2, 3], 10) + assert Enum.to_list(stream) == [{1, 10}, {2, 11}, {3, 12}] nats = Stream.iterate(1, &(&1 + 1)) - assert Stream.with_index(nats) |> Enum.take(3) == [{1,0},{2,1},{3,2}] + assert Stream.with_index(nats) |> Enum.take(3) == [{1, 0}, {2, 1}, {3, 2}] end - defp is_lazy(stream) do + defp lazy?(stream) do match?(%Stream{}, stream) or is_function(stream, 2) end - defp collectable_pdict do - fn - _, {:cont, x} -> Process.put(:stream_cont, [x|Process.get(:stream_cont)]) - _, :done -> Process.put(:stream_done, true) - _, :halt -> Process.put(:stream_halt, true) - end - end - defp inbox_stream({:suspend, acc}, f) do {:suspended, acc, &inbox_stream(&1, f)} end diff --git a/lib/elixir/test/elixir/string/chars_test.exs b/lib/elixir/test/elixir/string/chars_test.exs index 173439287bb..be45c0074b9 100644 --- a/lib/elixir/test/elixir/string/chars_test.exs +++ b/lib/elixir/test/elixir/string/chars_test.exs @@ -3,26 +3,28 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule String.Chars.AtomTest do use ExUnit.Case, async: true - test :basic do + doctest String.Chars + + test "basic" do assert to_string(:foo) == "foo" end - test :empty do + test "empty" do assert to_string(:"") == "" end - test :true_false_nil do + test "true false nil" do assert to_string(false) == "false" assert to_string(true) == "true" assert to_string(nil) == "" end - test :with_uppercase do + test "with uppercase" do assert to_string(:fOO) == "fOO" assert to_string(:FOO) == "FOO" end - test :alias_atom do + test "alias atom" do assert to_string(Foo.Bar) == "Elixir.Foo.Bar" end end @@ -30,7 +32,7 @@ end defmodule String.Chars.BitStringTest do use ExUnit.Case, async: true - test :binary do + test "binary" do assert to_string("foo") == "foo" assert to_string(<>) == "abc" assert to_string("我今天要学习.") == "我今天要学习." 
@@ -40,11 +42,11 @@ end defmodule String.Chars.NumberTest do use ExUnit.Case, async: true - test :integer do + test "integer" do assert to_string(100) == "100" end - test :float do + test "float" do assert to_string(1.0) == "1.0" assert to_string(1.0e10) == "1.0e10" end @@ -53,15 +55,15 @@ end defmodule String.Chars.ListTest do use ExUnit.Case, async: true - test :basic do + test "basic" do assert to_string([ 1, "b", 3 ]) == <<1, 98, 3>> end - test :printable do + test "printable" do assert to_string('abc') == "abc" end - test :char_list do + test "charlist" do assert to_string([0, 1, 2, 3, 255]) == <<0, 1, 2, 3, 195, 191>> @@ -69,48 +71,69 @@ defmodule String.Chars.ListTest do <<0, 1, 104, 101, 108, 108, 111, 2, 98, 121, 101>> end - test :empty do + test "empty" do assert to_string([]) == "" end end +defmodule String.Chars.Version.RequirementTest do + use ExUnit.Case, async: true + + test "version requirement" do + {:ok, requirement} = Version.parse_requirement("== 2.0.1") + assert String.Chars.to_string(requirement) == "== 2.0.1" + end +end + +defmodule String.Chars.URITest do + use ExUnit.Case, async: true + + test "uri" do + uri = URI.parse("http://google.com/") + assert String.Chars.to_string(uri) == "http://google.com/" + + uri_no_host = URI.parse("/foo/bar") + assert String.Chars.to_string(uri_no_host) == "/foo/bar" + end +end + defmodule String.Chars.ErrorsTest do use ExUnit.Case, async: true - test :bitstring do + test "bitstring" do assert_raise Protocol.UndefinedError, "protocol String.Chars not implemented for <<0, 1::size(4)>>, " <> "cannot convert a bitstring to a string", fn -> - to_string(<<1 :: [size(12), integer, signed]>>) + to_string(<<1 :: size(12)-integer-signed>>) end end - test :tuple do + test "tuple" do assert_raise Protocol.UndefinedError, "protocol String.Chars not implemented for {1, 2, 3}", fn -> to_string({1, 2, 3}) end end - test :pid do + test "PID" do assert_raise Protocol.UndefinedError, ~r"^protocol String\.Chars not implemented for #PID<.+?>$", fn -> to_string(self()) end end - test :ref do + test "ref" do assert_raise Protocol.UndefinedError, ~r"^protocol String\.Chars not implemented for #Reference<.+?>$", fn -> to_string(make_ref()) == "" end end - test :function do + test "function" do assert_raise Protocol.UndefinedError, ~r"^protocol String\.Chars not implemented for #Function<.+?>$", fn -> - to_string(fn -> end) + to_string(fn -> nil end) end end - test :port do - [port|_] = Port.list + test "port" do + [port | _] = Port.list assert_raise Protocol.UndefinedError, ~r"^protocol String\.Chars not implemented for #Port<.+?>$", fn -> to_string(port) end diff --git a/lib/elixir/test/elixir/string_io_test.exs b/lib/elixir/test/elixir/string_io_test.exs index da589631d29..ba858537f92 100644 --- a/lib/elixir/test/elixir/string_io_test.exs +++ b/lib/elixir/test/elixir/string_io_test.exs @@ -3,22 +3,26 @@ Code.require_file "test_helper.exs", __DIR__ defmodule StringIOTest do use ExUnit.Case, async: true - test "start and stop" do - {:ok, pid} = StringIO.open("") - assert StringIO.close(pid) == {:ok, {"", ""}} - end + doctest StringIO - test "start_link and stop" do + test "open and close" do {:ok, pid} = StringIO.open("") assert StringIO.close(pid) == {:ok, {"", ""}} end - test "peek" do + test "contents" do {:ok, pid} = StringIO.open("abc") IO.write(pid, "edf") assert StringIO.contents(pid) == {"abc", "edf"} end + test "flush" do + {:ok, pid} = StringIO.open("") + IO.write(pid, "edf") + assert StringIO.flush(pid) == "edf" + assert 
StringIO.contents(pid) == {"", ""} + end + ## IO module def start(string, opts \\ []) do @@ -50,10 +54,17 @@ defmodule StringIOTest do assert contents(pid) == {"", ""} end - test "IO.read :line with invalid utf8" do - pid = start(<< 130, 227, 129, 132, 227, 129, 134 >>) + test "IO.read :line with UTF-8" do + pid = start("⼊\n") + assert IO.read(pid, :line) == "⼊\n" + assert IO.read(pid, :line) == :eof + assert contents(pid) == {"", ""} + end + + test "IO.read :line with invalid UTF-8" do + pid = start(<<130, 227, 129, 132, 227, 129, 134>>) assert IO.read(pid, :line) == {:error, :collect_line} - assert contents(pid) == {<< 130, 227, 129, 132, 227, 129, 134 >>, ""} + assert contents(pid) == {<<130, 227, 129, 132, 227, 129, 134>>, ""} end test "IO.read count" do @@ -64,7 +75,7 @@ defmodule StringIOTest do assert contents(pid) == {"", ""} end - test "IO.read count with utf8" do + test "IO.read count with UTF-8" do pid = start("あいう") assert IO.read(pid, 2) == "あい" assert IO.read(pid, 8) == "う" @@ -72,10 +83,10 @@ defmodule StringIOTest do assert contents(pid) == {"", ""} end - test "IO.read count with invalid utf8" do - pid = start(<< 130, 227, 129, 132, 227, 129, 134 >>) + test "IO.read count with invalid UTF-8" do + pid = start(<<130, 227, 129, 132, 227, 129, 134>>) assert IO.read(pid, 2) == {:error, :invalid_unicode} - assert contents(pid) == {<< 130, 227, 129, 132, 227, 129, 134 >>, ""} + assert contents(pid) == {<<130, 227, 129, 132, 227, 129, 134>>, ""} end test "IO.binread :line with \\n" do @@ -99,6 +110,13 @@ defmodule StringIOTest do assert contents(pid) == {"", ""} end + test "IO.binread :line with raw bytes" do + pid = start(<<181, 255, 194, ?\n>>) + assert IO.binread(pid, :line) == <<181, 255, 194, ?\n>> + assert IO.binread(pid, :line) == :eof + assert contents(pid) == {"", ""} + end + test "IO.binread count" do pid = start("abc") assert IO.binread(pid, 2) == "ab" @@ -107,10 +125,10 @@ defmodule StringIOTest do assert contents(pid) == {"", ""} end - test "IO.binread count with utf8" do + test "IO.binread count with UTF-8" do pid = start("あいう") - assert IO.binread(pid, 2) == << 227, 129 >> - assert IO.binread(pid, 8) == << 130, 227, 129, 132, 227, 129, 134 >> + assert IO.binread(pid, 2) == <<227, 129>> + assert IO.binread(pid, 8) == <<130, 227, 129, 132, 227, 129, 134>> assert IO.binread(pid, 1) == :eof assert contents(pid) == {"", ""} end @@ -121,7 +139,7 @@ defmodule StringIOTest do assert contents(pid) == {"", "foo"} end - test "IO.write with utf8" do + test "IO.write with UTF-8" do pid = start("") assert IO.write(pid, "あいう") == :ok assert contents(pid) == {"", "あいう"} @@ -133,10 +151,10 @@ defmodule StringIOTest do assert contents(pid) == {"", "foo"} end - test "IO.binwrite with utf8" do + test "IO.binwrite with UTF-8" do pid = start("") assert IO.binwrite(pid, "あいう") == :ok - assert contents(pid) == {"", "あいう"} + assert contents(pid) == {"", <<195, 163, 194, 129, 194, 130, 195, 163, 194, 129, 194, 132, 195, 163, 194, 129, 194, 134>>} end test "IO.puts" do @@ -157,16 +175,16 @@ defmodule StringIOTest do assert contents(pid) == {"c", ""} end - test "IO.getn with utf8" do + test "IO.getn with UTF-8" do pid = start("あいう") assert IO.getn(pid, ">", 2) == "あい" assert contents(pid) == {"う", ""} end - test "IO.getn with invalid utf8" do - pid = start(<< 130, 227, 129, 132, 227, 129, 134 >>) + test "IO.getn with invalid UTF-8" do + pid = start(<<130, 227, 129, 132, 227, 129, 134>>) assert IO.getn(pid, ">", 2) == {:error, :invalid_unicode} - assert contents(pid) == {<< 130, 227, 129, 132, 
227, 129, 134 >>, ""} + assert contents(pid) == {<<130, 227, 129, 132, 227, 129, 134>>, ""} end test "IO.getn with capture_prompt" do @@ -193,10 +211,10 @@ defmodule StringIOTest do assert contents(pid) == {"", ""} end - test "IO.gets with invalid utf8" do - pid = start(<< 130, 227, 129, 132, 227, 129, 134 >>) + test "IO.gets with invalid UTF-8" do + pid = start(<<130, 227, 129, 132, 227, 129, 134>>) assert IO.gets(pid, ">") == {:error, :collect_line} - assert contents(pid) == {<< 130, 227, 129, 132, 227, 129, 134 >>, ""} + assert contents(pid) == {<<130, 227, 129, 132, 227, 129, 134>>, ""} end test "IO.gets with capture_prompt" do @@ -217,12 +235,12 @@ defmodule StringIOTest do assert contents(pid) == {"", ""} end - test "IO.stream with invalid utf8" do - pid = start(<< 130, 227, 129, 132, 227, 129, 134 >>) + test "IO.stream with invalid UTF-8" do + pid = start(<<130, 227, 129, 132, 227, 129, 134>>) assert_raise IO.StreamError, fn-> IO.stream(pid, 2) |> Enum.to_list end - assert contents(pid) == {<< 130, 227, 129, 132, 227, 129, 134 >>, ""} + assert contents(pid) == {<<130, 227, 129, 132, 227, 129, 134>>, ""} end test "IO.binstream" do diff --git a/lib/elixir/test/elixir/string_test.exs b/lib/elixir/test/elixir/string_test.exs index f6f38e7624d..f84ed2a6f16 100644 --- a/lib/elixir/test/elixir/string_test.exs +++ b/lib/elixir/test/elixir/string_test.exs @@ -3,20 +3,16 @@ Code.require_file "test_helper.exs", __DIR__ defmodule StringTest do use ExUnit.Case, async: true - test :integer_codepoints do - assert ?é == 233 - assert ?\xE9 == 233 - assert ?\351 == 233 - end + doctest String - test :next_codepoint do + test "next_codepoint/1" do assert String.next_codepoint("ésoj") == {"é", "soj"} assert String.next_codepoint(<<255>>) == {<<255>>, ""} assert String.next_codepoint("") == nil end # test cases described in http://mortoray.com/2013/11/27/the-string-type-is-broken/ - test :unicode do + test "Unicode" do assert String.reverse("noël") == "lëon" assert String.slice("noël", 0..2) == "noë" assert String.length("noël") == 4 @@ -26,41 +22,58 @@ defmodule StringTest do assert String.reverse("") == "" assert String.upcase("baffle") == "BAFFLE" + + assert String.equivalent?("noël", "noël") end - test :split do - assert String.split("") == [""] + test "split/1,2,3" do + assert String.split("") == [] assert String.split("foo bar") == ["foo", "bar"] assert String.split(" foo bar") == ["foo", "bar"] assert String.split("foo bar ") == ["foo", "bar"] assert String.split(" foo bar ") == ["foo", "bar"] assert String.split("foo\t\n\v\f\r\sbar\n") == ["foo", "bar"] - assert String.split("foo" <> <<31>> <> "bar") == ["foo", "bar"] assert String.split("foo" <> <<194, 133>> <> "bar") == ["foo", "bar"] + # information separators are not considered whitespace + assert String.split("foo\u001Fbar") == ["foo\u001Fbar"] + # no-break space is excluded + assert String.split("foo\00A0bar") == ["foo\00A0bar"] + assert String.split("foo\u202Fbar") == ["foo\u202Fbar"] - assert String.split("", ",") == [""] assert String.split("a,b,c", ",") == ["a", "b", "c"] assert String.split("a,b", ".") == ["a,b"] assert String.split("1,2 3,4", [" ", ","]) == ["1", "2", "3", "4"] + + assert String.split("", ",") == [""] assert String.split(" a b c ", " ") == ["", "a", "b", "c", ""] + assert String.split(" a b c ", " ", parts: :infinity) == ["", "a", "b", "c", ""] + assert String.split(" a b c ", " ", parts: 1) == [" a b c "] + assert String.split(" a b c ", " ", parts: 2) == ["", "a b c "] + assert String.split("", ",", trim: true) 
== [] assert String.split(" a b c ", " ", trim: true) == ["a", "b", "c"] - assert String.split(" a b c ", " ", trim: true, parts: 0) == ["a", "b", "c"] assert String.split(" a b c ", " ", trim: true, parts: :infinity) == ["a", "b", "c"] assert String.split(" a b c ", " ", trim: true, parts: 1) == [" a b c "] + assert String.split(" a b c ", " ", trim: true, parts: 2) == ["a", "b c "] assert String.split("abé", "") == ["a", "b", "é", ""] - assert String.split("abé", "", parts: 0) == ["a", "b", "é", ""] + assert String.split("abé", "", parts: :infinity) == ["a", "b", "é", ""] assert String.split("abé", "", parts: 1) == ["abé"] assert String.split("abé", "", parts: 2) == ["a", "bé"] assert String.split("abé", "", parts: 10) == ["a", "b", "é", ""] assert String.split("abé", "", trim: true) == ["a", "b", "é"] - assert String.split("abé", "", trim: true, parts: 0) == ["a", "b", "é"] + assert String.split("abé", "", trim: true, parts: :infinity) == ["a", "b", "é"] assert String.split("abé", "", trim: true, parts: 2) == ["a", "bé"] + + assert String.split("noël", "") == ["n", "o", "ë", "l", ""] + + assert String.split("x-", "-", parts: 2, trim: true) == ["x"] + assert String.split("x-x-", "-", parts: 3, trim: true) == ["x", "x"] end - test :split_with_regex do + test "split/2,3 with regex" do assert String.split("", ~r{,}) == [""] + assert String.split("", ~r{,}, trim: true) == [] assert String.split("a,b", ~r{,}) == ["a", "b"] assert String.split("a,b,c", ~r{,}) == ["a", "b", "c"] assert String.split("a,b,c", ~r{,}, parts: 2) == ["a", "b,c"] @@ -69,7 +82,27 @@ defmodule StringTest do assert String.split("a,b", ~r{\.}) == ["a,b"] end - test :split_at do + test "split/2,3 with compiled pattern" do + pattern = :binary.compile_pattern("-") + + assert String.split("x-", pattern) == ["x", ""] + assert String.split("x-", pattern, parts: 2, trim: true) == ["x"] + assert String.split("x-x-", pattern, parts: 3, trim: true) == ["x", "x"] + end + + test "splitter/2,3" do + assert String.splitter("a,b,c", ",") |> Enum.to_list == ["a", "b", "c"] + assert String.splitter("a,b", ".") |> Enum.to_list == ["a,b"] + assert String.splitter("1,2 3,4", [" ", ","]) |> Enum.to_list == ["1", "2", "3", "4"] + assert String.splitter("", ",") |> Enum.to_list == [""] + + assert String.splitter("", ",", trim: true) |> Enum.to_list == [] + assert String.splitter(" a b c ", " ", trim: true) |> Enum.to_list == ["a", "b", "c"] + assert String.splitter(" a b c ", " ", trim: true) |> Enum.take(1) == ["a"] + assert String.splitter(" a b c ", " ", trim: true) |> Enum.take(2) == ["a", "b"] + end + + test "split_at/2" do assert String.split_at("", 0) == {"", ""} assert String.split_at("", -1) == {"", ""} assert String.split_at("", 1) == {"", ""} @@ -84,37 +117,45 @@ defmodule StringTest do assert String.split_at("abc", -3) == {"", "abc"} assert String.split_at("abc", -4) == {"", "abc"} assert String.split_at("abc", -1000) == {"", "abc"} + + assert_raise FunctionClauseError, fn -> + String.split_at("abc", 0.1) + end + + assert_raise FunctionClauseError, fn -> + String.split_at("abc", -0.1) + end end - test :upcase do + test "upcase/1" do assert String.upcase("123 abcd 456 efg hij ( %$#) kl mnop @ qrst = -_ uvwxyz") == "123 ABCD 456 EFG HIJ ( %$#) KL MNOP @ QRST = -_ UVWXYZ" assert String.upcase("") == "" assert String.upcase("abcD") == "ABCD" end - test :upcase_utf8 do + test "upcase/1 with UTF-8" do assert String.upcase("& % # àáâ ãäå 1 2 ç æ") == "& % # ÀÁ ÃÄÅ 1 2 Ç Æ" assert String.upcase("àáâãäåæçèéêëìíîïðñòóôõöøùúûüýþ") == 
"ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ" end - test :upcase_utf8_multibyte do + test "upcase/1 with UTF-8 multibyte" do assert String.upcase("straße") == "STRASSE" assert String.upcase("áüÈß") == "ÁÜÈSS" end - test :downcase do + test "downcase/1" do assert String.downcase("123 ABcD 456 EfG HIJ ( %$#) KL MNOP @ QRST = -_ UVWXYZ") == "123 abcd 456 efg hij ( %$#) kl mnop @ qrst = -_ uvwxyz" assert String.downcase("abcD") == "abcd" assert String.downcase("") == "" end - test :downcase_utf8 do + test "downcase/1 with UTF-8" do assert String.downcase("& % # ÀÁ ÃÄÅ 1 2 Ç Æ") == "& % # àáâ ãäå 1 2 ç æ" assert String.downcase("ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ") == "àáâãäåæçèéêëìíîïðñòóôõöøùúûüýþ" assert String.downcase("áüÈß") == "áüèß" end - test :capitalize do + test "capitalize/1" do assert String.capitalize("") == "" assert String.capitalize("abc") == "Abc" assert String.capitalize("ABC") == "Abc" @@ -124,7 +165,7 @@ defmodule StringTest do assert String.capitalize(" aBc1") == " abc1" end - test :capitalize_utf8 do + test "capitalize/1 with UTF-8" do assert String.capitalize("àáâ") == "Àáâ" assert String.capitalize("ÀÁÂ") == "Àáâ" assert String.capitalize("âáà") == "Âáà" @@ -134,68 +175,183 @@ defmodule StringTest do assert String.capitalize("fin") == "Fin" end - test :rstrip do - assert String.rstrip("") == "" - assert String.rstrip(" abc ") == " abc" - assert String.rstrip(" abc a") == " abc a" - assert String.rstrip("a abc a\n\n") == "a abc a" - assert String.rstrip("a abc a\t\n\v\f\r\s") == "a abc a" - assert String.rstrip("a abc a " <> <<31>>) == "a abc a" - assert String.rstrip("a abc a" <> <<194, 133>>) == "a abc a" - assert String.rstrip(" abc aa", ?a) == " abc " - assert String.rstrip(" abc __", ?_) == " abc " - assert String.rstrip(" cat 猫猫", ?猫) == " cat " - end - - test :lstrip do - assert String.lstrip("") == "" - assert String.lstrip(" abc ") == "abc " - assert String.lstrip("a abc a") == "a abc a" - assert String.lstrip("\n\na abc a") == "a abc a" - assert String.lstrip("\t\n\v\f\r\sa abc a") == "a abc a" - assert String.lstrip(<<31>> <> " a abc a") == "a abc a" - assert String.lstrip(<<194, 133>> <> "a abc a") == "a abc a" - assert String.lstrip("__ abc _", ?_) == " abc _" - assert String.lstrip("猫猫 cat ", ?猫) == " cat " - end - - test :strip do - assert String.strip("") == "" - assert String.strip(" abc ") == "abc" - assert String.strip("a abc a\n\n") == "a abc a" - assert String.strip("a abc a\t\n\v\f\r\s") == "a abc a" - assert String.strip("___ abc ___", ?_) == " abc " - assert String.strip("猫猫猫 cat 猫猫猫", ?猫) == " cat " - end - - test :rjust do - assert String.rjust("", 5) == " " - assert String.rjust("abc", 5) == " abc" - assert String.rjust(" abc ", 9) == " abc " - assert String.rjust("猫", 5) == " 猫" - assert String.rjust("abc", 5, ?-) == "--abc" - assert String.rjust("abc", 5, ?猫) == "猫猫abc" - end - - test :ljust do - assert String.ljust("", 5) == " " - assert String.ljust("abc", 5) == "abc " - assert String.ljust(" abc ", 9) == " abc " - assert String.ljust("猫", 5) == "猫 " - assert String.ljust("abc", 5, ?-) == "abc--" - assert String.ljust("abc", 5, ?猫) == "abc猫猫" - end - - test :reverse do + test "replace_leading/3" do + assert String.replace_leading("aa abc ", "a", "b") == "bb abc " + assert String.replace_leading("__ abc ", "_", "b") == "bb abc " + assert String.replace_leading("aaaaaaaa ", "a", "b") == "bbbbbbbb " + assert String.replace_leading("aaaaaaaa ", "aaa", "b") == "bbaa " + assert String.replace_leading("aaaaaaaaa", "a", "b") == "bbbbbbbbb" + assert 
String.replace_leading("]]]]]]", "]", "[]") == "[][][][][][]" + assert String.replace_leading("]]]]]]]]", "]", "") == "" + assert String.replace_leading("]]]]]] ]", "]", "") == " ]" + assert String.replace_leading("猫猫 cat ", "猫", "й") == "йй cat " + assert String.replace_leading("test", "t", "T") == "Test" + assert String.replace_leading("t", "t", "T") == "T" + assert String.replace_leading("aaa", "b", "c") == "aaa" + + message = ~r/cannot use an empty string/ + assert_raise ArgumentError, message, fn -> + String.replace_leading("foo", "", "bar") + end + assert_raise ArgumentError, message, fn -> + String.replace_leading("", "", "bar") + end + end + + test "replace_trailing/3" do + assert String.replace_trailing(" abc aa", "a", "b") == " abc bb" + assert String.replace_trailing(" abc __", "_", "b") == " abc bb" + assert String.replace_trailing(" aaaaaaaa", "a", "b") == " bbbbbbbb" + assert String.replace_trailing(" aaaaaaaa", "aaa", "b") == " aabb" + assert String.replace_trailing("aaaaaaaaa", "a", "b") == "bbbbbbbbb" + assert String.replace_trailing("]]]]]]", "]", "[]") == "[][][][][][]" + assert String.replace_trailing("]]]]]]]]", "]", "") == "" + assert String.replace_trailing("] ]]]]]]", "]", "") == "] " + assert String.replace_trailing(" cat 猫猫", "猫", "й") == " cat йй" + assert String.replace_trailing("test", "t", "T") == "tesT" + assert String.replace_trailing("t", "t", "T") == "T" + assert String.replace_trailing("aaa", "b", "c") == "aaa" + + message = ~r/cannot use an empty string/ + assert_raise ArgumentError, message, fn -> + String.replace_trailing("foo", "", "bar") + end + assert_raise ArgumentError, message, fn -> + String.replace_trailing("", "", "bar") + end + end + + test "trim/1,2" do + assert String.trim("") == "" + assert String.trim(" abc ") == "abc" + assert String.trim("a abc a\n\n") == "a abc a" + assert String.trim("a abc a\t\n\v\f\r\s") == "a abc a" + + assert String.trim("___ abc ___", "_") == " abc " + assert String.trim("猫猫猫cat猫猫猫", "猫猫") == "猫cat猫" + # no-break space + assert String.trim("\u00A0a abc a\u00A0") == "a abc a" + # whitespace defined as a range + assert String.trim("\u2008a abc a\u2005") == "a abc a" + end + + test "trim_leading/1,2" do + assert String.trim_leading("") == "" + assert String.trim_leading(" abc ") == "abc " + assert String.trim_leading("a abc a") == "a abc a" + assert String.trim_leading("\n\na abc a") == "a abc a" + assert String.trim_leading("\t\n\v\f\r\sa abc a") == "a abc a" + assert String.trim_leading(<<194, 133, "a abc a">>) == "a abc a" + # information separators are not whitespace + assert String.trim_leading("\u001F a abc a") == "\u001F a abc a" + # no-break space + assert String.trim_leading("\u00A0 a abc a") == "a abc a" + + assert String.trim_leading("aa aaa", "aaa") == "aa aaa" + assert String.trim_leading("aaa aaa", "aa") == "a aaa" + assert String.trim_leading("aa abc ", "a") == " abc " + assert String.trim_leading("__ abc ", "_") == " abc " + assert String.trim_leading("aaaaaaaaa ", "a") == " " + assert String.trim_leading("aaaaaaaaaa", "a") == "" + assert String.trim_leading("]]]]]] ]", "]") == " ]" + assert String.trim_leading("猫猫 cat ", "猫") == " cat " + assert String.trim_leading("test", "t") == "est" + assert String.trim_leading("t", "t") == "" + assert String.trim_leading("", "t") == "" + end + + test "trim_trailing/1,2" do + assert String.trim_trailing("") == "" + assert String.trim_trailing("1\n") == "1" + assert String.trim_trailing("\r\n") == "" + assert String.trim_trailing(" abc ") == " abc" + assert 
String.trim_trailing(" abc a") == " abc a" + assert String.trim_trailing("a abc a\n\n") == "a abc a" + assert String.trim_trailing("a abc a\t\n\v\f\r\s") == "a abc a" + assert String.trim_trailing(<<"a abc a", 194, 133>>) == "a abc a" + # information separators are not whitespace + assert String.trim_trailing("a abc a \u001F") == "a abc a \u001F" + # no-break space + assert String.trim_trailing("a abc a \u00A0") == "a abc a" + + assert String.trim_trailing("aaa aa", "aaa") == "aaa aa" + assert String.trim_trailing("aaa aaa", "aa") == "aaa a" + assert String.trim_trailing(" abc aa", "a") == " abc " + assert String.trim_trailing(" abc __", "_") == " abc " + assert String.trim_trailing(" aaaaaaaaa", "a") == " " + assert String.trim_trailing("aaaaaaaaaa", "a") == "" + assert String.trim_trailing("] ]]]]]]", "]") == "] " + assert String.trim_trailing(" cat 猫猫", "猫") == " cat " + assert String.trim_trailing("test", "t") == "tes" + assert String.trim_trailing("t", "t") == "" + assert String.trim_trailing("", "t") == "" + end + + test "pad_leading/2,3" do + assert String.pad_leading("", 5) == " " + assert String.pad_leading("abc", 5) == " abc" + assert String.pad_leading(" abc ", 9) == " abc " + assert String.pad_leading("猫", 5) == " 猫" + assert String.pad_leading("-", 0) == "-" + assert String.pad_leading("-", 1) == "-" + + assert String.pad_leading("---", 5, "abc") == "ab---" + assert String.pad_leading("---", 9, "abc") == "abcabc---" + + assert String.pad_leading("---", 5, ["abc"]) == "abcabc---" + assert String.pad_leading("--", 6, ["a", "bc"]) == "abcabc--" + + assert_raise FunctionClauseError, fn -> + String.pad_leading("-", -1) + end + assert_raise FunctionClauseError, fn -> + String.pad_leading("-", 1, []) + end + + message = "expected a string padding element, got: 10" + assert_raise ArgumentError, message, fn -> + String.pad_leading("-", 3, ["-", 10]) + end + end + + test "pad_trailing/2,3" do + assert String.pad_trailing("", 5) == " " + assert String.pad_trailing("abc", 5) == "abc " + assert String.pad_trailing(" abc ", 9) == " abc " + assert String.pad_trailing("猫", 5) == "猫 " + assert String.pad_trailing("-", 0) == "-" + assert String.pad_trailing("-", 1) == "-" + + assert String.pad_trailing("---", 5, "abc") == "---ab" + assert String.pad_trailing("---", 9, "abc") == "---abcabc" + + assert String.pad_trailing("---", 5, ["abc"]) == "---abcabc" + assert String.pad_trailing("--", 6, ["a", "bc"]) == "--abcabc" + + assert_raise FunctionClauseError, fn -> + String.pad_trailing("-", -1) + end + assert_raise FunctionClauseError, fn -> + String.pad_trailing("-", 1, []) + end + + message = "expected a string padding element, got: 10" + assert_raise ArgumentError, message, fn -> + String.pad_trailing("-", 3, ["-", 10]) + end + end + + test "reverse/1" do assert String.reverse("") == "" assert String.reverse("abc") == "cba" assert String.reverse("Hello World") == "dlroW olleH" assert String.reverse("Hello ∂og") == "go∂ olleH" assert String.reverse("Ā̀stute") == "etutsĀ̀" assert String.reverse(String.reverse("Hello World")) == "Hello World" + assert String.reverse(String.reverse("Hello \r\n World")) == "Hello \r\n World" end - test :replace do + test "replace/3" do assert String.replace("a,b,c", ",", "-") == "a-b-c" assert String.replace("a,b,c", [",", "b"], "-") == "a---c" @@ -211,14 +367,17 @@ defmodule StringTest do assert String.replace("a,b,c", ~r/,(.)/, ",\\1\\1", global: false) == "a,bb,c" end - test :duplicate do + test "duplicate/2" do assert String.duplicate("abc", 0) == "" assert 
String.duplicate("abc", 1) == "abc" assert String.duplicate("abc", 2) == "abcabc" assert String.duplicate("&ã$", 2) == "&ã$&ã$" + assert_raise FunctionClauseError, fn -> + String.duplicate("abc", -1) + end end - test :codepoints do + test "codepoints/1" do assert String.codepoints("elixir") == ["e", "l", "i", "x", "i", "r"] assert String.codepoints("elixír") == ["e", "l", "i", "x", "í", "r"] # slovak assert String.codepoints("ոգելից ըմպելիք") == ["ո", "գ", "ե", "լ", "ի", "ց", " ", "ը", "մ", "պ", "ե", "լ", "ի", "ք"] # armenian @@ -235,25 +394,75 @@ defmodule StringTest do ["ϖ", "Ͳ", "ϥ", "Ы", "ݎ", "ߟ", "Έ", "ټ", "Ϙ", "ለ", "Д", "Ш", "व", "׆", "ש", "؇", "؊", "ص", "ل", "ټ", "ܗ", "ݎ", "ޥ", "ޘ", "߉", "ऌ", "૫", "ሏ", "ᶆ", "℆", "ℙ", "ℱ", " ", "⅚", "Ⅷ", "↠", "∈", "⌘", "①", "ffi"] end - test :graphemes do + test "equivalent?/2" do + assert String.equivalent?("", "") + assert String.equivalent?("elixir", "elixir") + assert String.equivalent?("뢴", "뢴") + assert String.equivalent?("ṩ", "ṩ") + refute String.equivalent?("ELIXIR", "elixir") + refute String.equivalent?("døge", "dóge") + end + + test "normalize/2" do + assert String.normalize("ŝ", :nfd) == "ŝ" + assert String.normalize("ḇravô", :nfd) == "ḇravô" + assert String.normalize("ṩierra", :nfd) == "ṩierra" + assert String.normalize("뢴", :nfd) == "뢴" + assert String.normalize("êchǭ", :nfc) == "êchǭ" + assert String.normalize("거̄", :nfc) == "거̄" + assert String.normalize("뢴", :nfc) == "뢴" + + ## Cases from NormalizationTest.txt + + # 05B8 05B9 05B1 0591 05C3 05B0 05AC 059F + # 05B1 05B8 05B9 0591 05C3 05B0 05AC 059F + # HEBREW POINT QAMATS, HEBREW POINT HOLAM, HEBREW POINT HATAF SEGOL, + # HEBREW ACCENT ETNAHTA, HEBREW PUNCTUATION SOF PASUQ, HEBREW POINT SHEVA, + # HEBREW ACCENT ILUY, HEBREW ACCENT QARNEY PARA + assert String.normalize("ֱָֹ֑׃ְ֬֟", :nfc) == "ֱָֹ֑׃ְ֬֟" + + # 095D (exclusion list) + # 0922 093C + # DEVANAGARI LETTER RHA + assert String.normalize("ढ़", :nfc) == "ढ़" + + # 0061 0315 0300 05AE 0340 0062 + # 00E0 05AE 0300 0315 0062 + # LATIN SMALL LETTER A, COMBINING COMMA ABOVE RIGHT, COMBINING GRAVE ACCENT, + # HEBREW ACCENT ZINOR, COMBINING GRAVE TONE MARK, LATIN SMALL LETTER B + assert String.normalize("à֮̀̕b", :nfc) == "à֮̀̕b" + + # 0344 + # 0308 0301 + # COMBINING GREEK DIALYTIKA TONOS + assert String.normalize("\u0344", :nfc) == "\u0308\u0301" + + # 115B9 0334 115AF + # 115B9 0334 115AF + # SIDDHAM VOWEL SIGN AI, COMBINING TILDE OVERLAY, SIDDHAM VOWEL SIGN AA + assert String.normalize("𑖹̴𑖯", :nfc) == "𑖹̴𑖯" + end + + test "graphemes/1" do # Extended assert String.graphemes("Ā̀stute") == ["Ā̀", "s", "t", "u", "t", "e"] # CLRF - assert String.graphemes("\n\r\f") == ["\n\r", "\f"] + assert String.graphemes("\r\n\f") == ["\r\n", "\f"] # Regional indicator - assert String.graphemes("\x{1F1E6}\x{1F1E7}\x{1F1E8}") == ["\x{1F1E6}\x{1F1E7}\x{1F1E8}"] + assert String.graphemes("\u{1F1E6}\u{1F1E7}") == ["\u{1F1E6}\u{1F1E7}"] + assert String.graphemes("\u{1F1E6}\u{1F1E7}\u{1F1E8}") == ["\u{1F1E6}\u{1F1E7}", "\u{1F1E8}"] # Hangul - assert String.graphemes("\x{1100}\x{115D}\x{B4A4}") == ["ᄀᅝ뒤"] + assert String.graphemes("\u1100\u115D\uB4A4") == ["ᄀᅝ뒤"] # Special Marking with Extended - assert String.graphemes("a\x{0300}\x{0903}") == ["a\x{0300}\x{0903}"] + assert String.graphemes("a\u0300\u0903") == ["a\u0300\u0903"] end - test :next_grapheme do + test "next_grapheme/1" do assert String.next_grapheme("Ā̀stute") == {"Ā̀", "stute"} assert String.next_grapheme("") == nil end - test :first do + test "first/1" do assert String.first("elixir") == "e" 
assert String.first("íelixr") == "í" assert String.first("եոգլից ըմպելիք") == "ե" @@ -265,7 +474,7 @@ defmodule StringTest do assert String.first("") == nil end - test :last do + test "last/1" do assert String.last("elixir") == "r" assert String.last("elixrí") == "í" assert String.last("եոգլից ըմպելիքե") == "ե" @@ -277,7 +486,7 @@ defmodule StringTest do assert String.last("") == nil end - test :length do + test "length/1" do assert String.length("elixir") == 6 assert String.length("elixrí") == 6 assert String.length("եոգլից") == 6 @@ -289,7 +498,7 @@ defmodule StringTest do assert String.length("") == 0 end - test :at do + test "at/2" do assert String.at("л", 0) == "л" assert String.at("elixir", 1) == "l" assert String.at("がガちゃ", 2) == "ち" @@ -299,9 +508,17 @@ defmodule StringTest do assert String.at("л", -3) == nil assert String.at("Ā̀stute", 1) == "s" assert String.at("elixir", 6) == nil + + assert_raise FunctionClauseError, fn -> + String.at("elixir", 0.1) + end + + assert_raise FunctionClauseError, fn -> + String.at("elixir", -0.1) + end end - test :slice do + test "slice/2,3" do assert String.slice("elixir", 1, 3) == "lix" assert String.slice("あいうえお", 2, 2) == "うえ" assert String.slice("ειξήριολ", 2, 3) == "ξήρ" @@ -344,81 +561,54 @@ defmodule StringTest do assert String.slice("", 1..1) == "" assert String.slice("あいうえお", -2..-4) == "" assert String.slice("あいうえお", -10..-15) == "" + assert String.slice("hello あいうえお unicode", 8..-1) == "うえお unicode" + assert String.slice("abc", -1..14) == "c" end - test :valid? do + test "valid?/1" do assert String.valid?("afds") assert String.valid?("øsdfh") assert String.valid?("dskfjあska") + assert String.valid?(<<0xEF, 0xB7, 0x90>>) - refute String.valid?(<<0xffff :: 16>>) - refute String.valid?("asd" <> <<0xffff :: 16>>) - end - - test :valid_character? do - assert String.valid_character?("a") - assert String.valid_character?("ø") - assert String.valid_character?("あ") - - refute String.valid_character?("\x{ffff}") - refute String.valid_character?("ab") + refute String.valid?(<<0xFFFF::16>>) + refute String.valid?("asd" <> <<0xFFFF::16>>) end - test :chunk_valid do + test "chunk/2 with :valid trait" do assert String.chunk("", :valid) == [] - assert String.chunk("ødskfjあ\011ska", :valid) - == ["ødskfjあ\011ska"] - assert String.chunk("abc\x{0ffff}def", :valid) - == ["abc", <<0x0ffff::utf8>>, "def"] - assert String.chunk("\x{0fffe}\x{3ffff}привет\x{0ffff}мир", :valid) - == [<<0x0fffe::utf8, 0x3ffff::utf8>>, "привет", <<0x0ffff::utf8>>, "мир"] - assert String.chunk("日本\x{0ffff}\x{fdef}ござございます\x{fdd0}", :valid) - == ["日本", <<0x0ffff::utf8, 0xfdef::utf8>>, "ござございます", <<0xfdd0::utf8>>] + assert String.chunk("ødskfjあ\x11ska", :valid) + == ["ødskfjあ\x11ska"] end - test :chunk_printable do + test "chunk/2 with :printable trait" do assert String.chunk("", :printable) == [] assert String.chunk("ødskfjあska", :printable) == ["ødskfjあska"] - assert String.chunk("abc\x{0ffff}def", :printable) - == ["abc", <<0x0ffff::utf8>>, "def"] - assert String.chunk("\006ab\005cdef\003\000", :printable) - == [<<06>>, "ab", <<05>>, "cdef", <<03, 0>>] + assert String.chunk("abc\u{0FFFF}def", :printable) + == ["abc", <<0x0FFFF::utf8>>, "def"] + assert String.chunk("\x06ab\x05cdef\x03\0", :printable) + == [<<6>>, "ab", <<5>>, "cdef", <<3, 0>>] end - test :starts_with? do - ## Normal cases ## + test "starts_with?/2" do assert String.starts_with? "hello", "he" assert String.starts_with? "hello", "hello" + refute String.starts_with? "hello", [] assert String.starts_with? 
"hello", ["hellö", "hell"] assert String.starts_with? "エリクシア", "エリ" refute String.starts_with? "hello", "lo" refute String.starts_with? "hello", "hellö" refute String.starts_with? "hello", ["hellö", "goodbye"] refute String.starts_with? "エリクシア", "仙丹" - - ## Edge cases ## - assert String.starts_with? "", "" - assert String.starts_with? "", ["", "a"] - assert String.starts_with? "b", ["", "a"] - - assert String.starts_with? "abc", "" - assert String.starts_with? "abc", [""] - - refute String.starts_with? "", "abc" - refute String.starts_with? "", [" "] - - ## Sanity checks ## - assert String.starts_with? "", ["", ""] - assert String.starts_with? "abc", ["", ""] end - test :ends_with? do - ## Normal cases ## + test "ends_with?/2" do assert String.ends_with? "hello", "lo" assert String.ends_with? "hello", "hello" + refute String.ends_with? "hello", [] assert String.ends_with? "hello", ["hell", "lo", "xx"] assert String.ends_with? "hello", ["hellö", "lo"] assert String.ends_with? "エリクシア", "シア" @@ -426,57 +616,74 @@ defmodule StringTest do refute String.ends_with? "hello", "hellö" refute String.ends_with? "hello", ["hel", "goodbye"] refute String.ends_with? "エリクシア", "仙丹" - - ## Edge cases ## - assert String.ends_with? "", "" - assert String.ends_with? "", ["", "a"] - refute String.ends_with? "", ["a", "b"] - - assert String.ends_with? "abc", "" - assert String.ends_with? "abc", ["", "x"] - - refute String.ends_with? "", "abc" - refute String.ends_with? "", [" "] - - ## Sanity checks ## - assert String.ends_with? "", ["", ""] - assert String.ends_with? "abc", ["", ""] end - test :contains? do - ## Normal cases ## + test "contains?/2" do assert String.contains? "elixir of life", "of" assert String.contains? "エリクシア", "シ" + refute String.contains? "elixir of life", [] assert String.contains? "elixir of life", ["mercury", "life"] refute String.contains? "elixir of life", "death" refute String.contains? "エリクシア", "仙" refute String.contains? "elixir of life", ["death", "mercury", "eternal life"] - - ## Edge cases ## - assert String.contains? "", "" - assert String.contains? "abc", "" - assert String.contains? "abc", ["", "x"] - - refute String.contains? "", " " - refute String.contains? "", "a" - - ## Sanity checks ## - assert String.contains? "", ["", ""] - assert String.contains? 
"abc", ["", ""] end - test :to_char_list do - assert String.to_char_list("æß") == [?æ, ?ß] - assert String.to_char_list("abc") == [?a, ?b, ?c] + test "to_charlist/1" do + assert String.to_charlist("æß") == [?æ, ?ß] + assert String.to_charlist("abc") == [?a, ?b, ?c] assert_raise UnicodeConversionError, "invalid encoding starting at <<223, 255>>", fn -> - String.to_char_list(<< 0xDF, 0xFF >>) + String.to_charlist(<< 0xDF, 0xFF >>) end assert_raise UnicodeConversionError, "incomplete encoding starting at <<195>>", fn -> - String.to_char_list(<< 106, 111, 115, 195 >>) + String.to_charlist(<< 106, 111, 115, 195 >>) end end + + test "to_float/1" do + assert String.to_float("3.0") == 3.0 + + three = fn -> "3" end + assert_raise ArgumentError, fn -> String.to_float(three.()) end + end + + test "jaro_distance/2" do + assert String.jaro_distance("same", "same") == 1.0 + assert String.jaro_distance("any", "") == 0.0 + assert String.jaro_distance("", "any") == 0.0 + assert String.jaro_distance("martha", "marhta") == 0.9444444444444445 + assert String.jaro_distance("martha", "marhha") == 0.888888888888889 + assert String.jaro_distance("marhha", "martha") == 0.888888888888889 + assert String.jaro_distance("dwayne", "duane") == 0.8222222222222223 + assert String.jaro_distance("dixon", "dicksonx") == 0.7666666666666666 + assert String.jaro_distance("xdicksonx", "dixon") == 0.7851851851851852 + assert String.jaro_distance("shackleford", "shackelford") == 0.9696969696969697 + assert String.jaro_distance("dunningham", "cunnigham") == 0.8962962962962964 + assert String.jaro_distance("nichleson", "nichulson") == 0.9259259259259259 + assert String.jaro_distance("jones", "johnson") == 0.7904761904761904 + assert String.jaro_distance("massey", "massie") == 0.888888888888889 + assert String.jaro_distance("abroms", "abrams") == 0.888888888888889 + assert String.jaro_distance("hardin", "martinez") == 0.7222222222222222 + assert String.jaro_distance("itman", "smith") == 0.4666666666666666 + assert String.jaro_distance("jeraldine", "geraldine") == 0.9259259259259259 + assert String.jaro_distance("michelle", "michael") == 0.8690476190476191 + assert String.jaro_distance("julies", "julius") == 0.888888888888889 + assert String.jaro_distance("tanya", "tonya") == 0.8666666666666667 + assert String.jaro_distance("sean", "susan") == 0.7833333333333333 + assert String.jaro_distance("jon", "john") == 0.9166666666666666 + assert String.jaro_distance("jon", "jan") == 0.7777777777777777 + assert String.jaro_distance("семена", "стремя") == 0.6666666666666666 + end + + test "myers_difference/2" do + assert String.myers_difference("", "abc") == [ins: "abc"] + assert String.myers_difference("abc", "") == [del: "abc"] + assert String.myers_difference("", "") == [] + assert String.myers_difference("abc", "abc") == [eq: "abc"] + assert String.myers_difference("abc", "aйbc") == [eq: "a", ins: "й", eq: "bc"] + assert String.myers_difference("aйbc", "abc") == [eq: "a", del: "й", eq: "bc"] + end end diff --git a/lib/elixir/test/elixir/supervisor/spec_test.exs b/lib/elixir/test/elixir/supervisor/spec_test.exs index a15234bcef3..6d07d3ecc2b 100644 --- a/lib/elixir/test/elixir/supervisor/spec_test.exs +++ b/lib/elixir/test/elixir/supervisor/spec_test.exs @@ -64,7 +64,7 @@ defmodule Supervisor.SpecTest do test "supervise/2" do assert supervise([], strategy: :one_for_one) == { - :ok, {{:one_for_one, 5, 5}, []} + :ok, {{:one_for_one, 3, 5}, []} } children = [worker(GenEvent, [])] diff --git a/lib/elixir/test/elixir/supervisor_test.exs 
b/lib/elixir/test/elixir/supervisor_test.exs index eed8887e107..24d13a52ca9 100644 --- a/lib/elixir/test/elixir/supervisor_test.exs +++ b/lib/elixir/test/elixir/supervisor_test.exs @@ -10,23 +10,23 @@ defmodule SupervisorTest do GenServer.start_link(__MODULE__, state, opts) end - def handle_call(:pop, _from, [h|t]) do + def handle_call(:pop, _from, [h | t]) do {:reply, h, t} end def handle_call(:stop, _from, stack) do - # There is a race condition in between genserver terminations. + # There is a race condition between genserver terminations. # So we will explicitly unregister it here. try do - self |> Process.info(:registered_name) |> elem(1) |> Process.unregister + self() |> Process.info(:registered_name) |> elem(1) |> Process.unregister rescue _ -> :ok end {:stop, :normal, :ok, stack} end - def handle_cast({:push, h}, _from, t) do - {:noreply, [h|t]} + def handle_cast({:push, h}, t) do + {:noreply, [h | t]} end end @@ -41,6 +41,21 @@ defmodule SupervisorTest do import Supervisor.Spec + test "start_link/2 with via" do + Supervisor.start_link([], strategy: :one_for_one, name: {:via, :global, :via_sup}) + assert Supervisor.which_children({:via, :global, :via_sup}) == [] + end + + test "start_link/3 with global" do + Supervisor.start_link([], strategy: :one_for_one, name: {:global, :global_sup}) + assert Supervisor.which_children({:global, :global_sup}) == [] + end + + test "start_link/3 with local" do + Supervisor.start_link([], strategy: :one_for_one, name: :my_sup) + assert Supervisor.which_children(:my_sup) == [] + end + test "start_link/2" do children = [worker(Stack, [[:hello], [name: :dyn_stack]])] {:ok, pid} = Supervisor.start_link(children, strategy: :one_for_one) @@ -51,16 +66,26 @@ defmodule SupervisorTest do wait_until_registered(:dyn_stack) assert GenServer.call(:dyn_stack, :pop) == :hello + Supervisor.stop(pid) - Process.exit(pid, :normal) + assert_raise ArgumentError, ~r"expected :name option to be one of:", fn -> + Supervisor.start_link(children, name: "my_gen_server_name", strategy: :one_for_one) + end + + assert_raise ArgumentError, ~r"expected :name option to be one of:", fn -> + Supervisor.start_link(children, name: {:invalid_tuple, "my_gen_server_name"}, strategy: :one_for_one) + end + + assert_raise ArgumentError, ~r"expected :name option to be one of:", fn -> + Supervisor.start_link(children, name: {:via, "Via", "my_gen_server_name"}, strategy: :one_for_one) + end end test "start_link/3" do - {:ok, pid} = Supervisor.start_link(Stack.Sup, {[:hello], [name: :stat_stack]}, name: :stack_sup) - wait_until_registered(:stack_sup) - + {:ok, pid} = Supervisor.start_link(Stack.Sup, {[:hello], [name: :stat_stack]}) + wait_until_registered(:stat_stack) assert GenServer.call(:stat_stack, :pop) == :hello - Process.exit(pid, :normal) + Supervisor.stop(pid) end test "*_child functions" do @@ -86,8 +111,7 @@ defmodule SupervisorTest do assert Supervisor.terminate_child(pid, Stack) == :ok assert Supervisor.delete_child(pid, Stack) == :ok - - Process.exit(pid, :normal) + Supervisor.stop(pid) end defp wait_until_registered(name) do diff --git a/lib/elixir/test/elixir/system_test.exs b/lib/elixir/test/elixir/system_test.exs index 13949d346ec..84cbfb38d6e 100644 --- a/lib/elixir/test/elixir/system_test.exs +++ b/lib/elixir/test/elixir/system_test.exs @@ -4,20 +4,31 @@ defmodule SystemTest do use ExUnit.Case import PathHelpers - test "build_info" do - assert is_map System.build_info - assert not nil?(System.build_info[:version]) - assert not nil?(System.build_info[:tag]) - assert not 
nil?(System.build_info[:date]) + test "build_info/0" do + build_info = System.build_info + assert is_map build_info + assert is_binary build_info[:build] + assert is_binary build_info[:date] + assert is_binary build_info[:revision] + assert is_binary build_info[:version] + + if build_info[:revision] != "" do + assert String.length(build_info[:revision]) >= 7 + end + + version_file = Path.join([__DIR__, "../../../..", "VERSION"]) |> Path.expand + {:ok, version} = File.read(version_file) + assert build_info[:version] == String.trim(version) + assert build_info[:build] != "" end - test "cwd" do + test "cwd/0" do assert is_binary System.cwd assert is_binary System.cwd! end if :file.native_name_encoding == :utf8 do - test "cwd_with_utf8" do + test "cwd/0 with UTF-8" do File.mkdir_p(tmp_path("héllò")) File.cd!(tmp_path("héllò"), fn -> @@ -28,17 +39,22 @@ defmodule SystemTest do end end - test "user_home" do + test "user_home/0" do assert is_binary System.user_home assert is_binary System.user_home! end - test "tmp_dir" do + test "tmp_dir/0" do assert is_binary System.tmp_dir assert is_binary System.tmp_dir! end - test "argv" do + test "endianness/0" do + assert System.endianness in [:little, :big] + assert System.endianness == System.compiled_endianness + end + + test "argv/0" do list = elixir('-e "IO.inspect System.argv" -- -o opt arg1 arg2 --long-opt 10') {args, _} = Code.eval_string list, [] assert args == ["-o", "opt", "arg1", "arg2", "--long-opt", "10"] @@ -46,7 +62,7 @@ defmodule SystemTest do @test_var "SYSTEM_ELIXIR_ENV_TEST_VAR" - test "env" do + test "*_env/*" do assert System.get_env(@test_var) == nil System.put_env(@test_var, "SAMPLE") assert System.get_env(@test_var) == "SAMPLE" @@ -59,20 +75,128 @@ defmodule SystemTest do assert System.get_env(@test_var) == "OTHER_SAMPLE" end - test "cmd" do - assert is_binary(System.cmd "echo hello") - assert is_list(System.cmd 'echo hello') + if windows?() do + test "cmd/2 win" do + assert {"hello\r\n", 0} = System.cmd "cmd", ~w[/c echo hello] + end + + test "cmd/3 (with options) win" do + assert {["hello\r\n"], 0} = System.cmd "cmd", ~w[/c echo hello], + into: [], cd: System.cwd!, env: %{"foo" => "bar", "baz" => nil}, + arg0: "echo", stderr_to_stdout: true, parallelism: true + end + + @echo "echo-elixir-test" + + test "cmd/2 with absolute and relative paths win" do + echo = tmp_path(@echo) + File.mkdir_p! Path.dirname(echo) + File.cp! System.find_executable("cmd"), echo + + File.cd! Path.dirname(echo), fn -> + # There is a bug in OTP where find_executable is finding + # entries on the current directory. If this is the case, + # we should avoid the assertion below. + unless System.find_executable(@echo) do + assert :enoent = catch_error(System.cmd(@echo, ~w[/c echo hello])) + end + + assert {"hello\r\n", 0} = System.cmd(Path.join(System.cwd!, @echo), ~w[/c echo hello], [{:arg0, "echo"}]) + end + after + File.rm_rf! Path.dirname(tmp_path(@echo)) + end + else + test "cmd/2 Unix" do + assert {"hello\n", 0} = System.cmd "echo", ["hello"] + end + + test "cmd/3 (with options) Unix" do + assert {["hello\n"], 0} = System.cmd "echo", ["hello"], + into: [], cd: System.cwd!, env: %{"foo" => "bar", "baz" => nil}, + arg0: "echo", stderr_to_stdout: true, parallelism: true + end + + @echo "echo-elixir-test" + + test "cmd/2 with absolute and relative paths Unix" do + echo = tmp_path(@echo) + File.mkdir_p! Path.dirname(echo) + File.cp! System.find_executable("echo"), echo + + File.cd! 
Path.dirname(echo), fn -> + # There is a bug in OTP where find_executable is finding + # entries on the current directory. If this is the case, + # we should avoid the assertion below. + unless System.find_executable(@echo) do + assert :enoent = catch_error(System.cmd(@echo, ["hello"])) + end + + assert {"hello\n", 0} = System.cmd(Path.join(System.cwd!, @echo), ["hello"], [{:arg0, "echo"}]) + end + after + File.rm_rf! tmp_path(@echo) + end end - test "find_executable with binary" do + test "find_executable/1" do assert System.find_executable("erl") assert is_binary System.find_executable("erl") assert !System.find_executable("does-not-really-exist-from-elixir") end - test "find_executable with list" do - assert System.find_executable('erl') - assert is_list System.find_executable('erl') - assert !System.find_executable('does-not-really-exist-from-elixir') + test "monotonic_time/0" do + assert is_integer(System.monotonic_time()) + end + + test "monotonic_time/1" do + assert is_integer(System.monotonic_time(:nanosecond)) + assert abs(System.monotonic_time(:microsecond)) < abs(System.monotonic_time(:nanosecond)) + end + + test "system_time/0" do + assert is_integer(System.system_time()) + end + + test "system_time/1" do + assert is_integer(System.system_time(:nanosecond)) + assert abs(System.system_time(:microsecond)) < abs(System.system_time(:nanosecond)) + end + + test "time_offset/0 and time_offset/1" do + assert is_integer(System.time_offset()) + assert is_integer(System.time_offset(:second)) + end + + test "os_time/0" do + assert is_integer(System.os_time()) + end + + test "os_time/1" do + assert is_integer(System.os_time(:nanosecond)) + assert abs(System.os_time(:microsecond)) < abs(System.os_time(:nanosecond)) + end + + test "unique_integer/0 and unique_integer/1" do + assert is_integer(System.unique_integer()) + assert System.unique_integer([:positive]) > 0 + assert System.unique_integer([:positive, :monotonic]) < System.unique_integer([:positive, :monotonic]) + end + + test "convert_time_unit/3" do + time = System.monotonic_time(:nanosecond) + assert abs(System.convert_time_unit(time, :nanosecond, :microsecond)) < abs(time) + end + + test "schedulers/0" do + assert System.schedulers >= 1 + end + + test "schedulers_online/0" do + assert System.schedulers_online >= 1 + end + + test "otp_release/0" do + assert is_binary System.otp_release end end diff --git a/lib/elixir/test/elixir/task/supervisor_test.exs b/lib/elixir/test/elixir/task/supervisor_test.exs index 154773b9dbf..db29006d672 100644 --- a/lib/elixir/test/elixir/task/supervisor_test.exs +++ b/lib/elixir/test/elixir/task/supervisor_test.exs @@ -1,25 +1,27 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Task.SupervisorTest do - use ExUnit.Case, async: true + use ExUnit.Case + + @moduletag report: [:supervisor] + @moduletag :capture_log setup do {:ok, pid} = Task.Supervisor.start_link() {:ok, supervisor: pid} end - setup do - :error_logger.tty(false) - on_exit fn -> :error_logger.tty(true) end - :ok - end - def wait_and_send(caller, atom) do send caller, :ready receive do: (true -> true) send caller, atom end + def sleep(number) do + Process.sleep(number) + number + end + test "async/1", config do parent = self() fun = fn -> wait_and_send(parent, :done) end @@ -33,7 +35,7 @@ defmodule Task.SupervisorTest do assert is_reference task.ref # Assert the link - {:links, links} = Process.info(self, :links) + {:links, links} = Process.info(self(), :links) assert task.pid in links receive do: (:ready -> :ok) @@ -63,13 +65,56 
@@ defmodule Task.SupervisorTest do assert Task.await(task) == :done end + test "async_nolink/1", config do + parent = self() + fun = fn -> wait_and_send(parent, :done) end + task = Task.Supervisor.async_nolink(config[:supervisor], fun) + + assert Task.Supervisor.children(config[:supervisor]) == [task.pid] + + # Assert the struct + assert task.__struct__ == Task + assert is_pid task.pid + assert is_reference task.ref + + # Refute the link + {:links, links} = Process.info(self(), :links) + refute task.pid in links + + receive do: (:ready -> :ok) + + # Assert the initial call + {:name, fun_name} = :erlang.fun_info(fun, :name) + assert {__MODULE__, fun_name, 0} === :proc_lib.translate_initial_call(task.pid) + + # Run the task + send task.pid, true + + # Assert response and monitoring messages + ref = task.ref + assert_receive {^ref, :done} + assert_receive {:DOWN, ^ref, _, _, :normal} + end + + test "async_nolink/3", config do + task = Task.Supervisor.async_nolink(config[:supervisor], __MODULE__, :wait_and_send, [self(), :done]) + assert Task.Supervisor.children(config[:supervisor]) == [task.pid] + + receive do: (:ready -> :ok) + assert {__MODULE__, :wait_and_send, 2} === :proc_lib.translate_initial_call(task.pid) + + send task.pid, true + assert task.__struct__ == Task + assert Task.await(task) == :done + end + test "start_child/1", config do parent = self() fun = fn -> wait_and_send(parent, :done) end {:ok, pid} = Task.Supervisor.start_child(config[:supervisor], fun) assert Task.Supervisor.children(config[:supervisor]) == [pid] - {:links, links} = Process.info(self, :links) + {:links, links} = Process.info(self(), :links) refute pid in links receive do: (:ready -> :ok) @@ -84,7 +129,7 @@ defmodule Task.SupervisorTest do {:ok, pid} = Task.Supervisor.start_child(config[:supervisor], __MODULE__, :wait_and_send, [self(), :done]) assert Task.Supervisor.children(config[:supervisor]) == [pid] - {:links, links} = Process.info(self, :links) + {:links, links} = Process.info(self(), :links) refute pid in links receive do: (:ready -> :ok) @@ -92,6 +137,14 @@ defmodule Task.SupervisorTest do send pid, true assert_receive :done + + assert_raise FunctionClauseError, fn -> + Task.Supervisor.start_child(config[:supervisor], __MODULE__, :wait_and_send, :illegal_arg) + end + + assert_raise FunctionClauseError, fn -> + Task.Supervisor.start_child(config[:supervisor], __MODULE__, "wait_and_send", [self(), :done]) + end end test "terminate_child/2", config do @@ -102,24 +155,139 @@ defmodule Task.SupervisorTest do assert Task.Supervisor.terminate_child(config[:supervisor], pid) == :ok end - test "await/1 exits on task throw", config do - Process.flag(:trap_exit, true) - task = Task.Supervisor.async(config[:supervisor], fn -> throw :unknown end) - assert {{{:nocatch, :unknown}, _}, {Task, :await, [^task, 5000]}} = - catch_exit(Task.await(task)) + describe "await/1" do + test "exits on task throw", config do + Process.flag(:trap_exit, true) + task = Task.Supervisor.async(config[:supervisor], fn -> throw :unknown end) + assert {{{:nocatch, :unknown}, _}, {Task, :await, [^task, 5000]}} = + catch_exit(Task.await(task)) + end + + test "exits on task error", config do + Process.flag(:trap_exit, true) + task = Task.Supervisor.async(config[:supervisor], fn -> raise "oops" end) + assert {{%RuntimeError{}, _}, {Task, :await, [^task, 5000]}} = + catch_exit(Task.await(task)) + end + + test "exits on task exit", config do + Process.flag(:trap_exit, true) + task = Task.Supervisor.async(config[:supervisor], fn -> exit :unknown 
end) + assert {:unknown, {Task, :await, [^task, 5000]}} = + catch_exit(Task.await(task)) + end end - test "await/1 exits on task error", config do - Process.flag(:trap_exit, true) - task = Task.Supervisor.async(config[:supervisor], fn -> raise "oops" end) - assert {{%RuntimeError{}, _}, {Task, :await, [^task, 5000]}} = - catch_exit(Task.await(task)) + describe "async_stream" do + @opts [] + test "streams an enumerable with fun", %{supervisor: supervisor} do + assert supervisor + |> Task.Supervisor.async_stream(1..4, &sleep/1, @opts) + |> Enum.to_list == + [ok: 1, ok: 2, ok: 3, ok: 4] + end + + test "streams an enumerable with mfa", %{supervisor: supervisor} do + assert supervisor + |> Task.Supervisor.async_stream(1..4, __MODULE__, :sleep, [], @opts) + |> Enum.to_list == + [ok: 1, ok: 2, ok: 3, ok: 4] + end + + test "streams an enumerable without leaking tasks", %{supervisor: supervisor} do + assert supervisor + |> Task.Supervisor.async_stream(1..4, &sleep/1, @opts) + |> Enum.to_list == + [ok: 1, ok: 2, ok: 3, ok: 4] + refute_received _ + end + + test "streams an enumerable with slowest first", %{supervisor: supervisor} do + Process.flag(:trap_exit, true) + assert supervisor + |> Task.Supervisor.async_stream(4..1, &sleep/1, @opts) + |> Enum.to_list == + [ok: 4, ok: 3, ok: 2, ok: 1] + end + + test "streams an enumerable with exits", %{supervisor: supervisor} do + Process.flag(:trap_exit, true) + assert supervisor + |> Task.Supervisor.async_stream(1..4, &exit/1, @opts) + |> Enum.to_list == + [exit: 1, exit: 2, exit: 3, exit: 4] + end + + test "shuts down unused tasks", %{supervisor: supervisor} do + assert supervisor + |> Task.Supervisor.async_stream([0, :infinity, :infinity, :infinity], &sleep/1, @opts) + |> Enum.take(1) == + [ok: 0] + assert Process.info(self(), :links) == {:links, [supervisor]} + end + + test "shuts down unused tasks without leaking messages", %{supervisor: supervisor} do + assert supervisor + |> Task.Supervisor.async_stream([0, :infinity, :infinity, :infinity], &sleep/1, @opts) + |> Enum.take(1) == + [ok: 0] + refute_received _ + end end - test "await/1 exits on task exit", config do - Process.flag(:trap_exit, true) - task = Task.Supervisor.async(config[:supervisor], fn -> exit :unknown end) - assert {:unknown, {Task, :await, [^task, 5000]}} = - catch_exit(Task.await(task)) + describe "async_stream_nolink" do + @opts [max_concurrency: 4] + + test "streams an enumerable with fun", %{supervisor: supervisor} do + assert supervisor + |> Task.Supervisor.async_stream_nolink(1..4, &sleep/1, @opts) + |> Enum.to_list == + [ok: 1, ok: 2, ok: 3, ok: 4] + end + + test "streams an enumerable with mfa", %{supervisor: supervisor} do + assert supervisor + |> Task.Supervisor.async_stream_nolink(1..4, __MODULE__, :sleep, [], @opts) + |> Enum.to_list == + [ok: 1, ok: 2, ok: 3, ok: 4] + end + + test "streams an enumerable without leaking tasks", %{supervisor: supervisor} do + assert supervisor + |> Task.Supervisor.async_stream_nolink(1..4, &sleep/1, @opts) + |> Enum.to_list == + [ok: 1, ok: 2, ok: 3, ok: 4] + refute_received _ + end + + test "streams an enumerable with slowest first", %{supervisor: supervisor} do + assert supervisor + |> Task.Supervisor.async_stream_nolink(4..1, &sleep/1, @opts) + |> Enum.to_list == + [ok: 4, ok: 3, ok: 2, ok: 1] + end + + test "streams an enumerable with exits", %{supervisor: supervisor} do + assert supervisor + |> Task.Supervisor.async_stream_nolink(1..4, &exit/1, @opts) + |> Enum.to_list == + [exit: 1, exit: 2, exit: 3, exit: 4] + end + + test "shuts down 
unused tasks", %{supervisor: supervisor} do + assert supervisor + |> Task.Supervisor.async_stream_nolink([0, :infinity, :infinity, :infinity], &sleep/1, @opts) + |> Enum.take(1) == + [ok: 0] + assert Process.info(self(), :links) == {:links, [supervisor]} + end + + test "shuts down unused tasks without leaking messages", %{supervisor: supervisor} do + assert supervisor + |> Task.Supervisor.async_stream_nolink([0, :infinity, :infinity, :infinity], &sleep/1, @opts) + |> Enum.take(1) == + [ok: 0] + refute_received _ + end end end diff --git a/lib/elixir/test/elixir/task_test.exs b/lib/elixir/test/elixir/task_test.exs index a75d80228b7..f68c3ccc48e 100644 --- a/lib/elixir/test/elixir/task_test.exs +++ b/lib/elixir/test/elixir/task_test.exs @@ -1,13 +1,9 @@ Code.require_file "test_helper.exs", __DIR__ defmodule TaskTest do - use ExUnit.Case, async: true - - setup do - :error_logger.tty(false) - on_exit fn -> :error_logger.tty(true) end - :ok - end + use ExUnit.Case + doctest Task + @moduletag :capture_log def wait_and_send(caller, atom) do send caller, :ready @@ -15,6 +11,25 @@ defmodule TaskTest do send caller, atom end + defp create_task_in_other_process do + caller = self() + spawn fn -> send caller, Task.async(fn -> nil end) end + receive do: (task -> task) + end + + defp create_dummy_task(reason) do + {pid, ref} = spawn_monitor(Kernel, :exit, [reason]) + receive do + {:DOWN, ^ref, _, _, _} -> + %Task{ref: ref, pid: pid, owner: self()} + end + end + + def sleep(number) do + Process.sleep(number) + number + end + test "async/1" do parent = self() fun = fn -> wait_and_send(parent, :done) end @@ -26,7 +41,7 @@ defmodule TaskTest do assert is_reference task.ref # Assert the link - {:links, links} = Process.info(self, :links) + {:links, links} = Process.info(self(), :links) assert task.pid in links receive do: (:ready -> :ok) @@ -48,7 +63,7 @@ defmodule TaskTest do task = Task.async(__MODULE__, :wait_and_send, [self(), :done]) assert task.__struct__ == Task - {:links, links} = Process.info(self, :links) + {:links, links} = Process.info(self(), :links) assert task.pid in links receive do: (:ready -> :ok) @@ -61,12 +76,43 @@ defmodule TaskTest do assert_receive :done end + test "start/1" do + parent = self() + fun = fn -> wait_and_send(parent, :done) end + {:ok, pid} = Task.start(fun) + + {:links, links} = Process.info(self(), :links) + refute pid in links + + receive do: (:ready -> :ok) + + {:name, fun_name} = :erlang.fun_info(fun, :name) + assert {__MODULE__, fun_name, 0} === :proc_lib.translate_initial_call(pid) + + send pid, true + assert_receive :done + end + + test "start/3" do + {:ok, pid} = Task.start(__MODULE__, :wait_and_send, [self(), :done]) + + {:links, links} = Process.info(self(), :links) + refute pid in links + + receive do: (:ready -> :ok) + + assert {__MODULE__, :wait_and_send, 2} === :proc_lib.translate_initial_call(pid) + + send pid, true + assert_receive :done + end + test "start_link/1" do parent = self() fun = fn -> wait_and_send(parent, :done) end {:ok, pid} = Task.start_link(fun) - {:links, links} = Process.info(self, :links) + {:links, links} = Process.info(self(), :links) assert pid in links receive do: (:ready -> :ok) @@ -81,7 +127,7 @@ defmodule TaskTest do test "start_link/3" do {:ok, pid} = Task.start_link(__MODULE__, :wait_and_send, [self(), :done]) - {:links, links} = Process.info(self, :links) + {:links, links} = Process.info(self(), :links) assert pid in links receive do: (:ready -> :ok) @@ -92,53 +138,457 @@ defmodule TaskTest do assert_receive :done end - test 
"await/1 exits on timeout" do - task = %Task{ref: make_ref()} - assert catch_exit(Task.await(task, 0)) == {:timeout, {Task, :await, [task, 0]}} + describe "await/2" do + test "exits on timeout" do + task = %Task{ref: make_ref(), owner: self()} + assert catch_exit(Task.await(task, 0)) == {:timeout, {Task, :await, [task, 0]}} + end + + test "exits on normal exit" do + task = Task.async(fn -> exit :normal end) + assert catch_exit(Task.await(task)) == {:normal, {Task, :await, [task, 5000]}} + end + + test "exits on task throw" do + Process.flag(:trap_exit, true) + task = Task.async(fn -> throw :unknown end) + assert {{{:nocatch, :unknown}, _}, {Task, :await, [^task, 5000]}} = + catch_exit(Task.await(task)) + end + + test "exits on task error" do + Process.flag(:trap_exit, true) + task = Task.async(fn -> raise "oops" end) + assert {{%RuntimeError{}, _}, {Task, :await, [^task, 5000]}} = + catch_exit(Task.await(task)) + end + + test "exits on task undef module error" do + Process.flag(:trap_exit, true) + task = Task.async(&:module_does_not_exist.undef/0) + assert {{:undef, [{:module_does_not_exist, :undef, _, _} | _]}, + {Task, :await, [^task, 5000]}} = + catch_exit(Task.await(task)) + end + + test "exits on task undef function error" do + Process.flag(:trap_exit, true) + task = Task.async(&TaskTest.undef/0) + assert {{:undef, [{TaskTest, :undef, _, _} | _]}, + {Task, :await, [^task, 5000]}} = + catch_exit(Task.await(task)) + end + + test "exits on task exit" do + Process.flag(:trap_exit, true) + task = Task.async(fn -> exit :unknown end) + assert {:unknown, {Task, :await, [^task, 5000]}} = + catch_exit(Task.await(task)) + end + + test "exits on :noconnection" do + ref = make_ref() + task = %Task{ref: ref, pid: self(), owner: self()} + send self(), {:DOWN, ref, :process, self(), :noconnection} + assert catch_exit(Task.await(task)) |> elem(0) == {:nodedown, :nonode@nohost} + end + + test "exits on :noconnection from named monitor" do + ref = make_ref() + task = %Task{ref: ref, pid: nil, owner: self()} + send self(), {:DOWN, ref, :process, {:name, :node}, :noconnection} + assert catch_exit(Task.await(task)) |> elem(0) == {:nodedown, :node} + end + + test "raises when invoked from a non-owner process" do + task = create_task_in_other_process() + message = "task #{inspect task} must be queried from the owner but was queried from #{inspect self()}" + assert_raise ArgumentError, message, fn -> Task.await(task, 1) end + end end - test "await/1 exits on normal exit" do - task = Task.async(fn -> exit :normal end) - assert catch_exit(Task.await(task)) == {:normal, {Task, :await, [task, 5000]}} + describe "yield/2" do + test "returns {:ok, result} when reply and :DOWN in message queue" do + task = %Task{ref: make_ref(), owner: self()} + send(self(), {task.ref, :result}) + send(self(), {:DOWN, task.ref, :process, self(), :abnormal}) + assert Task.yield(task, 0) == {:ok, :result} + refute_received {:DOWN, _, _, _, _} + end + + test "returns nil on timeout" do + task = %Task{ref: make_ref(), owner: self()} + assert Task.yield(task, 0) == nil + end + + test "return exit on normal exit" do + task = Task.async(fn -> exit :normal end) + assert Task.yield(task) == {:exit, :normal} + end + + test "exits on :noconnection" do + ref = make_ref() + task = %Task{ref: ref, pid: self(), owner: self()} + send self(), {:DOWN, ref, self(), self(), :noconnection} + assert catch_exit(Task.yield(task)) |> elem(0) == {:nodedown, :nonode@nohost} + end + + test "raises when invoked from a non-owner process" do + task = 
create_task_in_other_process() + message = "task #{inspect task} must be queried from the owner but was queried from #{inspect self()}" + assert_raise ArgumentError, message, fn -> Task.yield(task, 1) end + end end - test "await/1 exits on task throw" do - Process.flag(:trap_exit, true) - task = Task.async(fn -> throw :unknown end) - assert {{{:nocatch, :unknown}, _}, {Task, :await, [^task, 5000]}} = - catch_exit(Task.await(task)) + describe "yield_many/2" do + test "returns {:ok, result} when reply and :DOWN in message queue" do + task = %Task{ref: make_ref(), owner: self()} + send(self(), {task.ref, :result}) + send(self(), {:DOWN, task.ref, :process, self(), :abnormal}) + assert Task.yield_many([task], 0) == [{task, {:ok, :result}}] + refute_received {:DOWN, _, _, _, _} + end + + test "returns nil on timeout" do + task = %Task{ref: make_ref(), owner: self()} + assert Task.yield_many([task], 0) == [{task, nil}] + end + + test "return exit on normal exit" do + task = Task.async(fn -> exit :normal end) + assert Task.yield_many([task]) == [{task, {:exit, :normal}}] + end + + test "exits on :noconnection" do + ref = make_ref() + task = %Task{ref: ref, pid: self(), owner: self()} + send self(), {:DOWN, ref, :process, self(), :noconnection} + assert catch_exit(Task.yield_many([task])) |> elem(0) == {:nodedown, :nonode@nohost} + end + + test "raises when invoked from a non-owner process" do + task = create_task_in_other_process() + message = "task #{inspect task} must be queried from the owner but was queried from #{inspect self()}" + assert_raise ArgumentError, message, fn -> Task.yield_many([task], 1) end + end + + test "returns results from multiple tasks" do + task1 = %Task{ref: make_ref(), owner: self()} + task2 = %Task{ref: make_ref(), owner: self()} + task3 = %Task{ref: make_ref(), owner: self()} + + send(self(), {task1.ref, :result}) + send(self(), {:DOWN, task3.ref, :process, self(), :normal}) + + assert Task.yield_many([task1, task2, task3], 0) == + [{task1, {:ok, :result}}, {task2, nil}, {task3, {:exit, :normal}}] + end end - test "await/1 exits on task error" do - Process.flag(:trap_exit, true) - task = Task.async(fn -> raise "oops" end) - assert {{%RuntimeError{}, _}, {Task, :await, [^task, 5000]}} = - catch_exit(Task.await(task)) + describe "shutdown/2" do + test "returns {:ok, result} when reply and abnormal :DOWN in message queue" do + task = create_dummy_task(:abnormal) + send(self(), {task.ref, :result}) + send(self(), {:DOWN, task.ref, :process, task.pid, :abnormal}) + assert Task.shutdown(task) == {:ok, :result} + refute_received {:DOWN, _, _, _, _} + end + + test "returns {:ok, result} when reply and normal :DOWN in message queue" do + task = create_dummy_task(:normal) + send(self(), {task.ref, :result}) + send(self(), {:DOWN, task.ref, :process, task.pid, :normal}) + assert Task.shutdown(task) == {:ok, :result} + refute_received {:DOWN, _, _, _, _} + end + + test "returns {:ok, result} when reply and shutdown :DOWN in message queue" do + task = create_dummy_task(:shutdown) + send(self(), {task.ref, :result}) + send(self(), {:DOWN, task.ref, :process, task.pid, :shutdown}) + assert Task.shutdown(task) == {:ok, :result} + refute_received {:DOWN, _, _, _, _} + end + + test "returns nil on shutting down task" do + task = Task.async(:timer, :sleep, [:infinity]) + assert Task.shutdown(task) == nil + end + + test "returns exit on abnormal :DOWN in message queue" do + task = create_dummy_task(:abnormal) + send(self(), {:DOWN, task.ref, :process, task.pid, :abnormal}) + assert 
Task.shutdown(task) == {:exit, :abnormal} + end + + test "returns exit on normal :DOWN in message queue" do + task = create_dummy_task(:normal) + send(self(), {:DOWN, task.ref, :process, task.pid, :normal}) + assert Task.shutdown(task) == {:exit, :normal} + end + + test "returns nil on shutdown :DOWN in message queue" do + task = create_dummy_task(:shutdown) + send(self(), {:DOWN, task.ref, :process, task.pid, :shutdown}) + assert Task.shutdown(task) == nil + end + + test "returns exit on killed :DOWN in message queue" do + task = create_dummy_task(:killed) + send(self(), {:DOWN, task.ref, :process, task.pid, :killed}) + assert Task.shutdown(task) == {:exit, :killed} + end + + test "exits on noconnection :DOWN in message queue" do + task = create_dummy_task(:noconnection) + send(self(), {:DOWN, task.ref, :process, task.pid, :noconnection}) + assert catch_exit(Task.shutdown(task)) == + {{:nodedown, node()}, {Task, :shutdown, [task, 5000]}} + end + + test "raises if task PID is nil" do + task = %Task{ref: make_ref(), pid: nil} + assert_raise ArgumentError, "task #{inspect task} does not have an associated task process", + fn -> Task.shutdown(task) end + end + + test "raises when invoked from a non-owner process" do + task = create_task_in_other_process() + message = "task #{inspect task} must be queried from the owner but was queried from #{inspect self()}" + assert_raise ArgumentError, message, fn -> Task.shutdown(task) end + end + + test "returns nil on killing task" do + caller = self() + + task = Task.async(fn() -> + Process.flag(:trap_exit, true) + wait_and_send(caller, :ready) + Process.sleep(:infinity) + end) + + receive do: (:ready -> :ok) + + assert Task.shutdown(task, :brutal_kill) == nil + refute_received {:DOWN, _, _, _, _} + end + + test "returns {:exit, :noproc} if task handled" do + task = create_dummy_task(:noproc) + assert Task.shutdown(task) == {:exit, :noproc} + end end - test "await/1 exits on task exit" do - Process.flag(:trap_exit, true) - task = Task.async(fn -> exit :unknown end) - assert {:unknown, {Task, :await, [^task, 5000]}} = - catch_exit(Task.await(task)) + describe "shutdown/2 with :brutal_kill" do + test "returns {:ok, result} when reply and abnormal :DOWN in message queue" do + task = create_dummy_task(:abnormal) + send(self(), {task.ref, :result}) + send(self(), {:DOWN, task.ref, :process, task.pid, :abnormal}) + assert Task.shutdown(task, :brutal_kill) == {:ok, :result} + refute_received {:DOWN, _, _, _, _} + end + + test "returns {:ok, result} when reply and normal :DOWN in message queue" do + task = create_dummy_task(:normal) + send(self(), {task.ref, :result}) + send(self(), {:DOWN, task.ref, :process, task.pid, :normal}) + assert Task.shutdown(task, :brutal_kill) == {:ok, :result} + refute_received {:DOWN, _, _, _, _} + end + + test "returns {:ok, result} when reply and shutdown :DOWN in message queue" do + task = create_dummy_task(:shutdown) + send(self(), {task.ref, :result}) + send(self(), {:DOWN, task.ref, :process, task.pid, :shutdown}) + assert Task.shutdown(task, :brutal_kill) == {:ok, :result} + refute_received {:DOWN, _, _, _, _} + end + + test "returns nil on killed :DOWN in message queue" do + task = create_dummy_task(:killed) + send(self(), {:DOWN, task.ref, :process, task.pid, :killed}) + assert Task.shutdown(task, :brutal_kill) == nil + end + + test "returns exit on abnormal :DOWN in message queue" do + task = create_dummy_task(:abnormal) + send(self(), {:DOWN, task.ref, :process, task.pid, :abnormal}) + assert Task.shutdown(task, 
:brutal_kill) == {:exit, :abnormal} + end + + test "returns exit on normal :DOWN in message queue" do + task = create_dummy_task(:normal) + send(self(), {:DOWN, task.ref, :process, task.pid, :normal}) + assert Task.shutdown(task, :brutal_kill) == {:exit, :normal} + end + + test "returns exit on shutdown :DOWN in message queue" do + task = create_dummy_task(:shutdown) + send(self(), {:DOWN, task.ref, :process, task.pid, :shutdown}) + assert Task.shutdown(task, :brutal_kill) == {:exit, :shutdown} + end + + test "exits on noconnection :DOWN in message queue" do + task = create_dummy_task(:noconnection) + send(self(), {:DOWN, task.ref, :process, task.pid, :noconnection}) + assert catch_exit(Task.shutdown(task, :brutal_kill)) == + {{:nodedown, node()}, {Task, :shutdown, [task, :brutal_kill]}} + end + + test "returns exit on killing task after shutdown timeout" do + caller = self() + + task = Task.async(fn() -> + Process.flag(:trap_exit, true) + wait_and_send(caller, :ready) + Process.sleep(:infinity) + end) + + receive do: (:ready -> :ok) + assert Task.shutdown(task, 1) == {:exit, :killed} + end + + test "returns {:exit, :noproc} if task handled" do + task = create_dummy_task(:noproc) + assert Task.shutdown(task, :brutal_kill) == {:exit, :noproc} + end end - test "await/1 exits on :noconnection" do - ref = make_ref() - task = %Task{ref: ref, pid: self()} - send self(), {:DOWN, ref, self(), self(), :noconnection} - assert catch_exit(Task.await(task)) |> elem(0) == {:nodedown, :nonode@nohost} + describe "async_stream/2" do + test "timeout" do + assert catch_exit([:infinity] |> Task.async_stream(&sleep/1, [timeout: 0]) |> Enum.to_list) == + {:timeout, {Task.Supervised, :stream, [0]}} + end end - test "find/2" do - task = %Task{ref: make_ref} - assert Task.find([task], {make_ref, :ok}) == nil - assert Task.find([task], {task.ref, :ok}) == {:ok, task} - - assert Task.find([task], {:DOWN, make_ref, :process, self, :kill}) == nil - msg = {:DOWN, task.ref, :process, self, :kill} - assert catch_exit(Task.find([task], msg)) == - {:kill, {Task, :find, [[task], msg]}} + for {desc, concurrency} <- ["==": 4, "<": 2, ">": 8] do + describe "async_stream with max_concurrency #{desc} tasks" do + @opts [max_concurrency: concurrency] + + test "streams an enumerable with fun" do + assert 1..4 |> Task.async_stream(&sleep/1, @opts) |> Enum.to_list == + [ok: 1, ok: 2, ok: 3, ok: 4] + end + + test "streams an enumerable with mfa" do + assert 1..4 |> Task.async_stream(__MODULE__, :sleep, [], @opts) |> Enum.to_list == + [ok: 1, ok: 2, ok: 3, ok: 4] + end + + test "streams an enumerable without leaking tasks" do + assert 1..4 |> Task.async_stream(&sleep/1, @opts) |> Enum.to_list == + [ok: 1, ok: 2, ok: 3, ok: 4] + refute_received _ + end + + test "streams an enumerable with slowest first" do + Process.flag(:trap_exit, true) + assert 4..1 |> Task.async_stream(&sleep/1, @opts) |> Enum.to_list == + [ok: 4, ok: 3, ok: 2, ok: 1] + end + + test "streams an enumerable with exits" do + Process.flag(:trap_exit, true) + assert 1..4 |> Task.async_stream(&exit/1, @opts) |> Enum.to_list == + [exit: 1, exit: 2, exit: 3, exit: 4] + refute_received {:EXIT, _, _} + end + + test "shuts down unused tasks" do + assert [0, :infinity, :infinity, :infinity] |> Task.async_stream(&sleep/1, @opts) |> Enum.take(1) == + [ok: 0] + assert Process.info(self(), :links) == {:links, []} + end + + test "shuts down unused tasks without leaking messages" do + assert [0, :infinity, :infinity, :infinity] |> Task.async_stream(&sleep/1, @opts) |> Enum.take(1) == 
+ [ok: 0] + refute_received _ + end + + test "is zippable on success" do + task = 1..4 |> Task.async_stream(&sleep/1, @opts) |> Stream.map(&elem(&1, 1)) + assert Enum.zip(task, task) == + [{1, 1}, {2, 2}, {3, 3}, {4, 4}] + end + + test "is zippable on failure" do + Process.flag(:trap_exit, true) + task = 1..4 |> Task.async_stream(&exit/1, @opts) |> Stream.map(&elem(&1, 1)) + assert Enum.zip(task, task) == + [{1, 1}, {2, 2}, {3, 3}, {4, 4}] + end + + test "is zippable with slowest first" do + task = 4..1 |> Task.async_stream(&sleep/1, @opts) |> Stream.map(&elem(&1, 1)) + assert Enum.zip(task, task) == + [{4, 4}, {3, 3}, {2, 2}, {1, 1}] + end + + test "with inner halt on success" do + assert 1..8 |> Stream.take(4) |> Task.async_stream(&sleep/1, @opts) |> Enum.to_list == + [ok: 1, ok: 2, ok: 3, ok: 4] + end + + test "with inner halt on failure" do + Process.flag(:trap_exit, true) + assert 1..8 |> Stream.take(4) |> Task.async_stream(&exit/1, @opts) |> Enum.to_list == + [exit: 1, exit: 2, exit: 3, exit: 4] + end + + test "with inner halt and slowest first" do + assert 8..1 |> Stream.take(4) |> Task.async_stream(&sleep/1, @opts) |> Enum.to_list == + [ok: 8, ok: 7, ok: 6, ok: 5] + end + + test "with outer halt on success" do + assert 1..8 |> Task.async_stream(&sleep/1, @opts) |> Enum.take(4) == + [ok: 1, ok: 2, ok: 3, ok: 4] + end + + test "with outer halt on failure" do + Process.flag(:trap_exit, true) + assert 1..8 |> Task.async_stream(&exit/1, @opts) |> Enum.take(4) == + [exit: 1, exit: 2, exit: 3, exit: 4] + end + + test "with outer halt and slowest first" do + assert 8..1 |> Task.async_stream(&sleep/1, @opts) |> Enum.take(4) == + [ok: 8, ok: 7, ok: 6, ok: 5] + end + + test "terminates inner effect" do + stream = + 1..4 + |> Task.async_stream(&sleep/1, @opts) + |> Stream.transform(fn -> :ok end, + fn x, acc -> {[x], acc} end, + fn _ -> Process.put(:stream_transform, true) end) + + Process.put(:stream_transform, false) + assert Enum.to_list(stream) == [ok: 1, ok: 2, ok: 3, ok: 4] + assert Process.get(:stream_transform) + end + + test "terminates outer effect" do + stream = + 1..4 + |> Stream.transform(fn -> :ok end, + fn x, acc -> {[x], acc} end, + fn _ -> Process.put(:stream_transform, true) end) + |> Task.async_stream(&sleep/1, @opts) + + Process.put(:stream_transform, false) + assert Enum.to_list(stream) == [ok: 1, ok: 2, ok: 3, ok: 4] + assert Process.get(:stream_transform) + end + + test "with :on_timeout set to :kill_task" do + opts = Keyword.merge(@opts, on_timeout: :kill_task, timeout: 50) + assert [100, 1, 100, 1] |> Task.async_stream(&sleep/1, opts) |> Enum.to_list() == + [exit: :timeout, ok: 1, exit: :timeout, ok: 1] + refute_received _ + end + end end - end diff --git a/lib/elixir/test/elixir/test_helper.exs b/lib/elixir/test/elixir/test_helper.exs index 6edf43dd100..21b60cdf5e3 100644 --- a/lib/elixir/test/elixir/test_helper.exs +++ b/lib/elixir/test/elixir/test_helper.exs @@ -1,4 +1,5 @@ -ExUnit.start [trace: "--trace" in System.argv] +ExUnit.start [trace: "--trace" in System.argv, + assert_receive_timeout: 500] # Beam files compiled on demand path = Path.expand("../../tmp/beams", __DIR__) @@ -18,15 +19,15 @@ defmodule PathHelpers do end def fixture_path(extra) do - Path.join(fixture_path, extra) + Path.join(fixture_path(), extra) end def tmp_path(extra) do - Path.join(tmp_path, extra) + Path.join(tmp_path(), extra) end def elixir(args) do - runcmd(elixir_executable, args) + runcmd(elixir_executable(), args) end def elixir_executable do @@ -34,7 +35,7 @@ defmodule PathHelpers do 
end def elixirc(args) do - runcmd(elixirc_executable, args) + runcmd(elixirc_executable(), args) end def elixirc_executable do @@ -48,58 +49,21 @@ defmodule PathHelpers do res end - defp runcmd(executable,args) do - :os.cmd :binary.bin_to_list("#{executable} #{IO.chardata_to_string(args)}#{redirect_std_err_on_win}") + defp runcmd(executable, args) do + :os.cmd :binary.bin_to_list("#{executable} #{IO.chardata_to_string(args)}#{redirect_std_err_on_win()}") end defp executable_path(name) do - Path.expand("../../../../bin/#{name}#{executable_extension}", __DIR__) + Path.expand("../../../../bin/#{name}#{executable_extension()}", __DIR__) end if match? {:win32, _}, :os.type do - def is_win?, do: true + def windows?, do: true def executable_extension, do: ".bat" def redirect_std_err_on_win, do: " 2>&1" else - def is_win?, do: false + def windows?, do: false def executable_extension, do: "" def redirect_std_err_on_win, do: "" end end - -defmodule CompileAssertion do - import ExUnit.Assertions - - def assert_compile_fail(exception, string) do - case format_rescue(string) do - {^exception, _} -> :ok - error -> - raise ExUnit.AssertionError, - left: inspect(elem(error, 0)), - right: inspect(exception), - message: "Expected match" - end - end - - def assert_compile_fail(exception, message, string) do - case format_rescue(string) do - {^exception, ^message} -> :ok - error -> - raise ExUnit.AssertionError, - left: "#{inspect elem(error, 0)}[message: #{inspect elem(error, 1)}]", - right: "#{inspect exception}[message: #{inspect message}]", - message: "Expected match" - end - end - - defp format_rescue(expr) do - result = try do - :elixir.eval(to_char_list(expr), []) - nil - rescue - error -> {error.__struct__, Exception.message(error)} - end - - result || flunk(message: "Expected expression to fail") - end -end diff --git a/lib/elixir/test/elixir/tuple_test.exs b/lib/elixir/test/elixir/tuple_test.exs index 54af5e603b6..92f3cf92cf0 100644 --- a/lib/elixir/test/elixir/tuple_test.exs +++ b/lib/elixir/test/elixir/tuple_test.exs @@ -3,33 +3,37 @@ Code.require_file "test_helper.exs", __DIR__ defmodule TupleTest do use ExUnit.Case, async: true - test :elem do + doctest Tuple + + # Tuple-related functions in the Kernel module. + + test "Kernel.elem/2" do assert elem({:a, :b, :c}, 1) == :b end - test :put_elem do + test "Kernel.put_elem/3" do assert put_elem({:a, :b, :c}, 1, :d) == {:a, :d, :c} end - test :keywords do + test "keyword syntax is supported in tuple literals" do assert {1, 2, three: :four} == {1, 2, [three: :four]} end - test :optional_comma do + test "optional comma is supported in tuple literals" do assert {1} == {1,} assert {1, 2, 3} == {1, 2, 3,} end - test :partial_application do + test "partial application" do assert (&{&1, 2}).(1) == {1, 2} assert (&{&1, &2}).(1, 2) == {1, 2} assert (&{&2, &1}).(2, 1) == {1, 2} end # Tuple module - # We check two variants due to inlining. + # We check two variants of each function due to inlining. 
- test :duplicate do + test "duplicate/2" do assert Tuple.duplicate(:foo, 0) == {} assert Tuple.duplicate(:foo, 3) == {:foo, :foo, :foo} @@ -38,17 +42,24 @@ defmodule TupleTest do assert mod.duplicate(:foo, 3) == {:foo, :foo, :foo} end - test :insert_at do + test "insert_at/3" do assert Tuple.insert_at({:bar, :baz}, 0, :foo) == {:foo, :bar, :baz} mod = Tuple assert mod.insert_at({:bar, :baz}, 0, :foo) == {:foo, :bar, :baz} end - test :delete_at do + test "append/2" do + assert Tuple.append({:foo, :bar}, :baz) == {:foo, :bar, :baz} + + mod = Tuple + assert mod.append({:foo, :bar}, :baz) == {:foo, :bar, :baz} + end + + test "delete_at/2" do assert Tuple.delete_at({:foo, :bar, :baz}, 0) == {:bar, :baz} mod = Tuple assert mod.delete_at({:foo, :bar, :baz}, 0) == {:bar, :baz} end -end \ No newline at end of file +end diff --git a/lib/elixir/test/elixir/uri_test.exs b/lib/elixir/test/elixir/uri_test.exs index 76126d74766..24fe985d42e 100644 --- a/lib/elixir/test/elixir/uri_test.exs +++ b/lib/elixir/test/elixir/uri_test.exs @@ -3,19 +3,21 @@ Code.require_file "test_helper.exs", __DIR__ defmodule URITest do use ExUnit.Case, async: true - test :encode do + doctest URI + + test "encode/1,2" do assert URI.encode("4_test.is-s~") == "4_test.is-s~" assert URI.encode("\r\n&<%>\" ゆ", &URI.char_unreserved?/1) == "%0D%0A%26%3C%25%3E%22%20%E3%82%86" end - test :encode_www_form do + test "encode_www_form/1" do assert URI.encode_www_form("4test ~1.x") == "4test+~1.x" assert URI.encode_www_form("poll:146%") == "poll%3A146%25" assert URI.encode_www_form("/\n+/ゆ") == "%2F%0A%2B%2F%E3%82%86" end - test :encode_query do + test "encode_query/1" do assert URI.encode_query([{:foo, :bar}, {:baz, :quux}]) == "foo=bar&baz=quux" assert URI.encode_query([{"foo", "bar"}, {"baz", "quux"}]) == "foo=bar&baz=quux" assert URI.encode_query([{"foo z", :bar}]) == "foo+z=bar" @@ -23,12 +25,18 @@ defmodule URITest do assert_raise ArgumentError, fn -> URI.encode_query([{"foo", 'bar'}]) end + + assert_raise ArgumentError, fn -> + URI.encode_query([{'foo', "bar"}]) + end end - test :decode_query do - assert URI.decode_query("", []) == [] + test "decode_query/1,2" do assert URI.decode_query("", %{}) == %{} + assert URI.decode_query("safe=off", %{"cookie" => "foo"}) == + %{"safe" => "off", "cookie" => "foo"} + assert URI.decode_query("q=search%20query&cookie=ab%26cd&block+buster=") == %{"block buster" => "", "cookie" => "ab&cd", "q" => "search query"} @@ -43,13 +51,13 @@ defmodule URITest do %{"something" => "weird=happening"} end - test :decoder do + test "query_decoder/1" do decoder = URI.query_decoder("q=search%20query&cookie=ab%26cd&block%20buster=") expected = [{"q", "search query"}, {"cookie", "ab&cd"}, {"block buster", ""}] assert Enum.map(decoder, &(&1)) == expected end - test :decode do + test "decode/1" do assert URI.decode("%0D%0A%26%3C%25%3E%22%20%E3%82%86") == "\r\n&<%>\" ゆ" assert URI.decode("%2f%41%4a%55") == "/AJU" assert URI.decode("4_t+st.is-s~") == "4_t+st.is-s~" @@ -62,135 +70,153 @@ defmodule URITest do end end - test :decode_www_form do + test "decode_www_form/1" do assert URI.decode_www_form("%3Eval+ue%2B") == ">val ue+" assert URI.decode_www_form("%E3%82%86+") == "ゆ " - end - test :parse_uri do - assert URI.parse(uri = %URI{scheme: "http", host: "foo.com"}) == uri + assert_raise ArgumentError, fn -> + URI.decode_www_form("%ZZ") + end end - test :parse_http do - assert %URI{scheme: "http", host: "foo.com", path: "/path/to/something", - query: "foo=bar&bar=foo", fragment: "fragment", port: 80, - authority: "foo.com", 
userinfo: nil} == - URI.parse("http://foo.com/path/to/something?foo=bar&bar=foo#fragment") - end + describe "parse/1" do + test "returns the given URI if a %URI{} struct is given" do + assert URI.parse(uri = %URI{scheme: "http", host: "foo.com"}) == uri + end - test :parse_https do - assert %URI{scheme: "https", host: "foo.com", authority: "foo.com", - query: nil, fragment: nil, port: 443, path: nil, userinfo: nil} == - URI.parse("https://foo.com/") - end + test "works with HTTP scheme" do + assert %URI{scheme: "http", host: "foo.com", path: "/path/to/something", + query: "foo=bar&bar=foo", fragment: "fragment", port: 80, + authority: "foo.com", userinfo: nil} == + URI.parse("http://foo.com/path/to/something?foo=bar&bar=foo#fragment") + end - test :parse_file do - assert %URI{scheme: "file", host: nil, path: "/foo/bar/baz", userinfo: nil, - query: nil, fragment: nil, port: nil, authority: nil} == - URI.parse("file:///foo/bar/baz") - end + test "works with HTTPS scheme" do + assert %URI{scheme: "https", host: "foo.com", authority: "foo.com", + query: nil, fragment: nil, port: 443, path: nil, userinfo: nil} == + URI.parse("https://foo.com/") + end - test :parse_ftp do - assert %URI{scheme: "ftp", host: "private.ftp-servers.example.com", - userinfo: "user001:secretpassword", authority: "user001:secretpassword@private.ftp-servers.example.com", - path: "/mydirectory/myfile.txt", query: nil, fragment: nil, - port: 21} == - URI.parse("ftp://user001:secretpassword@private.ftp-servers.example.com/mydirectory/myfile.txt") - end + test "works with \"file\" scheme" do + assert %URI{scheme: "file", host: nil, path: "/foo/bar/baz", userinfo: nil, + query: nil, fragment: nil, port: nil, authority: nil} == + URI.parse("file:///foo/bar/baz") + end - test :parse_sftp do - assert %URI{scheme: "sftp", host: "private.ftp-servers.example.com", - userinfo: "user001:secretpassword", authority: "user001:secretpassword@private.ftp-servers.example.com", - path: "/mydirectory/myfile.txt", query: nil, fragment: nil, port: 22} == - URI.parse("sftp://user001:secretpassword@private.ftp-servers.example.com/mydirectory/myfile.txt") - end + test "works with FTP scheme" do + assert %URI{scheme: "ftp", host: "private.ftp-server.example.com", + userinfo: "user001:password", authority: "user001:password@private.ftp-server.example.com", + path: "/my_directory/my_file.txt", query: nil, fragment: nil, + port: 21} == + URI.parse("ftp://user001:password@private.ftp-server.example.com/my_directory/my_file.txt") + end - test :parse_tftp do - assert %URI{scheme: "tftp", host: "private.ftp-servers.example.com", - userinfo: "user001:secretpassword", authority: "user001:secretpassword@private.ftp-servers.example.com", - path: "/mydirectory/myfile.txt", query: nil, fragment: nil, port: 69} == - URI.parse("tftp://user001:secretpassword@private.ftp-servers.example.com/mydirectory/myfile.txt") - end + test "works with SFTP scheme" do + assert %URI{scheme: "sftp", host: "private.ftp-server.example.com", + userinfo: "user001:password", authority: "user001:password@private.ftp-server.example.com", + path: "/my_directory/my_file.txt", query: nil, fragment: nil, port: 22} == + URI.parse("sftp://user001:password@private.ftp-server.example.com/my_directory/my_file.txt") + end + test "works with TFTP scheme" do + assert %URI{scheme: "tftp", host: "private.ftp-server.example.com", + userinfo: "user001:password", authority: "user001:password@private.ftp-server.example.com", + path: "/my_directory/my_file.txt", query: 
nil, fragment: nil, port: 69} == + URI.parse("tftp://user001:password@private.ftp-server.example.com/my_directory/my_file.txt") + end - test :parse_ldap do - assert %URI{scheme: "ldap", host: nil, authority: nil, userinfo: nil, - path: "/dc=example,dc=com", query: "?sub?(givenName=John)", - fragment: nil, port: 389} == - URI.parse("ldap:///dc=example,dc=com??sub?(givenName=John)") - assert %URI{scheme: "ldap", host: "ldap.example.com", authority: "ldap.example.com", - userinfo: nil, path: "/cn=John%20Doe,dc=example,dc=com", fragment: nil, - port: 389, query: nil} == - URI.parse("ldap://ldap.example.com/cn=John%20Doe,dc=example,dc=com") - end - test :parse_splits_authority do - assert %URI{scheme: "http", host: "foo.com", path: nil, - query: nil, fragment: nil, port: 4444, - authority: "foo:bar@foo.com:4444", - userinfo: "foo:bar"} == - URI.parse("http://foo:bar@foo.com:4444/") - assert %URI{scheme: "https", host: "foo.com", path: nil, - query: nil, fragment: nil, port: 443, - authority: "foo:bar@foo.com", userinfo: "foo:bar"} == - URI.parse("https://foo:bar@foo.com/") - assert %URI{scheme: "http", host: "foo.com", path: nil, - query: nil, fragment: nil, port: 4444, - authority: "foo.com:4444", userinfo: nil} == - URI.parse("http://foo.com:4444/") - end + test "works with LDAP scheme" do + assert %URI{scheme: "ldap", host: nil, authority: nil, userinfo: nil, + path: "/dc=example,dc=com", query: "?sub?(givenName=John)", + fragment: nil, port: 389} == + URI.parse("ldap:///dc=example,dc=com??sub?(givenName=John)") + assert %URI{scheme: "ldap", host: "ldap.example.com", authority: "ldap.example.com", + userinfo: nil, path: "/cn=John%20Doe,dc=example,dc=com", fragment: nil, + port: 389, query: nil} == + URI.parse("ldap://ldap.example.com/cn=John%20Doe,dc=example,dc=com") + end - test :default_port do - assert URI.default_port("http") == 80 - assert URI.default_port("unknown") == nil + test "splits authority" do + assert %URI{scheme: "http", host: "foo.com", path: nil, + query: nil, fragment: nil, port: 4444, + authority: "foo:bar@foo.com:4444", + userinfo: "foo:bar"} == + URI.parse("http://foo:bar@foo.com:4444/") + assert %URI{scheme: "https", host: "foo.com", path: nil, + query: nil, fragment: nil, port: 443, + authority: "foo:bar@foo.com", userinfo: "foo:bar"} == + URI.parse("https://foo:bar@foo.com/") + assert %URI{scheme: "http", host: "foo.com", path: nil, + query: nil, fragment: nil, port: 4444, + authority: "foo.com:4444", userinfo: nil} == + URI.parse("http://foo.com:4444/") + end - URI.default_port("unknown", 13) - assert URI.default_port("unknown") == 13 - end + test "can parse bad URIs" do + assert URI.parse("") + assert URI.parse("https:??@?F?@#>F//23/") - test :parse_bad_uris do - assert URI.parse("https:??@?F?@#>F//23/") - assert URI.parse("") - assert URI.parse(":https") - assert URI.parse("https") - end + assert URI.parse(":https").path == ":https" + assert URI.parse("https").path == "https" + assert URI.parse("ht\0tps://foo.com").path == "ht\0tps://foo.com" + end + + test "can parse IPv6 addresses" do + addresses = [ + "::", # undefined + "::1", # loopback + "1080::8:800:200C:417A", # unicast + "FF01::101", # multicast + "2607:f3f0:2:0:216:3cff:fef0:174a", # abbreviated + "2607:f3F0:2:0:216:3cFf:Fef0:174A", # mixed hex case + "2051:0db8:2d5a:3521:8313:ffad:1242:8e2e", # complete + "::00:192.168.10.184" # embedded IPv4 + ] + + Enum.each(addresses, fn(addr) -> + simple_uri = URI.parse("http://[#{addr}]/") + assert 
simple_uri.authority == "[#{addr}]" + assert simple_uri.host == addr + + userinfo_uri = URI.parse("http://user:pass@[#{addr}]/") + assert userinfo_uri.authority == "user:pass@[#{addr}]" + assert userinfo_uri.host == addr + assert userinfo_uri.userinfo == "user:pass" + + port_uri = URI.parse("http://[#{addr}]:2222/") + assert port_uri.authority == "[#{addr}]:2222" + assert port_uri.host == addr + assert port_uri.port == 2222 + + userinfo_port_uri = URI.parse("http://user:pass@[#{addr}]:2222/") + assert userinfo_port_uri.authority == "user:pass@[#{addr}]:2222" + assert userinfo_port_uri.host == addr + assert userinfo_port_uri.userinfo == "user:pass" + assert userinfo_port_uri.port == 2222 + end) + end - test :ipv6_addresses do - addrs = [ - "::", # undefined - "::1", # loopback - "1080::8:800:200C:417A", # unicast - "FF01::101", # multicast - "2607:f3f0:2:0:216:3cff:fef0:174a", # abbreviated - "2607:f3F0:2:0:216:3cFf:Fef0:174A", # mixed hex case - "2051:0db8:2d5a:3521:8313:ffad:1242:8e2e", # complete - "::00:192.168.10.184" # embedded IPv4 - ] - - Enum.each addrs, fn(addr) -> - simple_uri = URI.parse("http://[#{addr}]/") - assert simple_uri.host == addr - - userinfo_uri = URI.parse("http://user:pass@[#{addr}]/") - assert userinfo_uri.host == addr - assert userinfo_uri.userinfo == "user:pass" - - port_uri = URI.parse("http://[#{addr}]:2222/") - assert port_uri.host == addr - assert port_uri.port == 2222 - - userinfo_port_uri = URI.parse("http://user:pass@[#{addr}]:2222/") - assert userinfo_port_uri.host == addr - assert userinfo_port_uri.userinfo == "user:pass" - assert userinfo_port_uri.port == 2222 + test "downcases the scheme" do + assert URI.parse("hTtP://google.com").scheme == "http" end end - test :downcase_scheme do - assert URI.parse("hTtP://google.com").scheme == "http" + test "default_port/1,2" do + assert URI.default_port("http") == 80 + try do + URI.default_port("http", 8000) + assert URI.default_port("http") == 8000 + after + URI.default_port("http", 80) + end + + assert URI.default_port("unknown") == nil + URI.default_port("unknown", 13) + assert URI.default_port("unknown") == 13 end - test :to_string do + test "to_string/1 and Kernel.to_string/1" do assert to_string(URI.parse("http://google.com/")) == "http://google.com/" assert to_string(URI.parse("http://google.com:443/")) == "http://google.com:443/" assert to_string(URI.parse("https://google.com/")) == "https://google.com/" @@ -198,5 +224,70 @@ defmodule URITest do assert to_string(URI.parse("http://google.com/elixir")) == "http://google.com/elixir" assert to_string(URI.parse("http://google.com/?q=lol")) == "http://google.com/?q=lol" assert to_string(URI.parse("http://google.com/?q=lol#omg")) == "http://google.com/?q=lol#omg" + assert to_string(URI.parse("//google.com/elixir")) == "//google.com/elixir" + assert to_string(URI.parse("//google.com:8080/elixir")) == "//google.com:8080/elixir" + assert to_string(URI.parse("//user:password@google.com/")) == "//user:password@google.com/" + assert to_string(URI.parse("http://[2001:db8::]:8080/")) == "http://[2001:db8::]:8080/" + assert to_string(URI.parse("http://[2001:db8::]/")) == "http://[2001:db8::]/" + + assert URI.to_string(URI.parse("http://google.com/")) == "http://google.com/" + assert URI.to_string(URI.parse("//user:password@google.com/")) == "//user:password@google.com/" + end + + test "merge/2" do + assert_raise 
ArgumentError, "you must merge onto an absolute URI", fn -> + URI.merge("/relative", "") + end + + assert URI.merge("http://google.com/foo", "http://example.com/baz") |> to_string == "http://example.com/baz" + assert URI.merge("http://google.com/foo", "//example.com/baz") |> to_string == "http://example.com/baz" + + assert URI.merge("http://example.com/", URI.parse("/foo")) |> to_string == "http://example.com/foo" + + base = URI.parse("http://example.com/foo/bar") + assert URI.merge(base, "") |> to_string == "http://example.com/foo/bar" + assert URI.merge(base, "#fragment") |> to_string == "http://example.com/foo/bar#fragment" + assert URI.merge(base, "?query") |> to_string == "http://example.com/foo/bar?query" + assert URI.merge(base, %URI{path: ""}) |> to_string == "http://example.com/foo/bar" + assert URI.merge(base, %URI{path: "", fragment: "fragment"}) |> to_string == "http://example.com/foo/bar#fragment" + + base = URI.parse("http://example.com/") + assert URI.merge(base, "/foo") |> to_string == "http://example.com/foo" + assert URI.merge(base, "foo") |> to_string == "http://example.com/foo" + + base = URI.parse("http://example.com/foo/bar") + assert URI.merge(base, "/baz") |> to_string == "http://example.com/baz" + assert URI.merge(base, "baz") |> to_string == "http://example.com/foo/baz" + assert URI.merge(base, "../baz") |> to_string == "http://example.com/baz" + assert URI.merge(base, ".././baz") |> to_string == "http://example.com/baz" + assert URI.merge(base, "./baz") |> to_string == "http://example.com/foo/baz" + assert URI.merge(base, "bar/./baz") |> to_string == "http://example.com/foo/bar/baz" + + base = URI.parse("http://example.com/foo/bar/") + assert URI.merge(base, "/baz") |> to_string == "http://example.com/baz" + assert URI.merge(base, "baz") |> to_string == "http://example.com/foo/bar/baz" + assert URI.merge(base, "../baz") |> to_string == "http://example.com/foo/baz" + assert URI.merge(base, ".././baz") |> to_string == "http://example.com/foo/baz" + assert URI.merge(base, "./baz") |> to_string == "http://example.com/foo/bar/baz" + assert URI.merge(base, "bar/./baz") |> to_string == "http://example.com/foo/bar/bar/baz" + + base = URI.parse("http://example.com/foo/bar/baz") + assert URI.merge(base, "../../foobar") |> to_string == "http://example.com/foobar" + assert URI.merge(base, "../../../foobar") |> to_string == "http://example.com/foobar" + assert URI.merge(base, "../../../../../../foobar") |> to_string == "http://example.com/foobar" + + base = URI.parse("http://example.com/bar") + assert URI.merge(base, "baz") |> to_string == "http://example.com/baz" + + base = URI.parse("http://example.com/foo/bar") + assert URI.merge(base, "baz") |> to_string == "http://example.com/foo/baz" + + base = URI.parse("http://example.com/foo?query1") + assert URI.merge(base, "?query2") |> to_string == "http://example.com/foo?query2" + assert URI.merge(base, "") |> to_string == "http://example.com/foo?query1" + + base = URI.parse("http://example.com/foo#fragment1") + assert URI.merge(base, "#fragment2") |> to_string == "http://example.com/foo#fragment2" + assert URI.merge(base, "") |> to_string == 
"http://example.com/foo" end end diff --git a/lib/elixir/test/elixir/version_test.exs b/lib/elixir/test/elixir/version_test.exs index 04c96b5d08f..ca71ff25e5b 100644 --- a/lib/elixir/test/elixir/version_test.exs +++ b/lib/elixir/test/elixir/version_test.exs @@ -1,199 +1,242 @@ Code.require_file "test_helper.exs", __DIR__ defmodule VersionTest do - use ExUnit.Case, async: true - alias Version.Parser, as: P - alias Version, as: V - - test "compare" do - assert :gt == V.compare("1.0.1", "1.0.0") - assert :gt == V.compare("1.1.0", "1.0.1") - assert :gt == V.compare("2.1.1", "1.2.2") - assert :gt == V.compare("1.0.0", "1.0.0-dev") - assert :gt == V.compare("1.2.3-dev", "0.1.2") - assert :gt == V.compare("1.0.0-a.b", "1.0.0-a") - assert :gt == V.compare("1.0.0-b", "1.0.0-a.b") - assert :gt == V.compare("1.0.0-a", "1.0.0-0") - assert :gt == V.compare("1.0.0-a.b", "1.0.0-a.a") - - assert :lt == V.compare("1.0.0", "1.0.1") - assert :lt == V.compare("1.0.1", "1.1.0") - assert :lt == V.compare("1.2.2", "2.1.1") - assert :lt == V.compare("1.0.0-dev", "1.0.0") - assert :lt == V.compare("0.1.2", "1.2.3-dev") - assert :lt == V.compare("1.0.0-a", "1.0.0-a.b") - assert :lt == V.compare("1.0.0-a.b", "1.0.0-b") - assert :lt == V.compare("1.0.0-0", "1.0.0-a") - assert :lt == V.compare("1.0.0-a.a", "1.0.0-a.b") - - assert :eq == V.compare("1.0.0", "1.0.0") - assert :eq == V.compare("1.0.0-dev", "1.0.0-dev") - assert :eq == V.compare("1.0.0-a", "1.0.0-a") - end - - test "invalid compare" do - assert_raise V.InvalidVersionError, fn -> - V.compare("1.0", "1.0.0") + use ExUnit.Case, async: true + + doctest Version + + alias Version.Parser + + test "compare/2 with valid versions" do + assert Version.compare("1.0.1", "1.0.0") == :gt + assert Version.compare("1.1.0", "1.0.1") == :gt + assert Version.compare("2.1.1", "1.2.2") == :gt + assert Version.compare("1.0.0", "1.0.0-dev") == :gt + assert Version.compare("1.2.3-dev", "0.1.2") == :gt + assert Version.compare("1.0.0-a.b", "1.0.0-a") == :gt + assert Version.compare("1.0.0-b", "1.0.0-a.b") == :gt + assert Version.compare("1.0.0-a", "1.0.0-0") == :gt + assert Version.compare("1.0.0-a.b", "1.0.0-a.a") == :gt + + assert Version.compare("1.0.0", "1.0.1") == :lt + assert Version.compare("1.0.1", "1.1.0") == :lt + assert Version.compare("1.2.2", "2.1.1") == :lt + assert Version.compare("1.0.0-dev", "1.0.0") == :lt + assert Version.compare("0.1.2", "1.2.3-dev") == :lt + assert Version.compare("1.0.0-a", "1.0.0-a.b") == :lt + assert Version.compare("1.0.0-a.b", "1.0.0-b") == :lt + assert Version.compare("1.0.0-0", "1.0.0-a") == :lt + assert Version.compare("1.0.0-a.a", "1.0.0-a.b") == :lt + + assert Version.compare("1.0.0", "1.0.0") == :eq + assert Version.compare("1.0.0-dev", "1.0.0-dev") == :eq + assert Version.compare("1.0.0-a", "1.0.0-a") == :eq + end + + test "compare/2 with invalid versions" do + assert_raise Version.InvalidVersionError, fn -> + Version.compare("1.0", "1.0.0") end - assert_raise V.InvalidVersionError, fn -> - V.compare("1.0.0-dev", "1.0") + assert_raise Version.InvalidVersionError, fn -> + Version.compare("1.0.0-dev", "1.0") end - assert_raise V.InvalidVersionError, fn -> - V.compare("foo", "1.0.0-a") + assert_raise Version.InvalidVersionError, fn -> + Version.compare("foo", "1.0.0-a") end end test "lexes specifications properly" do - assert P.lexer("== != > >= < <= ~>", []) == [:'==', :'!=', :'>', :'>=', :'<', :'<=', :'~>'] - assert P.lexer("2.3.0", []) == [:'==', "2.3.0"] - assert P.lexer("!2.3.0", []) == [:'!=', "2.3.0"] - assert 
P.lexer(">>=", []) == [:'>', :'>='] - assert P.lexer(">2.4.0", []) == [:'>', "2.4.0"] - assert P.lexer(" > 2.4.0", []) == [:'>', "2.4.0"] + assert Parser.lexer("== != > >= < <= ~>", []) == [:'==', :'!=', :'>', :'>=', :'<', :'<=', :'~>'] + assert Parser.lexer("2.3.0", []) == [:'==', "2.3.0"] + assert Parser.lexer("!2.3.0", []) == [:'!=', "2.3.0"] + assert Parser.lexer(">>=", []) == [:'>', :'>='] + assert Parser.lexer(">2.4.0", []) == [:'>', "2.4.0"] + assert Parser.lexer("> 2.4.0", []) == [:'>', "2.4.0"] + assert Parser.lexer(" > 2.4.0", []) == [:'>', "2.4.0"] end - test "parse" do - assert {:ok, %V{major: 1, minor: 2, patch: 3}} = V.parse("1.2.3") - assert {:ok, %V{major: 1, minor: 4, patch: 5}} = V.parse("1.4.5+ignore") - assert {:ok, %V{major: 1, minor: 4, patch: 5, pre: ["6-g3318bd5"]}} = V.parse("1.4.5-6-g3318bd5") - assert {:ok, %V{major: 1, minor: 4, patch: 5, pre: [6, 7, "eight"]}} = V.parse("1.4.5-6.7.eight") - assert {:ok, %V{major: 1, minor: 4, patch: 5, pre: ["6-g3318bd5"]}} = V.parse("1.4.5-6-g3318bd5+ignore") - - assert :error = V.parse("foobar") - assert :error = V.parse("2.3") - assert :error = V.parse("2") - assert :error = V.parse("2.3.0-01") + test "parse/1" do + assert {:ok, %Version{major: 1, minor: 2, patch: 3}} = Version.parse("1.2.3") + assert {:ok, %Version{major: 1, minor: 4, patch: 5}} = Version.parse("1.4.5+ignore") + assert {:ok, %Version{major: 0, minor: 0, patch: 1}} = Version.parse("0.0.1+sha.0702245") + assert {:ok, %Version{major: 1, minor: 4, patch: 5, pre: ["6-g3318bd5"]}} = Version.parse("1.4.5-6-g3318bd5") + assert {:ok, %Version{major: 1, minor: 4, patch: 5, pre: [6, 7, "eight"]}} = Version.parse("1.4.5-6.7.eight") + assert {:ok, %Version{major: 1, minor: 4, patch: 5, pre: ["6-g3318bd5"]}} = Version.parse("1.4.5-6-g3318bd5+ignore") + + assert Version.parse("foobar") == :error + assert Version.parse("2") == :error + assert Version.parse("2.") == :error + assert Version.parse("2.3") == :error + assert Version.parse("2.3.") == :error + assert Version.parse("2.3.0-") == :error + assert Version.parse("2.3.0+") == :error + assert Version.parse("2.3.0.") == :error + assert Version.parse("2.3.0.4") == :error + assert Version.parse("2.3.-rc.1") == :error + assert Version.parse("2.3.+rc.1") == :error + assert Version.parse("2.3.0-01") == :error + assert Version.parse("2.3.00-1") == :error + assert Version.parse("2.3.00") == :error + assert Version.parse("2.03.0") == :error + assert Version.parse("02.3.0") == :error end - test "to_string" do - assert V.parse("1.0.0") |> elem(1) |> to_string == "1.0.0" - assert V.parse("1.0.0-dev") |> elem(1) |> to_string == "1.0.0-dev" - assert V.parse("1.0.0+lol") |> elem(1) |> to_string == "1.0.0+lol" - assert V.parse("1.0.0-dev+lol") |> elem(1) |> to_string == "1.0.0-dev+lol" + test "Kernel.to_string/1" do + assert Version.parse!("1.0.0") |> to_string == "1.0.0" + assert Version.parse!("1.0.0-dev") |> to_string == "1.0.0-dev" + assert Version.parse!("1.0.0+lol") |> to_string == "1.0.0+lol" + assert Version.parse!("1.0.0-dev+lol") |> to_string == "1.0.0-dev+lol" + assert Version.parse!("1.0.0-0") |> to_string == "1.0.0-0" + assert Version.parse!("1.0.0-rc.0") |> to_string == "1.0.0-rc.0" end - test "invalid match" do - assert_raise V.InvalidVersionError, fn -> - V.match?("foo", "2.3.0") + test "match?/2 with invalid versions" do + assert_raise Version.InvalidVersionError, fn -> + Version.match?("foo", "2.3.0") end - assert_raise V.InvalidVersionError, fn -> - V.match?("2.3", "2.3.0") + assert_raise Version.InvalidVersionError, 
fn -> + Version.match?("2.3", "2.3.0") end - assert_raise V.InvalidRequirementError, fn -> - V.match?("2.3.0", "foo") + assert_raise Version.InvalidRequirementError, fn -> + Version.match?("2.3.0", "foo") end - assert_raise V.InvalidRequirementError, fn -> - V.match?("2.3.0", "2.3") + assert_raise Version.InvalidRequirementError, fn -> + Version.match?("2.3.0", "2.3") end end test "==" do - assert V.match?("2.3.0", "2.3.0") - refute V.match?("2.4.0", "2.3.0") + assert Version.match?("2.3.0", "2.3.0") + refute Version.match?("2.4.0", "2.3.0") - assert V.match?("2.3.0", "== 2.3.0") - refute V.match?("2.4.0", "== 2.3.0") + assert Version.match?("2.3.0", "== 2.3.0") + refute Version.match?("2.4.0", "== 2.3.0") - assert V.match?("1.0.0", "1.0.0") - assert V.match?("1.0.0", "1.0.0") + assert Version.match?("1.0.0", "1.0.0") + assert Version.match?("1.0.0", "1.0.0") - assert V.match?("1.2.3-alpha", "1.2.3-alpha") + assert Version.match?("1.2.3-alpha", "1.2.3-alpha") - assert V.match?("0.9.3", "== 0.9.3+dev") + assert Version.match?("0.9.3", "== 0.9.3+dev") + + {:ok, vsn} = Version.parse("2.3.0") + assert Version.match?(vsn, "2.3.0") end test "!=" do - assert V.match?("2.4.0", "!2.3.0") - refute V.match?("2.3.0", "!2.3.0") + assert Version.match?("2.4.0", "!2.3.0") + refute Version.match?("2.3.0", "!2.3.0") - assert V.match?("2.4.0", "!= 2.3.0") - refute V.match?("2.3.0", "!= 2.3.0") + assert Version.match?("2.4.0", "!= 2.3.0") + refute Version.match?("2.3.0", "!= 2.3.0") end test ">" do - assert V.match?("2.4.0", "> 2.3.0") - refute V.match?("2.2.0", "> 2.3.0") - refute V.match?("2.3.0", "> 2.3.0") - - assert V.match?("1.2.3", "> 1.2.3-alpha") - assert V.match?("1.2.3-alpha.1", "> 1.2.3-alpha") - assert V.match?("1.2.3-alpha.beta.sigma", "> 1.2.3-alpha.beta") - refute V.match?("1.2.3-alpha.10", "< 1.2.3-alpha.1") - refute V.match?("0.10.2-dev", "> 0.10.2") + assert Version.match?("2.4.0", "> 2.3.0") + refute Version.match?("2.2.0", "> 2.3.0") + refute Version.match?("2.3.0", "> 2.3.0") + + assert Version.match?("1.2.3", "> 1.2.3-alpha") + assert Version.match?("1.2.3-alpha.1", "> 1.2.3-alpha") + assert Version.match?("1.2.3-alpha.beta.sigma", "> 1.2.3-alpha.beta") + refute Version.match?("1.2.3-alpha.10", "< 1.2.3-alpha.1") + refute Version.match?("0.10.2-dev", "> 0.10.2") end test ">=" do - assert V.match?("2.4.0", ">= 2.3.0") - refute V.match?("2.2.0", ">= 2.3.0") - assert V.match?("2.3.0", ">= 2.3.0") + assert Version.match?("2.4.0", ">= 2.3.0") + refute Version.match?("2.2.0", ">= 2.3.0") + assert Version.match?("2.3.0", ">= 2.3.0") - assert V.match?("2.0.0", ">= 1.0.0") - assert V.match?("1.0.0", ">= 1.0.0") + assert Version.match?("2.0.0", ">= 1.0.0") + assert Version.match?("1.0.0", ">= 1.0.0") end test "<" do - assert V.match?("2.2.0", "< 2.3.0") - refute V.match?("2.4.0", "< 2.3.0") - refute V.match?("2.3.0", "< 2.3.0") + assert Version.match?("2.2.0", "< 2.3.0") + refute Version.match?("2.4.0", "< 2.3.0") + refute Version.match?("2.3.0", "< 2.3.0") - assert V.match?("0.10.2-dev", "< 0.10.2") + assert Version.match?("0.10.2-dev", "< 0.10.2") - refute V.match?("1.0.0", "< 1.0.0-dev") - refute V.match?("1.2.3-dev", "< 0.1.2") + refute Version.match?("1.0.0", "< 1.0.0-dev") + refute Version.match?("1.2.3-dev", "< 0.1.2") end test "<=" do - assert V.match?("2.2.0", "<= 2.3.0") - refute V.match?("2.4.0", "<= 2.3.0") - assert V.match?("2.3.0", "<= 2.3.0") + assert Version.match?("2.2.0", "<= 2.3.0") + refute Version.match?("2.4.0", "<= 2.3.0") + assert Version.match?("2.3.0", "<= 2.3.0") end 
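For context, a minimal plain-Elixir sketch of the comparison-style requirements asserted in the tests above; the version strings simply mirror the assertions and carry no special meaning:

    Version.match?("2.4.0", ">= 2.3.0")  #=> true
    Version.match?("2.2.0", "< 2.3.0")   #=> true
    Version.match?("2.3.0", "!= 2.3.0")  #=> false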
test "~>" do - assert V.match?("3.0.0", "~> 3.0") - assert V.match?("3.2.0", "~> 3.0") - refute V.match?("4.0.0", "~> 3.0") - refute V.match?("4.4.0", "~> 3.0") + assert Version.match?("3.0.0", "~> 3.0") + assert Version.match?("3.2.0", "~> 3.0") + refute Version.match?("4.0.0", "~> 3.0") + refute Version.match?("4.4.0", "~> 3.0") - assert V.match?("3.0.2", "~> 3.0.0") - assert V.match?("3.0.0", "~> 3.0.0") - refute V.match?("3.1.0", "~> 3.0.0") - refute V.match?("3.4.0", "~> 3.0.0") + assert Version.match?("3.0.2", "~> 3.0.0") + assert Version.match?("3.0.0", "~> 3.0.0") + refute Version.match?("3.1.0", "~> 3.0.0") + refute Version.match?("3.4.0", "~> 3.0.0") - assert V.match?("3.6.0", "~> 3.5") - assert V.match?("3.5.0", "~> 3.5") - refute V.match?("4.0.0", "~> 3.5") - refute V.match?("5.0.0", "~> 3.5") + assert Version.match?("3.6.0", "~> 3.5") + assert Version.match?("3.5.0", "~> 3.5") + refute Version.match?("4.0.0", "~> 3.5") + refute Version.match?("5.0.0", "~> 3.5") - assert V.match?("3.5.2", "~> 3.5.0") - assert V.match?("3.5.4", "~> 3.5.0") - refute V.match?("3.6.0", "~> 3.5.0") - refute V.match?("3.6.3", "~> 3.5.0") + assert Version.match?("3.5.2", "~> 3.5.0") + assert Version.match?("3.5.4", "~> 3.5.0") + refute Version.match?("3.6.0", "~> 3.5.0") + refute Version.match?("3.6.3", "~> 3.5.0") - assert V.match?("0.9.3", "~> 0.9.3-dev") - refute V.match?("0.10.0", "~> 0.9.3-dev") + assert Version.match?("0.9.3", "~> 0.9.3-dev") + refute Version.match?("0.10.0", "~> 0.9.3-dev") - refute V.match?("0.3.0-dev", "~> 0.2.0") + refute Version.match?("0.3.0-dev", "~> 0.2.0") - assert_raise V.InvalidRequirementError, fn -> - V.match?("3.0.0", "~> 3") + assert_raise Version.InvalidRequirementError, fn -> + Version.match?("3.0.0", "~> 3") end end + test "allow_pre" do + assert Version.match?("1.1.0", "~> 1.0", allow_pre: true) + assert Version.match?("1.1.0", "~> 1.0", allow_pre: false) + assert Version.match?("1.1.0-beta", "~> 1.0", allow_pre: true) + refute Version.match?("1.1.0-beta", "~> 1.0", allow_pre: false) + assert Version.match?("1.0.1-beta", "~> 1.0.0-beta", allow_pre: false) + + assert Version.match?("1.1.0", ">= 1.0.0", allow_pre: true) + assert Version.match?("1.1.0", ">= 1.0.0", allow_pre: false) + assert Version.match?("1.1.0-beta", ">= 1.0.0", allow_pre: true) + refute Version.match?("1.1.0-beta", ">= 1.0.0", allow_pre: false) + assert Version.match?("1.1.0-beta", ">= 1.0.0-beta", allow_pre: false) + end + test "and" do - assert V.match?("0.9.3", "> 0.9.0 and < 0.10.0") - refute V.match?("0.10.2", "> 0.9.0 and < 0.10.0") + assert Version.match?("0.9.3", "> 0.9.0 and < 0.10.0") + refute Version.match?("0.10.2", "> 0.9.0 and < 0.10.0") end test "or" do - assert V.match?("0.9.1", "0.9.1 or 0.9.3 or 0.9.5") - assert V.match?("0.9.3", "0.9.1 or 0.9.3 or 0.9.5") - assert V.match?("0.9.5", "0.9.1 or 0.9.3 or 0.9.5") + assert Version.match?("0.9.1", "0.9.1 or 0.9.3 or 0.9.5") + assert Version.match?("0.9.3", "0.9.1 or 0.9.3 or 0.9.5") + assert Version.match?("0.9.5", "0.9.1 or 0.9.3 or 0.9.5") + + refute Version.match?("0.9.6", "0.9.1 or 0.9.3 or 0.9.5") + end + + test "compile requirement" do + {:ok, req} = Version.parse_requirement("1.2.3") + req = Version.compile_requirement(req) - refute V.match?("0.9.6", "0.9.1 or 0.9.3 or 0.9.5") + assert Version.match?("1.2.3", req) + refute Version.match?("1.2.4", req) end end diff --git a/lib/elixir/test/erlang/atom_test.erl b/lib/elixir/test/erlang/atom_test.erl index 3b84b90fb44..73bf534320e 100644 --- 
a/lib/elixir/test/erlang/atom_test.erl +++ b/lib/elixir/test/erlang/atom_test.erl @@ -1,42 +1,42 @@ -module(atom_test). -export([kv/1]). --include("elixir.hrl"). -include_lib("eunit/include/eunit.hrl"). eval(Content) -> {Value, Binding, _, _} = elixir:eval(Content, []), {Value, Binding}. -kv([{Key,nil}]) -> Key. +kv([{Key, nil}]) -> Key. atom_with_punctuation_test() -> - {foo@bar,[]} = eval(":foo@bar"), - {'a?',[]} = eval(":a?"), - {'a!',[]} = eval(":a!"), - {'||',[]} = eval(":||"), - {'...',[]} = eval(":..."). + {foo@bar, []} = eval(":foo@bar"), + {'a?', []} = eval(":a?"), + {'a!', []} = eval(":a!"), + {'||', []} = eval(":||"), + {'...', []} = eval(":..."). atom_quoted_call_test() -> - {3,[]} = eval("Kernel.'+'(1, 2)"). + {3, []} = eval("Kernel.'+'(1, 2)"). kv_with_quotes_test() -> - {'foo bar',[]} = eval(":atom_test.kv(\"foo bar\": nil)"). + {'foo bar', []} = eval(":atom_test.kv(\"foo bar\": nil)"). kv_with_interpolation_test() -> - {'foo',[]} = eval(":atom_test.kv(\"#{\"foo\"}\": nil)"), - {'foo',[]} = eval(":atom_test.kv(\"#{\"fo\"}o\": nil)"), - {'foo',_} = eval("a = \"f\"; :atom_test.kv(\"#{a}#{\"o\"}o\": nil)"). + {'foo', []} = eval(":atom_test.kv(\"#{\"foo\"}\": nil)"), + {'foo', []} = eval(":atom_test.kv(\"#{\"fo\"}o\": nil)"), + {'foo', _} = eval("a = \"f\"; :atom_test.kv(\"#{a}#{\"o\"}o\": nil)"). quoted_atom_test() -> - {foo,[]} = eval(":\"foo\""), - {foo,[]} = eval(":'foo'"). + {foo, []} = eval(":\"foo\""), + {foo, []} = eval(":'foo'"), + {'foo.Bar', []} = eval(":\"foo.Bar\""). atom_with_interpolation_test() -> - {foo,[]} = eval(":\"f#{\"o\"}o\""), - {foo,_} = eval("a=\"foo\"; :\"#{a}\""), - {foo,_} = eval("a=\"oo\"; :\"f#{a}\""), - {foo,_} = eval("a=\"fo\"; :\"#{a}o\""), - {fof,_} = eval("a=\"f\"; :\"#{a}o#{a}\""). + {foo, []} = eval(":\"f#{\"o\"}o\""), + {foo, _} = eval("a=\"foo\"; :\"#{a}\""), + {foo, _} = eval("a=\"oo\"; :\"f#{a}\""), + {foo, _} = eval("a=\"fo\"; :\"#{a}o\""), + {fof, _} = eval("a=\"f\"; :\"#{a}o#{a}\""). quoted_atom_chars_are_escaped_test() -> - {'"',[]} = eval(":\"\\\"\""). + {'"', []} = eval(":\"\\\"\""). diff --git a/lib/elixir/test/erlang/control_test.erl b/lib/elixir/test/erlang/control_test.erl index de7fbac7fac..703eebd1e03 100644 --- a/lib/elixir/test/erlang/control_test.erl +++ b/lib/elixir/test/erlang/control_test.erl @@ -1,5 +1,4 @@ -module(control_test). --include("elixir.hrl"). -include_lib("eunit/include/eunit.hrl"). eval(Content) -> @@ -27,25 +26,8 @@ if_else_kv_args_test() -> if_else_kv_blocks_test() -> {2, _} = eval("if(false) do\n1\nelse\n2\nend"), - {2, _} = eval("if(false) do\n1\n3\nelse\n2\nend"), {2, _} = eval("if(false) do 1 else 2 end"), - {2, _} = eval("if(false) do 1;else 2; end"), - {3, _} = eval("if(false) do 1;else 2; 3; end"). - -vars_if_test() -> - F = fun() -> - {1, [{foo,1}]} = eval("if foo = 1 do; true; else false; end; foo"), - eval("defmodule Bar do\ndef foo, do: 1\ndef bar(x) do\nif x do; foo = 2; else foo = foo; end; foo; end\nend"), - {1, _} = eval("Bar.bar(false)"), - {2, _} = eval("Bar.bar(true)") - end, - test_helper:run_and_remove(F, ['Elixir.Bar']). - -multi_assigned_if_test() -> - {3, _} = eval("x = 1\nif true do\nx = 2\nx = 3\nelse true\nend\nx"), - {3, _} = eval("x = 1\nif true do\n^x = 1\nx = 2\nx = 3\nelse true\nend\nx"), - {1, _} = eval("if true do\nx = 1\nelse true\nend\nx"), - {nil, _} = eval("if false do\nx = 1\nelse true\nend\nx"). + {2, _} = eval("if(false) do 1;else 2; end"). multi_line_if_test() -> {1, _} = eval("if true\ndo\n1\nelse\n2\nend"). 
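As a quick reference for the `if/else` forms these Erlang tests evaluate from source strings, an equivalent plain-Elixir sketch (the literal values are illustrative):

    if false, do: 1, else: 2   #=> 2

    if false do
      1
    else
      2
    end                        #=> 2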
@@ -57,8 +39,8 @@ try_test() -> try_else_test() -> {true, _} = eval("try do\n1\nelse 2 -> false\n1 -> true\nrescue\nErlangError -> nil\nend"), - {true, _} = eval("try do\n1\nelse {x,y} -> false\nx -> true\nrescue\nErlangError -> nil\nend"), - {true, _} = eval("try do\n{1,2}\nelse {3,4} -> false\n_ -> true\nrescue\nErlangError -> nil\nend"). + {true, _} = eval("try do\n1\nelse {x, y} -> false\nx -> true\nrescue\nErlangError -> nil\nend"), + {true, _} = eval("try do\n{1, 2}\nelse {3, 4} -> false\n_ -> true\nrescue\nErlangError -> nil\nend"). % Receive @@ -67,141 +49,94 @@ receive_test() -> {20, _} = eval("send self(), :bar\nreceive do\n:foo -> 10\n_ -> 20\nend"), {30, _} = eval("receive do\nafter 1 -> 30\nend"). -vars_receive_test() -> - {10, _} = eval("send self(), :foo\nreceive do\n:foo ->\na = 10\n:bar -> nil\nend\na"), - {nil, _} = eval("send self(), :bar\nreceive do\n:foo ->\nb = 10\n_ -> 20\nend\nb"), - {30, _} = eval("receive do\n:foo -> nil\nafter\n1 -> c = 30\nend\nc"), - {30, _} = eval("x = 1\nreceive do\n:foo -> nil\nafter\nx -> c = 30\nend\nc"). - % Case case_test() -> {true, []} = eval("case 1 do\n2 -> false\n1 -> true\nend"), - {true, []} = eval("case 1 do\n{x,y} -> false\nx -> true\nend"), - {true, []} = eval("case {1,2} do;{3,4} -> false\n_ -> true\nend"). + {true, []} = eval("case 1 do\n{x, y} -> false\nx -> true\nend"), + {true, []} = eval("case {1, 2} do; {3, 4} -> false\n_ -> true\nend"). case_with_do_ambiguity_test() -> - {true,_} = eval("case Atom.to_char_list(true) do\n_ -> true\nend"). + {true, _} = eval("case Atom.to_charlist(true) do\n_ -> true\nend"). case_with_match_do_ambiguity_test() -> - {true,_} = eval("case x = Atom.to_char_list(true) do\n_ -> true\nend"). + {true, _} = eval("case x = Atom.to_charlist(true) do\n_ -> true\nend"). case_with_unary_do_ambiguity_test() -> - {false,_} = eval("! case Atom.to_char_list(true) do\n_ -> true\nend"). - -multi_assigned_case_test() -> - {3, _} = eval("x = 1\ncase true do\n true ->\nx = 2\nx = 3\n_ -> true\nend\nx"), - {3, _} = eval("x = 1\ncase 1 do\n ^x -> x = 2\nx = 3\n_ -> true\nend\nx"), - {1, _} = eval("case true do\ntrue -> x = 1\n_ -> true\nend\nx"), - {nil, _} = eval("case true do\nfalse -> x = 1\n_ -> true\nend\nx"). - -vars_case_test() -> - F = fun() -> - eval("defmodule Bar do\ndef foo, do: 1\ndef bar(x) do\ncase x do\ntrue -> foo = 2\nfalse -> foo = foo\nend\nfoo\nend\nend"), - {1, _} = eval("Bar.bar(false)"), - {2, _} = eval("Bar.bar(true)") - end, - test_helper:run_and_remove(F, ['Elixir.Bar']). + {false, _} = eval("! case Atom.to_charlist(true) do\n_ -> true\nend"). % Comparison equal_test() -> - {true,_} = eval(":a == :a"), - {true,_} = eval("1 == 1"), - {true,_} = eval("{1,2} == {1,2}"), - {false,_} = eval("1 == 2"), - {false,_} = eval("{1,2} == {1,3}"). + {true, _} = eval(":a == :a"), + {true, _} = eval("1 == 1"), + {true, _} = eval("{1, 2} == {1, 2}"), + {false, _} = eval("1 == 2"), + {false, _} = eval("{1, 2} == {1, 3}"). not_equal_test() -> - {false,_} = eval(":a != :a"), - {false,_} = eval("1 != 1"), - {false,_} = eval("{1,2} != {1,2}"), - {true,_} = eval("1 != 2"), - {true,_} = eval("{1,2} != {1,3}"). + {false, _} = eval(":a != :a"), + {false, _} = eval("1 != 1"), + {false, _} = eval("{1, 2} != {1, 2}"), + {true, _} = eval("1 != 2"), + {true, _} = eval("{1, 2} != {1, 3}"). not_exclamation_mark_test() -> - {false,_} = eval("! :a"), - {false,_} = eval("!true"), - {false,_} = eval("!1"), - {false,_} = eval("![]"), - {true,_} = eval("!nil"), - {true,_} = eval("!false"). + {false, _} = eval("! 
:a"), + {false, _} = eval("!true"), + {false, _} = eval("!1"), + {false, _} = eval("![]"), + {true, _} = eval("!nil"), + {true, _} = eval("!false"). notnot_exclamation_mark_test() -> - {true,_} = eval("!! :a"), - {true,_} = eval("!!true"), - {true,_} = eval("!!1"), - {true,_} = eval("!![]"), - {false,_} = eval("!!nil"), - {false,_} = eval("!!false"). + {true, _} = eval("!! :a"), + {true, _} = eval("!!true"), + {true, _} = eval("!!1"), + {true, _} = eval("!![]"), + {false, _} = eval("!!nil"), + {false, _} = eval("!!false"). less_greater_test() -> - {true,_} = eval("1 < 2"), - {true,_} = eval("1 < :a"), - {false,_} = eval("1 < 1.0"), - {false,_} = eval("1 < 1"), - {true,_} = eval("1 <= 1.0"), - {true,_} = eval("1 <= 1"), - {true,_} = eval("1 <= :a"), - {false,_} = eval("1 > 2"), - {false,_} = eval("1 > :a"), - {false,_} = eval("1 > 1.0"), - {false,_} = eval("1 > 1"), - {true,_} = eval("1 >= 1.0"), - {true,_} = eval("1 >= 1"), - {false,_} = eval("1 >= :a"). + {true, _} = eval("1 < 2"), + {true, _} = eval("1 < :a"), + {false, _} = eval("1 < 1.0"), + {false, _} = eval("1 < 1"), + {true, _} = eval("1 <= 1.0"), + {true, _} = eval("1 <= 1"), + {true, _} = eval("1 <= :a"), + {false, _} = eval("1 > 2"), + {false, _} = eval("1 > :a"), + {false, _} = eval("1 > 1.0"), + {false, _} = eval("1 > 1"), + {true, _} = eval("1 >= 1.0"), + {true, _} = eval("1 >= 1"), + {false, _} = eval("1 >= :a"). integer_and_float_test() -> - {true,_} = eval("1 == 1"), - {false,_} = eval("1 != 1"), - {true,_} = eval("1 == 1.0"), - {false,_} = eval("1 != 1.0"), - {true,_} = eval("1 === 1"), - {false,_} = eval("1 !== 1"), - {false,_} = eval("1 === 1.0"), - {true,_} = eval("1 !== 1.0"). - -and_test() -> - F = fun() -> - eval("defmodule Bar do\ndef foo, do: true\ndef bar, do: false\n def baz(x), do: x == 1\nend"), - {true, _} = eval("true and true"), - {false, _} = eval("true and false"), - {false, _} = eval("false and true"), - {false, _} = eval("false and false"), - {true, _} = eval("Bar.foo and Bar.foo"), - {false, _} = eval("Bar.foo and Bar.bar"), - {true, _} = eval("Bar.foo and Bar.baz 1"), - {false, _} = eval("Bar.foo and Bar.baz 2"), - {true, _} = eval("false and false or true"), - {3, _} = eval("Bar.foo and 1 + 2"), - {false, _} = eval("Bar.bar and :erlang.error(:bad)"), - ?assertError({badarg, 1}, eval("1 and 2")) - end, - test_helper:run_and_remove(F, ['Elixir.Bar']). - -or_test() -> - F = fun() -> - eval("defmodule Bar do\ndef foo, do: true\ndef bar, do: false\n def baz(x), do: x == 1\nend"), - {true, _} = eval("true or true"), - {true, _} = eval("true or false"), - {true, _} = eval("false or true"), - {false, _} = eval("false or false"), - {true, _} = eval("Bar.foo or Bar.foo"), - {true, _} = eval("Bar.foo or Bar.bar"), - {false, _} = eval("Bar.bar or Bar.bar"), - {true, _} = eval("Bar.bar or Bar.baz 1"), - {false, _} = eval("Bar.bar or Bar.baz 2"), - {3, _} = eval("Bar.bar or 1 + 2"), - {true, _} = eval("Bar.foo or :erlang.error(:bad)"), - ?assertError({badarg, 1}, eval("1 or 2")) - end, - test_helper:run_and_remove(F, ['Elixir.Bar']). + {true, _} = eval("1 == 1"), + {false, _} = eval("1 != 1"), + {true, _} = eval("1 == 1.0"), + {false, _} = eval("1 != 1.0"), + {true, _} = eval("1 === 1"), + {false, _} = eval("1 !== 1"), + {false, _} = eval("1 === 1.0"), + {true, _} = eval("1 !== 1.0"). not_test() -> {false, _} = eval("not true"), {true, _} = eval("not false"), ?assertError(badarg, eval("not 1")). +rearrange_not_left_in_right_test() -> + %% TODO: Deprecate "not left in right" rearrangement. 
+ {true, _} = eval("not false in []"), + {false, _} = eval("not true in [true]"). + +rearrange_left_not_in_right_test() -> + {true, _} = eval("false not in []"), + {false, _} = eval("true not in [true]"). + andand_test() -> F = fun() -> eval("defmodule Bar do\ndef foo, do: true\ndef bar, do: false\n def baz(x), do: x == 1\nend"), @@ -253,31 +188,37 @@ oror_test() -> end, test_helper:run_and_remove(F, ['Elixir.Bar']). +cond_line_test() -> + {'case', 1, _, + [{clause, 2, _, _, _}, + {clause, 3, _, _, _}] + } = to_erl("cond do\n 1 -> :ok\n 2 -> :ok\nend"). + % Optimized optimized_if_test() -> {'case', _, _, - [{clause,_,[{atom,_,false}],[],[{atom,_,else}]}, - {clause,_,[{atom,_,true}],[],[{atom,_,do}]}] - } = to_erl("if is_list([]), do: :do, else: :else"). + [{clause, _, [{atom, _, false}], [], [{atom, _, else}]}, + {clause, _, [{atom, _, true}], [], [{atom, _, do}]}] + } = to_erl("if is_list([]), do: :do, else: :else"). optimized_andand_test() -> {'case', _, _, - [{clause,_, - [{var,_,Var}], - [[{op,_,'orelse',_,_}]], - [{var,_,Var}]}, - {clause,_,[{var,_,'_'}],[],[{atom,0,done}]}] - } = to_erl("is_list([]) && :done"). + [{clause, _, + [{var, _, Var}], + [[{op, _, 'or', _, _}]], + [{var, _, Var}]}, + {clause, _, [{var, _, '_'}], [], [{atom, 0, done}]}] + } = to_erl("is_list([]) && :done"). optimized_oror_test() -> {'case', _, _, - [{clause,1, - [{var,1,_}], - [[{op,1,'orelse',_,_}]], - [{atom,0,done}]}, - {clause,1,[{var,1,Var}],[],[{var,1,Var}]}] - } = to_erl("is_list([]) || :done"). + [{clause, 1, + [{var, 1, _}], + [[{op, 1, 'or', _, _}]], + [{atom, 0, done}]}, + {clause, 1, [{var, 1, Var}], [], [{var, 1, Var}]}] + } = to_erl("is_list([]) || :done"). no_after_in_try_test() -> - {'try', _, [_], [_], _, []} = to_erl("try do :foo.bar() else _ -> :ok end"). \ No newline at end of file + {'try', _, [_], [_], _, []} = to_erl("try do :foo.bar() else _ -> :ok end"). diff --git a/lib/elixir/test/erlang/function_test.erl b/lib/elixir/test/erlang/function_test.erl index 8f875a5817b..9759e4e0bd5 100644 --- a/lib/elixir/test/erlang/function_test.erl +++ b/lib/elixir/test/erlang/function_test.erl @@ -10,10 +10,6 @@ function_arg_do_end_test() -> {nil, _} = eval("if true do end"). function_stab_end_test() -> - {_, [{a, Fun1}]} = eval("a = fn -> end"), - nil = Fun1(), - {_, [{a, Fun2}]} = eval("a = fn() -> end"), - nil = Fun2(), {_, [{a, Fun3}]} = eval("a = fn -> 1 + 2 end"), 3 = Fun3(). @@ -29,47 +25,47 @@ function_stab_inline_test() -> function_with_args_test() -> {Fun, _} = eval("fn(a, b) -> a + b end"), - 3 = Fun(1,2). + 3 = Fun(1, 2). function_with_kv_args_test() -> {Fun, _} = eval("fn(a, [other: b, another: c]) -> a + b + c end"), - 6 = Fun(1,[{other,2}, {another,3}]). + 6 = Fun(1, [{other, 2}, {another, 3}]). function_as_closure_test() -> - {_, [{a, Res1}|_]} = eval("b = 1; a = fn -> b + 2 end"), + {_, [{a, Res1} | _]} = eval("b = 1; a = fn -> b + 2 end"), 3 = Res1(). function_apply_test() -> - {3,_} = eval("a = fn -> 3 end; apply a, []"). + {3, _} = eval("a = fn -> 3 end; apply a, []"). function_apply_with_args_test() -> - {3,_} = eval("a = fn b -> b + 2 end; apply a, [1]"). + {3, _} = eval("a = fn b -> b + 2 end; apply a, [1]"). function_apply_and_clojure_test() -> - {3,_} = eval("b = 1; a = fn -> b + 2 end; apply a, []"). + {3, _} = eval("b = 1; a = fn -> b + 2 end; apply a, []"). 
function_parens_test() -> - {0,_} = eval("(fn() -> 0 end).()"), - {1,_} = eval("(fn(1) -> 1 end).(1)"), - {3,_} = eval("(fn(1, 2) -> 3 end).(1, 2)"), + {0, _} = eval("(fn() -> 0 end).()"), + {1, _} = eval("(fn(1) -> 1 end).(1)"), + {3, _} = eval("(fn(1, 2) -> 3 end).(1, 2)"), - {0,_} = eval("(fn () -> 0 end).()"), - {1,_} = eval("(fn (1) -> 1 end).(1)"), - {3,_} = eval("(fn (1, 2) -> 3 end).(1, 2)"). + {0, _} = eval("(fn() -> 0 end).()"), + {1, _} = eval("(fn(1) -> 1 end).(1)"), + {3, _} = eval("(fn(1, 2) -> 3 end).(1, 2)"). %% Function calls function_call_test() -> - {3, _} = eval("x = fn a, b -> a + b end\nx.(1,2)"). + {3, _} = eval("x = fn a, b -> a + b end\nx.(1, 2)"). function_call_without_arg_test() -> {3, _} = eval("x = fn -> 2 + 1 end\nx.()"). function_call_do_end_test() -> - {[1,[{do,2},{else,3}]], _} = eval("x = fn a, b -> [a,b] end\nx.(1) do\n2\nelse 3\nend"). + {[1, [{do, 2}, {else, 3}]], _} = eval("x = fn a, b -> [a, b] end\nx.(1) do\n2\nelse 3\nend"). function_call_with_assignment_test() -> - {3, [{a,_},{c, 3}]} = eval("a = fn x -> x + 2 end; c = a.(1)"). + {3, [{a, _}, {c, 3}]} = eval("a = fn x -> x + 2 end; c = a.(1)"). function_calls_with_multiple_expressions_test() -> {26, _} = eval("a = fn a, b -> a + b end; a.((3 + 4 - 1), (2 * 10))"). @@ -78,29 +74,29 @@ function_calls_with_multiple_args_with_line_breaks_test() -> {5, _} = eval("a = fn a, b -> a + b end; a.(\n3,\n2\n)"). function_calls_with_parenthesis_test() -> - {3, [{a,_},{b,1}]} = eval("a = (fn x -> x + 2 end).(b = 1)"). + {3, [{a, _}, {b, 1}]} = eval("a = (fn x -> x + 2 end).(b = 1)"). function_call_with_a_single_space_test() -> - {3, _} = eval("a = fn a, b -> a + b end; a. (1,2)"), - {3, _} = eval("a = fn a, b -> a + b end; a .(1,2)"). + {3, _} = eval("a = fn a, b -> a + b end; a. (1, 2)"), + {3, _} = eval("a = fn a, b -> a + b end; a .(1, 2)"). function_call_with_spaces_test() -> - {3, _} = eval("a = fn a, b -> a + b end; a . (1,2)"). + {3, _} = eval("a = fn a, b -> a + b end; a . (1, 2)"). function_call_without_assigning_with_spaces_test() -> - {3, _} = eval("(fn a, b -> a + b end) . (1,2)"). + {3, _} = eval("(fn a, b -> a + b end) . (1, 2)"). function_call_with_assignment_and_spaces_test() -> - {3, [{a,_},{c,3}]} = eval("a = fn x -> x + 2 end; c = a . (1)"). + {3, [{a, _}, {c, 3}]} = eval("a = fn x -> x + 2 end; c = a . (1)"). function_call_with_multiple_spaces_test() -> - {3, _} = eval("a = fn a, b -> a + b end; a . (1,2)"). + {3, _} = eval("a = fn a, b -> a + b end; a . (1, 2)"). function_call_with_multiline_test() -> - {3, _} = eval("a = fn a, b -> a + b end; a . \n (1,2)"). + {3, _} = eval("a = fn a, b -> a + b end; a . \n (1, 2)"). function_call_with_tabs_test() -> - {3, _} = eval("a = fn a, b -> a + b end; a .\n\t(1,2)"). + {3, _} = eval("a = fn a, b -> a + b end; a .\n\t(1, 2)"). 
function_call_with_args_and_nested_when_test() -> {Fun, _} = eval("fn a, b when a == 1 when b == 2 -> a + b end"), diff --git a/lib/elixir/test/erlang/match_test.erl b/lib/elixir/test/erlang/match_test.erl index 317b066dadf..2a28cf469d1 100644 --- a/lib/elixir/test/erlang/match_test.erl +++ b/lib/elixir/test/erlang/match_test.erl @@ -19,19 +19,19 @@ assignment_test() -> not_single_assignment_test() -> {2, [{a, 2}]} = eval("a = 1\na = 2\na"), - {1, [{a, 1}]} = eval("{a,a} = {1,1}\na"), - {2, [{a, 2}]} = eval("a = 1\n{^a,a} = {1,2}\na"), - ?assertError({badmatch, _}, eval("{a,a} = {1,2}")), - ?assertError({badmatch, _}, eval("{1 = a,a} = {1,2}")), - ?assertError({badmatch, _}, eval("{a = 1,a} = {1,2}")), - ?assertError({badmatch, _}, eval("a = 0;{a,a} = {1,2}")), - ?assertError({badmatch, _}, eval("a = 0;{1 = a,a} = {1,2}")), + {1, [{a, 1}]} = eval("{a, a} = {1, 1}\na"), + {2, [{a, 2}]} = eval("a = 1\n{^a, a} = {1, 2}\na"), + ?assertError({badmatch, _}, eval("{a, a} = {1, 2}")), + ?assertError({badmatch, _}, eval("{1 = a, a} = {1, 2}")), + ?assertError({badmatch, _}, eval("{a = 1, a} = {1, 2}")), + ?assertError({badmatch, _}, eval("a = 0;{a, a} = {1, 2}")), + ?assertError({badmatch, _}, eval("a = 0;{1 = a, a} = {1, 2}")), ?assertError({badmatch, _}, eval("a = 1\n^a = 2")). duplicated_assignment_on_module_with_tuple_test() -> F = fun() -> eval("defmodule Foo do\ndef v({a, _left}, {a, _right}), do: a\nend"), - {1,_} = eval("Foo.v({1, :foo}, {1, :bar})"), + {1, _} = eval("Foo.v({1, :foo}, {1, :bar})"), ?assertError(function_clause, eval("Foo.v({1, :foo}, {2, :bar})")) end, test_helper:run_and_remove(F, ['Elixir.Foo']). @@ -39,7 +39,7 @@ duplicated_assignment_on_module_with_tuple_test() -> duplicated_assignment_on_module_with_list_test() -> F = fun() -> eval("defmodule Foo do\ndef v([ a, _left ], [ a, _right ]), do: a\nend"), - {1,_} = eval("Foo.v([ 1, :foo ], [ 1, :bar ])"), + {1, _} = eval("Foo.v([ 1, :foo ], [ 1, :bar ])"), ?assertError(function_clause, eval("Foo.v([ 1, :foo ], [ 2, :bar ])")) end, test_helper:run_and_remove(F, ['Elixir.Foo']). @@ -67,52 +67,52 @@ underscore_assignment_test() -> {1, []} = eval("_ = 1"). assignment_precedence_test() -> - {_, [{x,{'__block__', _, [1,2,3]}}]} = eval("x = quote do\n1\n2\n3\nend"). + {_, [{x, {'__block__', _, [1, 2, 3]}}]} = eval("x = quote do\n1\n2\n3\nend"). % Tuples match simple_tuple_test() -> {{}, _} = eval("a = {}"), - {{1,2,3}, _} = eval("a = {1, 2, 3}"), - {{1,2,3}, _} = eval("a = {1, 1 + 1, 3}"), - {{1,{2},3}, _} = eval("a = {1, {2}, 3}"). + {{1, 2, 3}, _} = eval("a = {1, 2, 3}"), + {{1, 2, 3}, _} = eval("a = {1, 1 + 1, 3}"), + {{1, {2}, 3}, _} = eval("a = {1, {2}, 3}"). tuple_match_test() -> - {_, _} = eval("{1,2,3} = {1, 2, 3}"), + {_, _} = eval("{1, 2, 3} = {1, 2, 3}"), ?assertError({badmatch, _}, eval("{1, 3, 2} = {1, 2, 3}")). % Lists match simple_list_test() -> {[], _} = eval("a = []"), - {[1,2,3], _} = eval("a = [1, 2, 3]"), - {[1,2,3], _} = eval("a = [1, 1 + 1, 3]"), - {[1,[2],3], _} = eval("a = [1, [2], 3]"), - {[1,{2},3], _} = eval("a = [1, {2}, 3]"). + {[1, 2, 3], _} = eval("a = [1, 2, 3]"), + {[1, 2, 3], _} = eval("a = [1, 1 + 1, 3]"), + {[1, [2], 3], _} = eval("a = [1, [2], 3]"), + {[1, {2}, 3], _} = eval("a = [1, {2}, 3]"). list_match_test() -> {_, _} = eval("[1, 2, 3] = [1, 2, 3]"), ?assertError({badmatch, _}, eval("[1, 3, 2] = [1, 2, 3]")). list_vars_test() -> - {[3,1], [{x,3}]} = eval("x = 1\n[x = x + 2, x]"). + {[3, 1], [{x, 3}]} = eval("x = 1\n[x = x + 2, x]"). 
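For reference, a plain-Elixir sketch of the tuple and head/tail matches these tests drive through `eval/1`; the bindings `a`, `h` and `t` are illustrative:

    {a, a} = {1, 1}       # matches; both positions bind the same value
    [h | t] = [1, 2, 3]
    h                     #=> 1
    t                     #=> [2, 3]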
head_and_tail_test() -> - {_,[{h,1},{t,[2,3]}]} = eval("[h|t] = [1,2,3]"), - {_,[{h,2},{t,[3]}]} = eval("[1,h|t] = [1,2,3]"), - {_,[{t,[3]}]} = eval("[1,2|t] = [1,2,3]"), - {_,[{h,1}]} = eval("[h|[2,3]] = [1,2,3]"), - {_,[{t,[2,3]}]} = eval("[+1|t] = [1,2,3]"), - ?assertError({badmatch, _}, eval("[2,h|t] = [1,2,3]")). + {_, [{h, 1}, {t, [2, 3]}]} = eval("[h | t] = [1, 2, 3]"), + {_, [{h, 2}, {t, [3]}]} = eval("[1, h | t] = [1, 2, 3]"), + {_, [{t, [3]}]} = eval("[1, 2 | t] = [1, 2, 3]"), + {_, [{h, 1}]} = eval("[h | [2, 3]] = [1, 2, 3]"), + {_, [{t, [2, 3]}]} = eval("[+1 | t] = [1, 2, 3]"), + ?assertError({badmatch, _}, eval("[2, h | t] = [1, 2, 3]")). % Keyword match orrdict_match_test() -> - {[{a,1},{b,2}], _} = eval("a = [a: 1, b: 2]"). - + {[{a, 1}, {b, 2}], _} = eval("a = [a: 1, b: 2]"). + % Function match function_clause_test() -> F = fun() -> - eval("defmodule Foo do\ndef a([{_k,_}=e|_]), do: e\nend"), - {{foo,bar},_} = eval("Foo.a([{:foo,:bar}])") + eval("defmodule Foo do\ndef a([{_k, _}=e | _]), do: e\nend"), + {{foo, bar}, _} = eval("Foo.a([{:foo, :bar}])") end, - test_helper:run_and_remove(F, ['Elixir.Foo']). \ No newline at end of file + test_helper:run_and_remove(F, ['Elixir.Foo']). diff --git a/lib/elixir/test/erlang/module_test.erl b/lib/elixir/test/erlang/module_test.erl index 1b9b835f2e7..8e38c683447 100644 --- a/lib/elixir/test/erlang/module_test.erl +++ b/lib/elixir/test/erlang/module_test.erl @@ -25,18 +25,18 @@ function_test() -> test_helper:run_and_remove(F, ['Elixir.Foo.Bar.Baz']). quote_unquote_splicing_test() -> - {{'{}', [], [1,2,3,4,5]}, _} = eval("x = [2,3,4]\nquote do: {1, unquote_splicing(x), 5}"). + {{'{}', [], [1, 2, 3, 4, 5]}, _} = eval("x = [2, 3, 4]\nquote do: {1, unquote_splicing(x), 5}"). def_shortcut_test() -> F = fun() -> - {1,[]} = eval("defmodule Foo do\ndef version, do: 1\nend\nFoo.version") + {1, []} = eval("defmodule Foo do\ndef version, do: 1\nend\nFoo.version") end, test_helper:run_and_remove(F, ['Elixir.Foo']). macro_test() -> F = fun() -> - {'Elixir.Foo',[]} = eval("defmodule Foo do\ndef version, do: __MODULE__\nend\nFoo.version"), - {nil,[]} = eval("__MODULE__") + {'Elixir.Foo', []} = eval("defmodule Foo do\ndef version, do: __MODULE__\nend\nFoo.version"), + {nil, []} = eval("__MODULE__") end, test_helper:run_and_remove(F, ['Elixir.Foo']). @@ -66,33 +66,30 @@ def_left_default_test() -> def_with_guard_test() -> F = fun() -> eval("defmodule Foo do\ndef v(x) when x < 10, do: true\ndef v(x) when x >= 10, do: false\nend"), - {true,_} = eval("Foo.v(0)"), - {false,_} = eval("Foo.v(20)") + {true, _} = eval("Foo.v(0)"), + {false, _} = eval("Foo.v(20)") end, test_helper:run_and_remove(F, ['Elixir.Foo']). do_end_test() -> F = fun() -> eval("defmodule Foo do\ndef a, do: 1\ndefmodule Bar do\ndef b, do: 2\nend\ndef c, do: 3\nend"), - {1,_} = eval("Foo.a"), - {2,_} = eval("Foo.Bar.b"), - {3,_} = eval("Foo.c") + {1, _} = eval("Foo.a"), + {2, _} = eval("Foo.Bar.b"), + {3, _} = eval("Foo.c") end, test_helper:run_and_remove(F, ['Elixir.Foo', 'Elixir.Foo.Bar']). nesting_test() -> F = fun() -> eval("defmodule Foo do\ndefmodule Elixir.Bar do\ndef b, do: 2\nend\nend"), - {2,_} = eval("Bar.b") + {2, _} = eval("Bar.b") end, test_helper:run_and_remove(F, ['Elixir.Foo', 'Elixir.Bar']). dot_alias_test() -> {'Elixir.Foo.Bar.Baz', _} = eval("Foo.Bar.Baz"). -dot_dyn_alias_test() -> - {'Elixir.Foo.Bar.Baz', _} = eval("a = Foo.Bar; a.Baz"). - single_ref_test() -> {'Elixir.Foo', _} = eval("Foo"), {'Elixir.Foo', _} = eval("Elixir.Foo"). 
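A minimal sketch of the `defmodule` plus shortcut-`def` pattern the surrounding tests compile dynamically; `Foo` mirrors the module name used in the tests, which the suite purges afterwards via `run_and_remove/2`:

    defmodule Foo do
      def version, do: 1
    end

    Foo.version()  #=> 1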
@@ -106,7 +103,7 @@ module_with_elixir_as_a_name_test() -> dynamic_defmodule_test() -> F = fun() -> eval("defmodule Foo do\ndef a(name) do\ndefmodule name, do: (def x, do: 1)\nend\nend"), - {_,_} = eval("Foo.a(Bar)"), - {1,_} = eval("Bar.x") + {_, _} = eval("Foo.a(Bar)"), + {1, _} = eval("Bar.x") end, - test_helper:run_and_remove(F, ['Elixir.Foo', 'Elixir.Bar']). \ No newline at end of file + test_helper:run_and_remove(F, ['Elixir.Foo', 'Elixir.Bar']). diff --git a/lib/elixir/test/erlang/operators_test.erl b/lib/elixir/test/erlang/operators_test.erl index 6ef07a89f14..ebce11ca393 100644 --- a/lib/elixir/test/erlang/operators_test.erl +++ b/lib/elixir/test/erlang/operators_test.erl @@ -6,65 +6,65 @@ eval(Content) -> {Value, Binding}. separator_test() -> - {334,[]} = eval("3_34"), - {600,[]} = eval("2_00+45_5-5_5"). + {334, []} = eval("3_34"), + {600, []} = eval("2_00+45_5-5_5"). integer_sum_test() -> - {3,[]} = eval("1+2"), - {6,[]} = eval("1+2+3"), - {6,[]} = eval("1+2 +3"), - {6,[]} = eval("1 + 2 + 3"). + {3, []} = eval("1+2"), + {6, []} = eval("1+2+3"), + {6, []} = eval("1+2 +3"), + {6, []} = eval("1 + 2 + 3"). integer_sum_minus_test() -> - {-4,[]} = eval("1-2-3"), - {0,[]} = eval("1+2-3"), - {0,[]} = eval("1 + 2 - 3"). + {-4, []} = eval("1-2-3"), + {0, []} = eval("1+2-3"), + {0, []} = eval("1 + 2 - 3"). integer_mult_test() -> - {6,[]} = eval("1*2*3"), - {6,[]} = eval("1 * 2 * 3"). + {6, []} = eval("1*2*3"), + {6, []} = eval("1 * 2 * 3"). integer_div_test() -> - {0.5,[]} = eval("1 / 2"), - {2.0,[]} = eval("4 / 2"). + {0.5, []} = eval("1 / 2"), + {2.0, []} = eval("4 / 2"). integer_div_rem_test() -> - {2,[]} = eval("div 5, 2"), - {1,[]} = eval("rem 5, 2"). + {2, []} = eval("div 5, 2"), + {1, []} = eval("rem 5, 2"). integer_mult_div_test() -> - {1.0,[]} = eval("2*1/2"), - {6.0,[]} = eval("3 * 4 / 2"). + {1.0, []} = eval("2*1/2"), + {6.0, []} = eval("3 * 4 / 2"). integer_without_parens_test() -> - {17,[]} = eval("3 * 5 + 2"), - {17,[]} = eval("2 + 3 * 5"), - {6.0,[]} = eval("4 / 4 + 5"). + {17, []} = eval("3 * 5 + 2"), + {17, []} = eval("2 + 3 * 5"), + {6.0, []} = eval("4 / 4 + 5"). integer_with_parens_test() -> - {21,[]} = eval("3 * (5 + 2)"), - {21,[]} = eval("3 * (((5 + (2))))"), - {25,[]} = eval("(2 + 3) * 5"), - {0.25,[]} = eval("4 / (11 + 5)"). + {21, []} = eval("3 * (5 + 2)"), + {21, []} = eval("3 * (((5 + (2))))"), + {25, []} = eval("(2 + 3) * 5"), + {0.25, []} = eval("4 / (11 + 5)"). integer_with_unary_test() -> - {2,[]} = eval("- 1 * - 2"). + {2, []} = eval("- 1 * - 2"). integer_eol_test() -> - {3,[]} = eval("1 +\n2"), - {2,[]} = eval("1 *\n2"), - {8,[]} = eval("1 + 2\n3 + 5"), - {8,[]} = eval("1 + 2\n\n\n3 + 5"), - {8,[]} = eval("1 + 2;\n\n3 + 5"), - {8,[]} = eval("1 + (\n2\n) + 3 + 2"), - {8,[]} = eval("1 + (\n\n 2\n\n) + 3 + 2"), - {3,[]} = eval(";1 + 2"), + {3, []} = eval("1 +\n2"), + {2, []} = eval("1 *\n2"), + {8, []} = eval("1 + 2\n3 + 5"), + {8, []} = eval("1 + 2\n\n\n3 + 5"), + {8, []} = eval("1 + 2;\n\n3 + 5"), + {8, []} = eval("1 + (\n2\n) + 3 + 2"), + {8, []} = eval("1 + (\n\n 2\n\n) + 3 + 2"), + {3, []} = eval(";1 + 2"), ?assertError(#{'__struct__' := 'Elixir.SyntaxError'}, eval("1 + 2;\n;\n3 + 5")). float_with_parens_and_unary_test() -> - {-21.0,[]} = eval("-3.0 * (5 + 2)"), - {25.0,[]} = eval("(2 + 3.0) * 5"), - {0.25,[]} = eval("4 / (11.0 + 5)"). + {-21.0, []} = eval("-3.0 * (5 + 2)"), + {25.0, []} = eval("(2 + 3.0) * 5"), + {0.25, []} = eval("4 / (11.0 + 5)"). 
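For reference, the integer and float operator semantics asserted by the operator tests above, as a plain-Elixir sketch:

    div(5, 2)  #=> 2
    rem(5, 2)  #=> 1
    4 / 2      #=> 2.0  (the / operator always returns a float)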
operators_precedence_test() -> {2, _} = eval("max -1, 2"), @@ -78,15 +78,15 @@ operators_variables_precedence_test() -> operators_variables_precedence_on_namespaces_test() -> F = fun() -> eval("defmodule Foo do; def l, do: 1; end; defmodule Bar do; def l(_x), do: 1; end"), - {3,[]} = eval("1 + Foo.l + 1"), - {3,[]} = eval("1 + Foo.l+1"), - {2,[]} = eval("1 + Bar.l +1") + {3, []} = eval("1 + Foo.l + 1"), + {3, []} = eval("1 + Foo.l+1"), + {2, []} = eval("1 + Bar.l +1") end, test_helper:run_and_remove(F, ['Elixir.Foo', 'Elixir.Bar']). add_add_op_test() -> - {[1,2,3,4],[]} = eval("[1,2] ++ [3,4]"). + {[1, 2, 3, 4], []} = eval("[1, 2] ++ [3, 4]"). minus_minus_op_test() -> - {[1,2],[]} = eval("[1,2,3] -- [3]"), - {[1,2,3],[]} = eval("[1,2,3] -- [3] -- [3]"). \ No newline at end of file + {[1, 2], []} = eval("[1, 2, 3] -- [3]"), + {[1, 2, 3], []} = eval("[1, 2, 3] -- [3] -- [3]"). diff --git a/lib/elixir/test/erlang/string_test.erl b/lib/elixir/test/erlang/string_test.erl index aa787110fbb..83b2c14c981 100644 --- a/lib/elixir/test/erlang/string_test.erl +++ b/lib/elixir/test/erlang/string_test.erl @@ -1,5 +1,5 @@ -module(string_test). --include("elixir.hrl"). +-include("../../src/elixir.hrl"). -include_lib("eunit/include/eunit.hrl"). eval(Content) -> @@ -7,8 +7,13 @@ eval(Content) -> {Value, Binding}. extract_interpolations(String) -> - element(2, elixir_interpolation:extract(1, - #elixir_tokenizer{file = <<"nofile">>}, true, String ++ [$"], $")). + Tokenizer = #elixir_tokenizer{file = <<"nofile">>}, + case elixir_interpolation:extract(1, 1, Tokenizer, true, String ++ [$"], $") of + {error, Error} -> + Error; + {_, _, Z, _} -> + Z + end. % Interpolations @@ -16,54 +21,61 @@ extract_interpolations_without_interpolation_test() -> [<<"foo">>] = extract_interpolations("foo"). extract_interpolations_with_escaped_interpolation_test() -> - [<<"f#{o}o">>] = extract_interpolations("f\\#{o}o"). + [<<"f#{o}o">>] = extract_interpolations("f\\#{o}o"), + {1, 8, [<<"f#{o}o">>], []} = elixir_interpolation:extract(1, 2, + #elixir_tokenizer{file = <<"nofile">>}, true, "f\\#{o}o\"", $"). extract_interpolations_with_interpolation_test() -> [<<"f">>, - {1,[{atom,1,o}]}, + {{1, 2, 7}, [{atom, {1, 4, 6}, o}]}, <<"o">>] = extract_interpolations("f#{:o}o"). extract_interpolations_with_two_interpolations_test() -> [<<"f">>, - {1,[{atom,1,o}]},{1,[{atom,1,o}]}, + {{1, 2, 7}, [{atom, {1, 4, 6}, o}]}, {{1, 7, 12}, [{atom, {1, 9, 11}, o}]}, <<"o">>] = extract_interpolations("f#{:o}#{:o}o"). extract_interpolations_with_only_two_interpolations_test() -> - [{1,[{atom,1,o}]}, - {1,[{atom,1,o}]}] = extract_interpolations("#{:o}#{:o}"). + [{{1, 1, 6}, [{atom, {1, 3, 5}, o}]}, + {{1, 6, 11}, [{atom, {1, 8, 10}, o}]}] = extract_interpolations("#{:o}#{:o}"). extract_interpolations_with_tuple_inside_interpolation_test() -> [<<"f">>, - {1,[{'{',1},{number,1,1},{'}',1}]}, + {{1, 2, 8}, [{'{', {1, 4, 5}}, {number, {1, 5, 6}, 1}, {'}', {1, 6, 7}}]}, <<"o">>] = extract_interpolations("f#{{1}}o"). extract_interpolations_with_many_expressions_inside_interpolation_test() -> [<<"f">>, - {1,[{number,1,1},{eol,1,newline},{number,2,2}]}, + {{1, 2, 3}, [{number, {1, 4, 5}, 1}, {eol, {1, 5, 6}}, {number, {2, 1, 2}, 2}]}, <<"o">>] = extract_interpolations("f#{1\n2}o"). extract_interpolations_with_right_curly_inside_string_inside_interpolation_test() -> [<<"f">>, - {1,[{bin_string,1,[<<"f}o">>]}]}, + {{1, 2, 10}, [{bin_string, {1, 4, 9}, [<<"f}o">>]}]}, <<"o">>] = extract_interpolations("f#{\"f}o\"}o"). 
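A small plain-Elixir sketch of the interpolation cases the extraction tests cover; the binding and strings are illustrative:

    o = "o"
    "f#{o}o"       #=> "foo"
    "f#{"f}o"}o"   #=> "ff}oo"  (a curly brace inside an interpolated string is plain data)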
extract_interpolations_with_left_curly_inside_string_inside_interpolation_test() -> [<<"f">>, - {1,[{bin_string,1,[<<"f{o">>]}]}, + {{1, 2, 10}, [{bin_string, {1, 4, 9}, [<<"f{o">>]}]}, <<"o">>] = extract_interpolations("f#{\"f{o\"}o"). extract_interpolations_with_escaped_quote_inside_string_inside_interpolation_test() -> [<<"f">>, - {1,[{bin_string,1,[<<"f\"o">>]}]}, + {{1, 2, 11}, [{bin_string, {1, 4, 10}, [<<"f\"o">>]}]}, <<"o">>] = extract_interpolations("f#{\"f\\\"o\"}o"). extract_interpolations_with_less_than_operation_inside_interpolation_test() -> [<<"f">>, - {1,[{number,1,1},{rel_op,1,'<'},{number,1,2}]}, + {{1, 2, 8}, [{number, {1, 4, 5}, 1}, {rel_op, {1, 5, 6}, '<'}, {number, {1, 6, 7}, 2}]}, <<"o">>] = extract_interpolations("f#{1<2}o"). +extract_interpolations_with_an_escaped_character_test() -> + [<<"f">>, + {{1, 2, 17}, [{char, {1, 4, 7}, 7}, {rel_op, {1, 8, 9}, '>'}, {char, {1, 10, 13}, 7}]} + ] = extract_interpolations("f#{?\\a > ?\\a }"). + extract_interpolations_with_invalid_expression_inside_interpolation_test() -> - {1,"invalid token: ",":1}o\""} = extract_interpolations("f#{:1}o"). + {1, "unexpected token: ", _} = extract_interpolations("f#{:1}o"). %% Bin strings @@ -166,9 +178,9 @@ list_string_with_the_end_of_line_slash_test() -> {"fo", _} = eval("'f\\\r\no'"). char_test() -> - {99,[]} = eval("?1 + ?2"), - {10,[]} = eval("?\\n"), - {40,[]} = eval("?\\("). + {99, []} = eval("?1 + ?2"), + {10, []} = eval("?\\n"), + {40, []} = eval("?\\("). %% Binaries diff --git a/lib/elixir/test/erlang/test_helper.erl b/lib/elixir/test/erlang/test_helper.erl index e0890855250..cae9b9821ac 100644 --- a/lib/elixir/test/erlang/test_helper.erl +++ b/lib/elixir/test/erlang/test_helper.erl @@ -1,5 +1,4 @@ -module(test_helper). --include("elixir.hrl"). -export([test/0, run_and_remove/2, throw_elixir/1, throw_erlang/1]). -define(TESTS, [ atom_test, @@ -13,7 +12,7 @@ ]). test() -> - application:start(elixir), + application:ensure_all_started(elixir), case eunit:test(?TESTS) of error -> erlang:halt(1); _Res -> erlang:halt(0) diff --git a/lib/elixir/test/erlang/tokenizer_test.erl b/lib/elixir/test/erlang/tokenizer_test.erl index 4cd713860e8..48a48378beb 100644 --- a/lib/elixir/test/erlang/tokenizer_test.erl +++ b/lib/elixir/test/erlang/tokenizer_test.erl @@ -1,9 +1,8 @@ -module(tokenizer_test). --include("elixir.hrl"). -include_lib("eunit/include/eunit.hrl"). tokenize(String) -> - {ok, _Line, Result} = elixir_tokenizer:tokenize(String, 1, []), + {ok, _Line, _Column, Result} = elixir_tokenizer:tokenize(String, 1, []), Result. tokenize_error(String) -> @@ -11,136 +10,179 @@ tokenize_error(String) -> Error. type_test() -> - [{number,1,1},{type_op,1,'::'},{number,1,3}] = tokenize("1 :: 3"), - [{identifier,1,foo}, - {'.',1}, - {paren_identifier,1,'::'}, - {'(',1}, - {number,1,3}, - {')',1}] = tokenize("foo.::(3)"). + [{number, {1, 1, 2}, 1}, {type_op, {1, 3, 5}, '::'}, {number, {1, 6, 7}, 3}] = tokenize("1 :: 3"), + [{identifier, {1, 1, 5}, name}, + {'.', {1, 5, 6}}, + {paren_identifier, {1, 6, 8}, '::'}, + {'(', {1, 8, 9}}, + {number, {1, 9, 10}, 3}, + {')', {1, 10, 11}}] = tokenize("name.::(3)"). arithmetic_test() -> - [{number,1,1},{dual_op,1,'+'},{number,1,2},{dual_op,1,'+'},{number,1,3}] = tokenize("1 + 2 + 3"). + [{number, {1, 1, 2}, 1}, {dual_op, {1, 3, 4}, '+'}, {number, {1, 5, 6}, 2}, {dual_op, {1, 7, 8}, '+'}, {number, {1, 9, 10}, 3}] = tokenize("1 + 2 + 3"). op_kw_test() -> - [{atom,1,foo},{dual_op,1,'+'},{atom,1,bar}] = tokenize(":foo+:bar"). 
+ [{atom, {1, 1, 5}, foo}, {dual_op, {1, 5, 6}, '+'}, {atom, {1, 6, 10}, bar}] = tokenize(":foo+:bar"). scientific_test() -> - [{number, 1, 0.1}] = tokenize("1.0e-1"). + [{number, {1, 1, 7}, 0.1}] = tokenize("1.0e-1"), + [{number, {1, 1, 16}, 1.2345678e-7}] = tokenize("1_234.567_8e-10"), + {1, "invalid float number ", "1.0e309"} = tokenize_error("1.0e309"). hex_bin_octal_test() -> - [{number,1,255}] = tokenize("0xFF"), - [{number,1,255}] = tokenize("0Xff"), - [{number,1,63}] = tokenize("077"), - [{number,1,63}] = tokenize("077"), - [{number,1,3}] = tokenize("0b11"), - [{number,1,3}] = tokenize("0B11"). + [{number, {1, 1, 5}, 255}] = tokenize("0xFF"), + [{number, {1, 1, 6}, 255}] = tokenize("0xF_F"), + [{number, {1, 1, 5}, 63}] = tokenize("0o77"), + [{number, {1, 1, 6}, 63}] = tokenize("0o7_7"), + [{number, {1, 1, 5}, 3}] = tokenize("0b11"), + [{number, {1, 1, 6}, 3}] = tokenize("0b1_1"). unquoted_atom_test() -> - [{atom, 1, '+'}] = tokenize(":+"), - [{atom, 1, '-'}] = tokenize(":-"), - [{atom, 1, '*'}] = tokenize(":*"), - [{atom, 1, '/'}] = tokenize(":/"), - [{atom, 1, '='}] = tokenize(":="), - [{atom, 1, '&&'}] = tokenize(":&&"). + [{atom, {1, 1, 3}, '+'}] = tokenize(":+"), + [{atom, {1, 1, 3}, '-'}] = tokenize(":-"), + [{atom, {1, 1, 3}, '*'}] = tokenize(":*"), + [{atom, {1, 1, 3}, '/'}] = tokenize(":/"), + [{atom, {1, 1, 3}, '='}] = tokenize(":="), + [{atom, {1, 1, 4}, '&&'}] = tokenize(":&&"). quoted_atom_test() -> - [{atom_unsafe, 1, [<<"foo bar">>]}] = tokenize(":\"foo bar\""). + [{atom_unsafe, {1, 1, 11}, [<<"foo bar">>]}] = tokenize(":\"foo bar\""). oversized_atom_test() -> - OversizedAtom = [$:|string:copies("a", 256)], + OversizedAtom = [$: | string:copies("a", 256)], {1, "atom length must be less than system limit", ":"} = tokenize_error(OversizedAtom). op_atom_test() -> - [{atom,1,f0_1}] = tokenize(":f0_1"). + [{atom, {1, 1, 6}, f0_1}] = tokenize(":f0_1"). kw_test() -> - [{kw_identifier, 1, do}] = tokenize("do: "), - [{kw_identifier_unsafe, 1, [<<"foo bar">>]}] = tokenize("\"foo bar\": "). + [{kw_identifier, {1, 1, 4}, do}] = tokenize("do: "), + [{kw_identifier, {1, 1, 4}, a@}] = tokenize("a@: "), + [{kw_identifier, {1, 1, 4}, 'A@'}] = tokenize("A@: "), + [{kw_identifier, {1, 1, 5}, a@b}] = tokenize("a@b: "), + [{kw_identifier, {1, 1, 5}, 'A@!'}] = tokenize("A@!: "), + [{kw_identifier, {1, 1, 5}, 'a@!'}] = tokenize("a@!: "), + [{kw_identifier_unsafe, {1, 1, 10}, [<<"foo bar">>]}] = tokenize("\"foo bar\": "). integer_test() -> - [{number, 1, 123}] = tokenize("123"), - [{number, 1, 123},{eol, 1, ';'}] = tokenize("123;"), - [{eol, 1, newline}, {number, 3, 123}] = tokenize("\n\n123"), - [{number, 1, 123}, {number, 1, 234}] = tokenize(" 123 234 "). + [{number, {1, 1, 4}, 123}] = tokenize("123"), + [{number, {1, 1, 4}, 123}, {';', {1, 4, 5}}] = tokenize("123;"), + [{eol, {1, 1, 2}}, {number, {3, 1, 4}, 123}] = tokenize("\n\n123"), + [{number, {1, 3, 6}, 123}, {number, {1, 8, 11}, 234}] = tokenize(" 123 234 "). float_test() -> - [{number, 1, 12.3}] = tokenize("12.3"), - [{number, 1, 12.3},{eol, 1, ';'}] = tokenize("12.3;"), - [{eol, 1, newline}, {number, 3, 12.3}] = tokenize("\n\n12.3"), - [{number, 1, 12.3}, {number, 1, 23.4}] = tokenize(" 12.3 23.4 "). 
+ [{number, {1, 1, 5}, 12.3}] = tokenize("12.3"), + [{number, {1, 1, 5}, 12.3}, {';', {1, 5, 6}}] = tokenize("12.3;"), + [{eol, {1, 1, 2}}, {number, {3, 1, 5}, 12.3}] = tokenize("\n\n12.3"), + [{number, {1, 3, 7}, 12.3}, {number, {1, 9, 13}, 23.4}] = tokenize(" 12.3 23.4 "), + OversizedFloat = string:copies("9", 310) ++ ".0", + {1, "invalid float number ", OversizedFloat} = tokenize_error(OversizedFloat). comments_test() -> - [{number, 1, 1},{eol, 1, newline},{number,2,2}] = tokenize("1 # Comment\n2"). + [{number, {1, 1, 2}, 1}, {eol, {1, 3, 4}}, {number, {2, 1, 2}, 2}] = tokenize("1 # Comment\n2"). identifier_test() -> - [{identifier,1,abc}] = tokenize("abc "), - [{identifier,1,'abc?'}] = tokenize("abc?"), - [{identifier,1,'abc!'}] = tokenize("abc!"), - [{identifier,1,'a0c!'}] = tokenize("a0c!"), - [{paren_identifier,1,'a0c'},{'(',1},{')',1}] = tokenize("a0c()"), - [{paren_identifier,1,'a0c!'},{'(',1},{')',1}] = tokenize("a0c!()"). + [{identifier, {1, 1, 4}, abc}] = tokenize("abc "), + [{identifier, {1, 1, 5}, 'abc?'}] = tokenize("abc?"), + [{identifier, {1, 1, 5}, 'abc!'}] = tokenize("abc!"), + [{identifier, {1, 1, 5}, 'a0c!'}] = tokenize("a0c!"), + [{paren_identifier, {1, 1, 4}, 'a0c'}, {'(', {1, 4, 5}}, {')', {1, 5, 6}}] = tokenize("a0c()"), + [{paren_identifier, {1, 1, 5}, 'a0c!'}, {'(', {1, 5, 6}}, {')', {1, 6, 7}}] = tokenize("a0c!()"). module_macro_test() -> - [{identifier,1,'__MODULE__'}] = tokenize("__MODULE__"). + [{identifier, {1, 1, 11}, '__MODULE__'}] = tokenize("__MODULE__"). triple_dot_test() -> - [{identifier,1,'...'}] = tokenize("..."), - [{'.',1},{identifier,1,'..'}] = tokenize(". .."). + [{identifier, {1, 1, 4}, '...'}] = tokenize("..."), + [{'.', {1, 1, 2}}, {identifier, {1, 3, 5}, '..'}] = tokenize(". .."). dot_test() -> - [{identifier,1,foo}, - {'.',1}, - {identifier,1,bar}, - {'.',1}, - {identifier,1,baz}] = tokenize("foo.bar.baz"). + [{identifier, {1, 1, 4}, foo}, + {'.', {1, 4, 5}}, + {identifier, {1, 5, 8}, bar}, + {'.', {1, 8, 9}}, + {identifier, {1, 9, 12}, baz}] = tokenize("foo.bar.baz"). dot_keyword_test() -> - [{identifier,1,foo}, - {'.',1}, - {identifier,1,do}] = tokenize("foo.do"). + [{identifier, {1, 1, 4}, foo}, + {'.', {1, 4, 5}}, + {identifier, {1, 5, 7}, do}] = tokenize("foo.do"). newline_test() -> - [{identifier,1,foo}, - {'.',2}, - {identifier,2,bar}] = tokenize("foo\n.bar"), - [{number,1,1}, - {two_op,2,'++'}, - {number,2,2}] = tokenize("1\n++2"). + [{identifier, {1, 1, 4}, foo}, + {'.', {2, 1, 2}}, + {identifier, {2, 2, 5}, bar}] = tokenize("foo\n.bar"), + [{number, {1, 1, 2}, 1}, + {two_op, {2, 1, 3}, '++'}, + {number, {2, 3, 4}, 2}] = tokenize("1\n++2"). + +dot_newline_operator_test() -> + [{identifier, {1, 1, 4}, foo}, + {'.', {2, 4, 5}}, + {identifier, {2, 1, 2}, '+'}, + {number, {2, 2, 3}, 1}] = tokenize("foo.\n+1"), + [{identifier, {1, 1, 4}, foo}, + {'.', {2, 4, 5}}, + {identifier, {2, 1, 2}, '+'}, + {number, {2, 2, 3}, 1}] = tokenize("foo.#bar\n+1"). aliases_test() -> - [{'aliases',1,['Foo']}] = tokenize("Foo"), - [{'aliases',1,['Foo']}, - {'.',1}, - {'aliases',1,['Bar']}, - {'.',1}, - {'aliases',1,['Baz']}] = tokenize("Foo.Bar.Baz"). + [{'aliases', {1, 1, 4}, ['Foo']}] = tokenize("Foo"), + [{'aliases', {1, 1, 4}, ['Foo']}, + {'.', {1, 4, 5}}, + {'aliases', {1, 5, 8}, ['Bar']}, + {'.', {1, 8, 9}}, + {'aliases', {1, 9, 12}, ['Baz']}] = tokenize("Foo.Bar.Baz"). string_test() -> - [{bin_string,1,[<<"foo">>]}] = tokenize("\"foo\""), - [{list_string,1,[<<"foo">>]}] = tokenize("'foo'"). 
+ [{bin_string, {1, 1, 6}, [<<"foo">>]}] = tokenize("\"foo\""), + [{bin_string, {1, 1, 6}, [<<"f\"">>]}] = tokenize("\"f\\\"\""), + [{list_string, {1, 1, 6}, [<<"foo">>]}] = tokenize("'foo'"). empty_string_test() -> - [{bin_string,1,[<<>>]}] = tokenize("\"\""), - [{list_string,1,[<<>>]}] = tokenize("''"). + [{bin_string, {1, 1, 3}, [<<>>]}] = tokenize("\"\""), + [{list_string, {1, 1, 3}, [<<>>]}] = tokenize("''"). addadd_test() -> - [{identifier,1,x},{two_op,1,'++'},{identifier,1,y}] = tokenize("x ++ y"). + [{identifier, {1, 1, 2}, x}, {two_op, {1, 3, 5}, '++'}, {identifier, {1, 6, 7}, y}] = tokenize("x ++ y"). + +space_test() -> + [{op_identifier, {1, 1, 4}, foo}, {dual_op, {1, 5, 6}, '-'}, {number, {1, 6, 7}, 2}] = tokenize("foo -2"), + [{op_identifier, {1, 1, 4}, foo}, {dual_op, {1, 6, 7}, '-'}, {number, {1, 7, 8}, 2}] = tokenize("foo -2"). chars_test() -> - [{number,1,97}] = tokenize("?a"), - [{number,1,99}] = tokenize("?c"), - [{number,1,7}] = tokenize("?\\a"), - [{number,1,10}] = tokenize("?\\n"), - [{number,1,92}] = tokenize("?\\\\"), - [{number,1,10}] = tokenize("?\\xa"), - [{number,1,26}] = tokenize("?\\X1a"), - [{number,1,6}] = tokenize("?\\6"), - [{number,1,49}] = tokenize("?\\61"), - [{number,1,255}] = tokenize("?\\377"), - [{number,1,10}] = tokenize("?\\x{a}"), - [{number,1,171}] = tokenize("?\\x{ab}"), - [{number,1,2748}] = tokenize("?\\x{abc}"), - [{number,1,43981}] = tokenize("?\\x{abcd}"), - [{number,1,703710}] = tokenize("?\\x{abcde}"), - [{number,1,1092557}] = tokenize("?\\x{10abcd}"). + [{char, {1, 1, 3}, 97}] = tokenize("?a"), + [{char, {1, 1, 3}, 99}] = tokenize("?c"), + [{char, {1, 1, 4}, 0}] = tokenize("?\\0"), + [{char, {1, 1, 4}, 7}] = tokenize("?\\a"), + [{char, {1, 1, 4}, 10}] = tokenize("?\\n"), + [{char, {1, 1, 4}, 92}] = tokenize("?\\\\"). + +interpolation_test() -> + [{bin_string, {1, 1, 9}, [<<"f">>, + {{1, 3, 8}, [{identifier, {1, 5, 7}, oo}]}]}, + {two_op, {1, 10, 12}, '<>'}, {bin_string, {1, 13, 15}, + [<<>>]}] = tokenize("\"f#{oo}\" <> \"\""). + +capture_test() -> + [{capture_op, {1, 1, 2}, '&'}, + {identifier, {1, 2, 4}, '||'}, + {mult_op, {1, 4, 5}, '/'}, + {number, {1, 5, 6}, 2}] = tokenize("&||/2"), + [{capture_op, {1, 1, 2}, '&'}, + {identifier, {1, 2, 4}, 'or'}, + {mult_op, {1, 4, 5}, '/'}, + {number, {1, 5, 6}, 2}] = tokenize("&or/2"), + [{capture_op,{1,1,2},'&'}, + {unary_op,{1,2,5},'not'}, + {number,{1,6,7},1}, + {',',{1,7,8}}, + {number,{1,9,10},2}] = tokenize("¬ 1, 2"). + +vc_merge_conflict_test() -> + {1, "found an unexpected version control marker, please resolve the conflicts: ", "<<<<<<< HEAD"} = + tokenize_error("<<<<<<< HEAD\n[1, 2, 3]"). + +invalid_sigil_delimiter_test() -> + {1, "invalid sigil delimiter: ", Message} = tokenize_error("~s\\"), + true = lists:prefix("\"\\\" (column 3, codepoint U+005C)", lists:flatten(Message)). diff --git a/lib/elixir/unicode/CompositionExclusions.txt b/lib/elixir/unicode/CompositionExclusions.txt new file mode 100644 index 00000000000..1999ed13283 --- /dev/null +++ b/lib/elixir/unicode/CompositionExclusions.txt @@ -0,0 +1,208 @@ +# CompositionExclusions-9.0.0.txt +# Date: 2016-01-21, 22:00:00 GMT [KW, LI] +# © 2016 Unicode®, Inc. +# For terms of use, see http://www.unicode.org/terms_of_use.html +# +# Unicode Character Database +# For documentation, see http://www.unicode.org/reports/tr44/ +# +# This file lists the characters for the Composition Exclusion Table +# defined in UAX #15, Unicode Normalization Forms. +# +# This file is a normative contributory data file in the +# Unicode Character Database. 
+# +# For more information, see +# http://www.unicode.org/unicode/reports/tr15/#Primary_Exclusion_List_Table +# +# For a full derivation of composition exclusions, see the derived property +# Full_Composition_Exclusion in DerivedNormalizationProps.txt +# + +# ================================================ +# (1) Script Specifics +# +# This list of characters cannot be derived from the UnicodeData.txt file. +# ================================================ + +0958 # DEVANAGARI LETTER QA +0959 # DEVANAGARI LETTER KHHA +095A # DEVANAGARI LETTER GHHA +095B # DEVANAGARI LETTER ZA +095C # DEVANAGARI LETTER DDDHA +095D # DEVANAGARI LETTER RHA +095E # DEVANAGARI LETTER FA +095F # DEVANAGARI LETTER YYA +09DC # BENGALI LETTER RRA +09DD # BENGALI LETTER RHA +09DF # BENGALI LETTER YYA +0A33 # GURMUKHI LETTER LLA +0A36 # GURMUKHI LETTER SHA +0A59 # GURMUKHI LETTER KHHA +0A5A # GURMUKHI LETTER GHHA +0A5B # GURMUKHI LETTER ZA +0A5E # GURMUKHI LETTER FA +0B5C # ORIYA LETTER RRA +0B5D # ORIYA LETTER RHA +0F43 # TIBETAN LETTER GHA +0F4D # TIBETAN LETTER DDHA +0F52 # TIBETAN LETTER DHA +0F57 # TIBETAN LETTER BHA +0F5C # TIBETAN LETTER DZHA +0F69 # TIBETAN LETTER KSSA +0F76 # TIBETAN VOWEL SIGN VOCALIC R +0F78 # TIBETAN VOWEL SIGN VOCALIC L +0F93 # TIBETAN SUBJOINED LETTER GHA +0F9D # TIBETAN SUBJOINED LETTER DDHA +0FA2 # TIBETAN SUBJOINED LETTER DHA +0FA7 # TIBETAN SUBJOINED LETTER BHA +0FAC # TIBETAN SUBJOINED LETTER DZHA +0FB9 # TIBETAN SUBJOINED LETTER KSSA +FB1D # HEBREW LETTER YOD WITH HIRIQ +FB1F # HEBREW LIGATURE YIDDISH YOD YOD PATAH +FB2A # HEBREW LETTER SHIN WITH SHIN DOT +FB2B # HEBREW LETTER SHIN WITH SIN DOT +FB2C # HEBREW LETTER SHIN WITH DAGESH AND SHIN DOT +FB2D # HEBREW LETTER SHIN WITH DAGESH AND SIN DOT +FB2E # HEBREW LETTER ALEF WITH PATAH +FB2F # HEBREW LETTER ALEF WITH QAMATS +FB30 # HEBREW LETTER ALEF WITH MAPIQ +FB31 # HEBREW LETTER BET WITH DAGESH +FB32 # HEBREW LETTER GIMEL WITH DAGESH +FB33 # HEBREW LETTER DALET WITH DAGESH +FB34 # HEBREW LETTER HE WITH MAPIQ +FB35 # HEBREW LETTER VAV WITH DAGESH +FB36 # HEBREW LETTER ZAYIN WITH DAGESH +FB38 # HEBREW LETTER TET WITH DAGESH +FB39 # HEBREW LETTER YOD WITH DAGESH +FB3A # HEBREW LETTER FINAL KAF WITH DAGESH +FB3B # HEBREW LETTER KAF WITH DAGESH +FB3C # HEBREW LETTER LAMED WITH DAGESH +FB3E # HEBREW LETTER MEM WITH DAGESH +FB40 # HEBREW LETTER NUN WITH DAGESH +FB41 # HEBREW LETTER SAMEKH WITH DAGESH +FB43 # HEBREW LETTER FINAL PE WITH DAGESH +FB44 # HEBREW LETTER PE WITH DAGESH +FB46 # HEBREW LETTER TSADI WITH DAGESH +FB47 # HEBREW LETTER QOF WITH DAGESH +FB48 # HEBREW LETTER RESH WITH DAGESH +FB49 # HEBREW LETTER SHIN WITH DAGESH +FB4A # HEBREW LETTER TAV WITH DAGESH +FB4B # HEBREW LETTER VAV WITH HOLAM +FB4C # HEBREW LETTER BET WITH RAFE +FB4D # HEBREW LETTER KAF WITH RAFE +FB4E # HEBREW LETTER PE WITH RAFE + +# Total code points: 67 + +# ================================================ +# (2) Post Composition Version precomposed characters +# +# These characters cannot be derived solely from the UnicodeData.txt file +# in this version of Unicode. +# +# Note that characters added to the standard after the +# Composition Version and which have canonical decomposition mappings +# are not automatically added to this list of Post Composition +# Version precomposed characters. 
+# ================================================ + +2ADC # FORKING +1D15E # MUSICAL SYMBOL HALF NOTE +1D15F # MUSICAL SYMBOL QUARTER NOTE +1D160 # MUSICAL SYMBOL EIGHTH NOTE +1D161 # MUSICAL SYMBOL SIXTEENTH NOTE +1D162 # MUSICAL SYMBOL THIRTY-SECOND NOTE +1D163 # MUSICAL SYMBOL SIXTY-FOURTH NOTE +1D164 # MUSICAL SYMBOL ONE HUNDRED TWENTY-EIGHTH NOTE +1D1BB # MUSICAL SYMBOL MINIMA +1D1BC # MUSICAL SYMBOL MINIMA BLACK +1D1BD # MUSICAL SYMBOL SEMIMINIMA WHITE +1D1BE # MUSICAL SYMBOL SEMIMINIMA BLACK +1D1BF # MUSICAL SYMBOL FUSA WHITE +1D1C0 # MUSICAL SYMBOL FUSA BLACK + +# Total code points: 14 + +# ================================================ +# (3) Singleton Decompositions +# +# These characters can be derived from the UnicodeData.txt file +# by including all canonically decomposable characters whose +# canonical decomposition consists of a single character. +# +# These characters are simply quoted here for reference. +# See also Full_Composition_Exclusion in DerivedNormalizationProps.txt +# ================================================ + +# 0340..0341 [2] COMBINING GRAVE TONE MARK..COMBINING ACUTE TONE MARK +# 0343 COMBINING GREEK KORONIS +# 0374 GREEK NUMERAL SIGN +# 037E GREEK QUESTION MARK +# 0387 GREEK ANO TELEIA +# 1F71 GREEK SMALL LETTER ALPHA WITH OXIA +# 1F73 GREEK SMALL LETTER EPSILON WITH OXIA +# 1F75 GREEK SMALL LETTER ETA WITH OXIA +# 1F77 GREEK SMALL LETTER IOTA WITH OXIA +# 1F79 GREEK SMALL LETTER OMICRON WITH OXIA +# 1F7B GREEK SMALL LETTER UPSILON WITH OXIA +# 1F7D GREEK SMALL LETTER OMEGA WITH OXIA +# 1FBB GREEK CAPITAL LETTER ALPHA WITH OXIA +# 1FBE GREEK PROSGEGRAMMENI +# 1FC9 GREEK CAPITAL LETTER EPSILON WITH OXIA +# 1FCB GREEK CAPITAL LETTER ETA WITH OXIA +# 1FD3 GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA +# 1FDB GREEK CAPITAL LETTER IOTA WITH OXIA +# 1FE3 GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA +# 1FEB GREEK CAPITAL LETTER UPSILON WITH OXIA +# 1FEE..1FEF [2] GREEK DIALYTIKA AND OXIA..GREEK VARIA +# 1FF9 GREEK CAPITAL LETTER OMICRON WITH OXIA +# 1FFB GREEK CAPITAL LETTER OMEGA WITH OXIA +# 1FFD GREEK OXIA +# 2000..2001 [2] EN QUAD..EM QUAD +# 2126 OHM SIGN +# 212A..212B [2] KELVIN SIGN..ANGSTROM SIGN +# 2329 LEFT-POINTING ANGLE BRACKET +# 232A RIGHT-POINTING ANGLE BRACKET +# F900..FA0D [270] CJK COMPATIBILITY IDEOGRAPH-F900..CJK COMPATIBILITY IDEOGRAPH-FA0D +# FA10 CJK COMPATIBILITY IDEOGRAPH-FA10 +# FA12 CJK COMPATIBILITY IDEOGRAPH-FA12 +# FA15..FA1E [10] CJK COMPATIBILITY IDEOGRAPH-FA15..CJK COMPATIBILITY IDEOGRAPH-FA1E +# FA20 CJK COMPATIBILITY IDEOGRAPH-FA20 +# FA22 CJK COMPATIBILITY IDEOGRAPH-FA22 +# FA25..FA26 [2] CJK COMPATIBILITY IDEOGRAPH-FA25..CJK COMPATIBILITY IDEOGRAPH-FA26 +# FA2A..FA6D [68] CJK COMPATIBILITY IDEOGRAPH-FA2A..CJK COMPATIBILITY IDEOGRAPH-FA6D +# FA70..FAD9 [106] CJK COMPATIBILITY IDEOGRAPH-FA70..CJK COMPATIBILITY IDEOGRAPH-FAD9 +# 2F800..2FA1D [542] CJK COMPATIBILITY IDEOGRAPH-2F800..CJK COMPATIBILITY IDEOGRAPH-2FA1D + +# Total code points: 1035 + +# ================================================ +# (4) Non-Starter Decompositions +# +# These characters can be derived from the UnicodeData.txt file +# by including each expanding canonical decomposition +# (i.e., those which canonically decompose to a sequence +# of characters instead of a single character), such that: +# +# A. The character is not a Starter. +# +# OR (inclusive) +# +# B. The character's canonical decomposition begins +# with a character that is not a Starter. +# +# Note that a "Starter" is any character with a zero combining class. 
+# +# These characters are simply quoted here for reference. +# See also Full_Composition_Exclusion in DerivedNormalizationProps.txt +# ================================================ + +# 0344 COMBINING GREEK DIALYTIKA TONOS +# 0F73 TIBETAN VOWEL SIGN II +# 0F75 TIBETAN VOWEL SIGN UU +# 0F81 TIBETAN VOWEL SIGN REVERSED II + +# Total code points: 4 + +# EOF diff --git a/lib/elixir/unicode/GraphemeBreakProperty.txt b/lib/elixir/unicode/GraphemeBreakProperty.txt index f13970a2567..e7d36e228d6 100644 --- a/lib/elixir/unicode/GraphemeBreakProperty.txt +++ b/lib/elixir/unicode/GraphemeBreakProperty.txt @@ -1,14 +1,53 @@ +# GraphemeBreakProperty-9.0.0.txt +# Date: 2016-06-03, 22:23:55 GMT +# © 2016 Unicode®, Inc. +# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. +# For terms of use, see http://www.unicode.org/terms_of_use.html +# +# Unicode Character Database +# For documentation, see http://www.unicode.org/reports/tr44/ + +# ================================================ + +# Property: Grapheme_Cluster_Break + +# All code points not explicitly listed for Grapheme_Cluster_Break +# have the value Other (XX). + +# @missing: 0000..10FFFF; Other + +# ================================================ + +0600..0605 ; Prepend # Cf [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE +06DD ; Prepend # Cf ARABIC END OF AYAH +070F ; Prepend # Cf SYRIAC ABBREVIATION MARK +08E2 ; Prepend # Cf ARABIC DISPUTED END OF AYAH +0D4E ; Prepend # Lo MALAYALAM LETTER DOT REPH +110BD ; Prepend # Cf KAITHI NUMBER SIGN +111C2..111C3 ; Prepend # Lo [2] SHARADA SIGN JIHVAMULIYA..SHARADA SIGN UPADHMANIYA + +# Total code points: 13 + +# ================================================ + 000D ; CR # Cc + +# Total code points: 1 + +# ================================================ + 000A ; LF # Cc + +# Total code points: 1 + +# ================================================ + 0000..0009 ; Control # Cc [10] .. 000B..000C ; Control # Cc [2] .. 000E..001F ; Control # Cc [18] .. 007F..009F ; Control # Cc [33] .. 00AD ; Control # Cf SOFT HYPHEN -0600..0605 ; Control # Cf [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE 061C ; Control # Cf ARABIC LETTER MARK -06DD ; Control # Cf ARABIC END OF AYAH -070F ; Control # Cf SYRIAC ABBREVIATION MARK 180E ; Control # Cf MONGOLIAN VOWEL SEPARATOR 200B ; Control # Cf ZERO WIDTH SPACE 200E..200F ; Control # Cf [2] LEFT-TO-RIGHT MARK..RIGHT-TO-LEFT MARK @@ -22,15 +61,18 @@ D800..DFFF ; Control # Cs [2048] .. FEFF ; Control # Cf ZERO WIDTH NO-BREAK SPACE FFF0..FFF8 ; Control # Cn [9] .. FFF9..FFFB ; Control # Cf [3] INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR -110BD ; Control # Cf KAITHI NUMBER SIGN 1BCA0..1BCA3 ; Control # Cf [4] SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP 1D173..1D17A ; Control # Cf [8] MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE E0000 ; Control # Cn E0001 ; Control # Cf LANGUAGE TAG E0002..E001F ; Control # Cn [30] .. -E0020..E007F ; Control # Cf [96] TAG SPACE..CANCEL TAG E0080..E00FF ; Control # Cn [128] .. E01F0..E0FFF ; Control # Cn [3600] .. + +# Total code points: 5925 + +# ================================================ + 0300..036F ; Extend # Mn [112] COMBINING GRAVE ACCENT..COMBINING LATIN SMALL LETTER X 0483..0487 ; Extend # Mn [5] COMBINING CYRILLIC TITLO..COMBINING CYRILLIC POKRYTIE 0488..0489 ; Extend # Me [2] COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN @@ -55,7 +97,8 @@ E01F0..E0FFF ; Control # Cn [3600] .. 
0825..0827 ; Extend # Mn [3] SAMARITAN VOWEL SIGN SHORT A..SAMARITAN VOWEL SIGN U 0829..082D ; Extend # Mn [5] SAMARITAN VOWEL SIGN LONG I..SAMARITAN MARK NEQUDAA 0859..085B ; Extend # Mn [3] MANDAIC AFFRICATION MARK..MANDAIC GEMINATION MARK -08E4..0902 ; Extend # Mn [31] ARABIC CURLY FATHA..DEVANAGARI SIGN ANUSVARA +08D4..08E1 ; Extend # Mn [14] ARABIC SMALL HIGH WORD AR-RUB..ARABIC SMALL HIGH SIGN SAFHA +08E3..0902 ; Extend # Mn [32] ARABIC TURNED DAMMA BELOW..DEVANAGARI SIGN ANUSVARA 093A ; Extend # Mn DEVANAGARI VOWEL SIGN OE 093C ; Extend # Mn DEVANAGARI SIGN NUKTA 0941..0948 ; Extend # Mn [8] DEVANAGARI VOWEL SIGN U..DEVANAGARI VOWEL SIGN AI @@ -161,6 +204,7 @@ E01F0..E0FFF ; Control # Cn [3600] .. 17C9..17D3 ; Extend # Mn [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN BATHAMASAT 17DD ; Extend # Mn KHMER SIGN ATTHACAN 180B..180D ; Extend # Mn [3] MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE +1885..1886 ; Extend # Mn [2] MONGOLIAN LETTER ALI GALI BALUDA..MONGOLIAN LETTER ALI GALI THREE BALUDA 18A9 ; Extend # Mn MONGOLIAN LETTER ALI GALI DAGALGA 1920..1922 ; Extend # Mn [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SIGN U 1927..1928 ; Extend # Mn [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SIGN O @@ -200,8 +244,8 @@ E01F0..E0FFF ; Control # Cn [3600] .. 1CF4 ; Extend # Mn VEDIC TONE CANDRA ABOVE 1CF8..1CF9 ; Extend # Mn [2] VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE 1DC0..1DF5 ; Extend # Mn [54] COMBINING DOTTED GRAVE ACCENT..COMBINING UP TACK ABOVE -1DFC..1DFF ; Extend # Mn [4] COMBINING DOUBLE INVERTED BREVE BELOW..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW -200C..200D ; Extend # Cf [2] ZERO WIDTH NON-JOINER..ZERO WIDTH JOINER +1DFB..1DFF ; Extend # Mn [5] COMBINING DELETION MARK..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW +200C ; Extend # Cf ZERO WIDTH NON-JOINER 20D0..20DC ; Extend # Mn [13] COMBINING LEFT HARPOON ABOVE..COMBINING FOUR DOTS ABOVE 20DD..20E0 ; Extend # Me [4] COMBINING ENCLOSING CIRCLE..COMBINING ENCLOSING CIRCLE BACKSLASH 20E1 ; Extend # Mn COMBINING LEFT RIGHT ARROW ABOVE @@ -216,13 +260,13 @@ E01F0..E0FFF ; Control # Cn [3600] .. 
A66F ; Extend # Mn COMBINING CYRILLIC VZMET A670..A672 ; Extend # Me [3] COMBINING CYRILLIC TEN MILLIONS SIGN..COMBINING CYRILLIC THOUSAND MILLIONS SIGN A674..A67D ; Extend # Mn [10] COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC PAYEROK -A69F ; Extend # Mn COMBINING CYRILLIC LETTER IOTIFIED E +A69E..A69F ; Extend # Mn [2] COMBINING CYRILLIC LETTER EF..COMBINING CYRILLIC LETTER IOTIFIED E A6F0..A6F1 ; Extend # Mn [2] BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS A802 ; Extend # Mn SYLOTI NAGRI SIGN DVISVARA A806 ; Extend # Mn SYLOTI NAGRI SIGN HASANTA A80B ; Extend # Mn SYLOTI NAGRI SIGN ANUSVARA A825..A826 ; Extend # Mn [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI NAGRI VOWEL SIGN E -A8C4 ; Extend # Mn SAURASHTRA SIGN VIRAMA +A8C4..A8C5 ; Extend # Mn [2] SAURASHTRA SIGN VIRAMA..SAURASHTRA SIGN CANDRABINDU A8E0..A8F1 ; Extend # Mn [18] COMBINING DEVANAGARI DIGIT ZERO..COMBINING DEVANAGARI SIGN AVAGRAHA A926..A92D ; Extend # Mn [8] KAYAH LI VOWEL UE..KAYAH LI TONE CALYA PLOPHU A947..A951 ; Extend # Mn [11] REJANG VOWEL SIGN I..REJANG CONSONANT SIGN R @@ -249,7 +293,7 @@ ABE8 ; Extend # Mn MEETEI MAYEK VOWEL SIGN UNAP ABED ; Extend # Mn MEETEI MAYEK APUN IYEK FB1E ; Extend # Mn HEBREW POINT JUDEO-SPANISH VARIKA FE00..FE0F ; Extend # Mn [16] VARIATION SELECTOR-1..VARIATION SELECTOR-16 -FE20..FE2D ; Extend # Mn [14] COMBINING LIGATURE LEFT HALF..COMBINING CONJOINING MACRON BELOW +FE20..FE2F ; Extend # Mn [16] COMBINING LIGATURE LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF FF9E..FF9F ; Extend # Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK 101FD ; Extend # Mn PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE 102E0 ; Extend # Mn COPTIC EPACT THOUSANDS MARK @@ -271,18 +315,23 @@ FF9E..FF9F ; Extend # Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDT 11173 ; Extend # Mn MAHAJANI SIGN NUKTA 11180..11181 ; Extend # Mn [2] SHARADA SIGN CANDRABINDU..SHARADA SIGN ANUSVARA 111B6..111BE ; Extend # Mn [9] SHARADA VOWEL SIGN U..SHARADA VOWEL SIGN O +111CA..111CC ; Extend # Mn [3] SHARADA SIGN NUKTA..SHARADA EXTRA SHORT VOWEL MARK 1122F..11231 ; Extend # Mn [3] KHOJKI VOWEL SIGN U..KHOJKI VOWEL SIGN AI 11234 ; Extend # Mn KHOJKI SIGN ANUSVARA 11236..11237 ; Extend # Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA +1123E ; Extend # Mn KHOJKI SIGN SUKUN 112DF ; Extend # Mn KHUDAWADI SIGN ANUSVARA 112E3..112EA ; Extend # Mn [8] KHUDAWADI VOWEL SIGN U..KHUDAWADI SIGN VIRAMA -11301 ; Extend # Mn GRANTHA SIGN CANDRABINDU +11300..11301 ; Extend # Mn [2] GRANTHA SIGN COMBINING ANUSVARA ABOVE..GRANTHA SIGN CANDRABINDU 1133C ; Extend # Mn GRANTHA SIGN NUKTA 1133E ; Extend # Mc GRANTHA VOWEL SIGN AA 11340 ; Extend # Mn GRANTHA VOWEL SIGN II 11357 ; Extend # Mc GRANTHA AU LENGTH MARK 11366..1136C ; Extend # Mn [7] COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX 11370..11374 ; Extend # Mn [5] COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA +11438..1143F ; Extend # Mn [8] NEWA VOWEL SIGN U..NEWA VOWEL SIGN AI +11442..11444 ; Extend # Mn [3] NEWA SIGN VIRAMA..NEWA SIGN ANUSVARA +11446 ; Extend # Mn NEWA SIGN NUKTA 114B0 ; Extend # Mc TIRHUTA VOWEL SIGN AA 114B3..114B8 ; Extend # Mn [6] TIRHUTA VOWEL SIGN U..TIRHUTA VOWEL SIGN VOCALIC LL 114BA ; Extend # Mn TIRHUTA VOWEL SIGN SHORT E @@ -293,6 +342,7 @@ FF9E..FF9F ; Extend # Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDT 115B2..115B5 ; Extend # Mn [4] SIDDHAM VOWEL SIGN U..SIDDHAM VOWEL SIGN VOCALIC RR 115BC..115BD ; Extend # Mn [2] SIDDHAM SIGN CANDRABINDU..SIDDHAM SIGN ANUSVARA 
115BF..115C0 ; Extend # Mn [2] SIDDHAM SIGN VIRAMA..SIDDHAM SIGN NUKTA +115DC..115DD ; Extend # Mn [2] SIDDHAM VOWEL SIGN ALTERNATE U..SIDDHAM VOWEL SIGN ALTERNATE UU 11633..1163A ; Extend # Mn [8] MODI VOWEL SIGN U..MODI VOWEL SIGN AI 1163D ; Extend # Mn MODI SIGN ANUSVARA 1163F..11640 ; Extend # Mn [2] MODI SIGN VIRAMA..MODI SIGN ARDHACANDRA @@ -300,6 +350,16 @@ FF9E..FF9F ; Extend # Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDT 116AD ; Extend # Mn TAKRI VOWEL SIGN AA 116B0..116B5 ; Extend # Mn [6] TAKRI VOWEL SIGN U..TAKRI VOWEL SIGN AU 116B7 ; Extend # Mn TAKRI SIGN NUKTA +1171D..1171F ; Extend # Mn [3] AHOM CONSONANT SIGN MEDIAL LA..AHOM CONSONANT SIGN MEDIAL LIGATING RA +11722..11725 ; Extend # Mn [4] AHOM VOWEL SIGN I..AHOM VOWEL SIGN UU +11727..1172B ; Extend # Mn [5] AHOM VOWEL SIGN AW..AHOM SIGN KILLER +11C30..11C36 ; Extend # Mn [7] BHAIKSUKI VOWEL SIGN I..BHAIKSUKI VOWEL SIGN VOCALIC L +11C38..11C3D ; Extend # Mn [6] BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN ANUSVARA +11C3F ; Extend # Mn BHAIKSUKI SIGN VIRAMA +11C92..11CA7 ; Extend # Mn [22] MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA +11CAA..11CB0 ; Extend # Mn [7] MARCHEN SUBJOINED LETTER RA..MARCHEN VOWEL SIGN AA +11CB2..11CB3 ; Extend # Mn [2] MARCHEN VOWEL SIGN U..MARCHEN VOWEL SIGN E +11CB5..11CB6 ; Extend # Mn [2] MARCHEN SIGN ANUSVARA..MARCHEN SIGN CANDRABINDU 16AF0..16AF4 ; Extend # Mn [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE 16B30..16B36 ; Extend # Mn [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM 16F8F..16F92 ; Extend # Mn [4] MIAO TONE RIGHT..MIAO TONE BELOW @@ -311,9 +371,32 @@ FF9E..FF9F ; Extend # Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDT 1D185..1D18B ; Extend # Mn [7] MUSICAL SYMBOL COMBINING DOIT..MUSICAL SYMBOL COMBINING TRIPLE TONGUE 1D1AA..1D1AD ; Extend # Mn [4] MUSICAL SYMBOL COMBINING DOWN BOW..MUSICAL SYMBOL COMBINING SNAP PIZZICATO 1D242..1D244 ; Extend # Mn [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME +1DA00..1DA36 ; Extend # Mn [55] SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN +1DA3B..1DA6C ; Extend # Mn [50] SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT +1DA75 ; Extend # Mn SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS +1DA84 ; Extend # Mn SIGNWRITING LOCATION HEAD NECK +1DA9B..1DA9F ; Extend # Mn [5] SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6 +1DAA1..1DAAF ; Extend # Mn [15] SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16 +1E000..1E006 ; Extend # Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE +1E008..1E018 ; Extend # Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU +1E01B..1E021 ; Extend # Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI +1E023..1E024 ; Extend # Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS +1E026..1E02A ; Extend # Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA 1E8D0..1E8D6 ; Extend # Mn [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS +1E944..1E94A ; Extend # Mn [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA +E0020..E007F ; Extend # Cf [96] TAG SPACE..CANCEL TAG E0100..E01EF ; Extend # Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256 + +# Total code points: 1828 + +# ================================================ + 1F1E6..1F1FF ; Regional_Indicator # So [26] REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z + +# Total code points: 26 + +# 
================================================ + 0903 ; SpacingMark # Mc DEVANAGARI SIGN VISARGA 093B ; SpacingMark # Mc DEVANAGARI VOWEL SIGN OOE 093E..0940 ; SpacingMark # Mc [3] DEVANAGARI VOWEL SIGN AA..DEVANAGARI VOWEL SIGN II @@ -368,8 +451,6 @@ E0100..E01EF ; Extend # Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256 1929..192B ; SpacingMark # Mc [3] LIMBU SUBJOINED LETTER YA..LIMBU SUBJOINED LETTER WA 1930..1931 ; SpacingMark # Mc [2] LIMBU SMALL LETTER KA..LIMBU SMALL LETTER NGA 1933..1938 ; SpacingMark # Mc [6] LIMBU SMALL LETTER TA..LIMBU SMALL LETTER LA -19B5..19B7 ; SpacingMark # Mc [3] NEW TAI LUE VOWEL SIGN E..NEW TAI LUE VOWEL SIGN O -19BA ; SpacingMark # Mc NEW TAI LUE VOWEL SIGN AY 1A19..1A1A ; SpacingMark # Mc [2] BUGINESE VOWEL SIGN E..BUGINESE VOWEL SIGN O 1A55 ; SpacingMark # Mc TAI THAM CONSONANT SIGN MEDIAL RA 1A57 ; SpacingMark # Mc TAI THAM CONSONANT SIGN LA TANG LAI @@ -429,6 +510,9 @@ ABEC ; SpacingMark # Mc MEETEI MAYEK LUM IYEK 11347..11348 ; SpacingMark # Mc [2] GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI 1134B..1134D ; SpacingMark # Mc [3] GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA 11362..11363 ; SpacingMark # Mc [2] GRANTHA VOWEL SIGN VOCALIC L..GRANTHA VOWEL SIGN VOCALIC LL +11435..11437 ; SpacingMark # Mc [3] NEWA VOWEL SIGN AA..NEWA VOWEL SIGN II +11440..11441 ; SpacingMark # Mc [2] NEWA VOWEL SIGN O..NEWA VOWEL SIGN AU +11445 ; SpacingMark # Mc NEWA SIGN VISARGA 114B1..114B2 ; SpacingMark # Mc [2] TIRHUTA VOWEL SIGN I..TIRHUTA VOWEL SIGN II 114B9 ; SpacingMark # Mc TIRHUTA VOWEL SIGN E 114BB..114BC ; SpacingMark # Mc [2] TIRHUTA VOWEL SIGN AI..TIRHUTA VOWEL SIGN O @@ -443,15 +527,42 @@ ABEC ; SpacingMark # Mc MEETEI MAYEK LUM IYEK 116AC ; SpacingMark # Mc TAKRI SIGN VISARGA 116AE..116AF ; SpacingMark # Mc [2] TAKRI VOWEL SIGN I..TAKRI VOWEL SIGN II 116B6 ; SpacingMark # Mc TAKRI SIGN VIRAMA +11720..11721 ; SpacingMark # Mc [2] AHOM VOWEL SIGN A..AHOM VOWEL SIGN AA +11726 ; SpacingMark # Mc AHOM VOWEL SIGN E +11C2F ; SpacingMark # Mc BHAIKSUKI VOWEL SIGN AA +11C3E ; SpacingMark # Mc BHAIKSUKI SIGN VISARGA +11CA9 ; SpacingMark # Mc MARCHEN SUBJOINED LETTER YA +11CB1 ; SpacingMark # Mc MARCHEN VOWEL SIGN I +11CB4 ; SpacingMark # Mc MARCHEN VOWEL SIGN O 16F51..16F7E ; SpacingMark # Mc [46] MIAO SIGN ASPIRATION..MIAO VOWEL SIGN NG 1D166 ; SpacingMark # Mc MUSICAL SYMBOL COMBINING SPRECHGESANG STEM 1D16D ; SpacingMark # Mc MUSICAL SYMBOL COMBINING AUGMENTATION DOT + +# Total code points: 341 + +# ================================================ + 1100..115F ; L # Lo [96] HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG FILLER A960..A97C ; L # Lo [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANGUL CHOSEONG SSANGYEORINHIEUH + +# Total code points: 125 + +# ================================================ + 1160..11A7 ; V # Lo [72] HANGUL JUNGSEONG FILLER..HANGUL JUNGSEONG O-YAE D7B0..D7C6 ; V # Lo [23] HANGUL JUNGSEONG O-YEO..HANGUL JUNGSEONG ARAEA-E + +# Total code points: 95 + +# ================================================ + 11A8..11FF ; T # Lo [88] HANGUL JONGSEONG KIYEOK..HANGUL JONGSEONG SSANGNIEUN D7CB..D7FB ; T # Lo [49] HANGUL JONGSEONG NIEUN-RIEUL..HANGUL JONGSEONG PHIEUPH-THIEUTH + +# Total code points: 137 + +# ================================================ + AC00 ; LV # Lo HANGUL SYLLABLE GA AC1C ; LV # Lo HANGUL SYLLABLE GAE AC38 ; LV # Lo HANGUL SYLLABLE GYA @@ -851,6 +962,11 @@ D734 ; LV # Lo HANGUL SYLLABLE HYU D750 ; LV # Lo HANGUL SYLLABLE HEU D76C ; LV # Lo HANGUL SYLLABLE HYI D788 ; LV # Lo HANGUL SYLLABLE HI + +# Total code points: 399 + +# 
================================================ + AC01..AC1B ; LVT # Lo [27] HANGUL SYLLABLE GAG..HANGUL SYLLABLE GAH AC1D..AC37 ; LVT # Lo [27] HANGUL SYLLABLE GAEG..HANGUL SYLLABLE GAEH AC39..AC53 ; LVT # Lo [27] HANGUL SYLLABLE GYAG..HANGUL SYLLABLE GYAH @@ -1250,3 +1366,66 @@ D735..D74F ; LVT # Lo [27] HANGUL SYLLABLE HYUG..HANGUL SYLLABLE HYUH D751..D76B ; LVT # Lo [27] HANGUL SYLLABLE HEUG..HANGUL SYLLABLE HEUH D76D..D787 ; LVT # Lo [27] HANGUL SYLLABLE HYIG..HANGUL SYLLABLE HYIH D789..D7A3 ; LVT # Lo [27] HANGUL SYLLABLE HIG..HANGUL SYLLABLE HIH + +# Total code points: 10773 + +# ================================================ + +261D ; E_Base # So WHITE UP POINTING INDEX +26F9 ; E_Base # So PERSON WITH BALL +270A..270D ; E_Base # So [4] RAISED FIST..WRITING HAND +1F385 ; E_Base # So FATHER CHRISTMAS +1F3C3..1F3C4 ; E_Base # So [2] RUNNER..SURFER +1F3CA..1F3CB ; E_Base # So [2] SWIMMER..WEIGHT LIFTER +1F442..1F443 ; E_Base # So [2] EAR..NOSE +1F446..1F450 ; E_Base # So [11] WHITE UP POINTING BACKHAND INDEX..OPEN HANDS SIGN +1F46E ; E_Base # So POLICE OFFICER +1F470..1F478 ; E_Base # So [9] BRIDE WITH VEIL..PRINCESS +1F47C ; E_Base # So BABY ANGEL +1F481..1F483 ; E_Base # So [3] INFORMATION DESK PERSON..DANCER +1F485..1F487 ; E_Base # So [3] NAIL POLISH..HAIRCUT +1F4AA ; E_Base # So FLEXED BICEPS +1F575 ; E_Base # So SLEUTH OR SPY +1F57A ; E_Base # So MAN DANCING +1F590 ; E_Base # So RAISED HAND WITH FINGERS SPLAYED +1F595..1F596 ; E_Base # So [2] REVERSED HAND WITH MIDDLE FINGER EXTENDED..RAISED HAND WITH PART BETWEEN MIDDLE AND RING FINGERS +1F645..1F647 ; E_Base # So [3] FACE WITH NO GOOD GESTURE..PERSON BOWING DEEPLY +1F64B..1F64F ; E_Base # So [5] HAPPY PERSON RAISING ONE HAND..PERSON WITH FOLDED HANDS +1F6A3 ; E_Base # So ROWBOAT +1F6B4..1F6B6 ; E_Base # So [3] BICYCLIST..PEDESTRIAN +1F6C0 ; E_Base # So BATH +1F918..1F91E ; E_Base # So [7] SIGN OF THE HORNS..HAND WITH INDEX AND MIDDLE FINGERS CROSSED +1F926 ; E_Base # So FACE PALM +1F930 ; E_Base # So PREGNANT WOMAN +1F933..1F939 ; E_Base # So [7] SELFIE..JUGGLING +1F93C..1F93E ; E_Base # So [3] WRESTLERS..HANDBALL + +# Total code points: 79 + +# ================================================ + +1F3FB..1F3FF ; E_Modifier # Sk [5] EMOJI MODIFIER FITZPATRICK TYPE-1-2..EMOJI MODIFIER FITZPATRICK TYPE-6 + +# Total code points: 5 + +# ================================================ + +200D ; ZWJ # Cf ZERO WIDTH JOINER + +# Total code points: 1 + +# ================================================ + +2764 ; Glue_After_Zwj # So HEAVY BLACK HEART +1F48B ; Glue_After_Zwj # So KISS MARK +1F5E8 ; Glue_After_Zwj # So LEFT SPEECH BUBBLE + +# Total code points: 3 + +# ================================================ + +1F466..1F469 ; E_Base_GAZ # So [4] BOY..WOMAN + +# Total code points: 4 + +# EOF \ No newline at end of file diff --git a/lib/elixir/unicode/GraphemeBreakTest.txt b/lib/elixir/unicode/GraphemeBreakTest.txt new file mode 100644 index 00000000000..0f6a0db7905 --- /dev/null +++ b/lib/elixir/unicode/GraphemeBreakTest.txt @@ -0,0 +1,847 @@ +# GraphemeBreakTest-9.0.0.txt +# Date: 2016-06-02, 18:28:17 GMT +# © 2016 Unicode®, Inc. +# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. +# For terms of use, see http://www.unicode.org/terms_of_use.html +# +# Unicode Character Database +# For documentation, see http://www.unicode.org/reports/tr44/ +# +# Default Grapheme Break Test +# +# Format: +# (# )? 
+# contains hex Unicode code points, with +# ÷ wherever there is a break opportunity, and +# × wherever there is not. +# the format can change, but currently it shows: +# - the sample character name +# - (x) the Grapheme_Cluster_Break property value for the sample character +# - [x] the rule that determines whether there is a break or not +# +# These samples may be extended or changed in the future. +# +÷ 0020 ÷ 0020 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 0020 × 0308 ÷ 0020 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 0020 ÷ 000D ÷ # ÷ [0.2] SPACE (Other) ÷ [5.0] (CR) ÷ [0.3] +÷ 0020 × 0308 ÷ 000D ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 0020 ÷ 000A ÷ # ÷ [0.2] SPACE (Other) ÷ [5.0] (LF) ÷ [0.3] +÷ 0020 × 0308 ÷ 000A ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 0020 ÷ 0001 ÷ # ÷ [0.2] SPACE (Other) ÷ [5.0] (Control) ÷ [0.3] +÷ 0020 × 0308 ÷ 0001 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0020 × 0300 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 0020 × 0308 × 0300 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 0020 ÷ 0600 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 0020 × 0308 ÷ 0600 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 0020 × 0903 ÷ # ÷ [0.2] SPACE (Other) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 0020 × 0308 × 0903 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 0020 ÷ 1100 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 0020 × 0308 ÷ 1100 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 0020 ÷ 1160 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 0020 × 0308 ÷ 1160 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 0020 ÷ 11A8 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 0020 × 0308 ÷ 11A8 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 0020 ÷ AC00 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 0020 × 0308 ÷ AC00 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 0020 ÷ AC01 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 0020 × 0308 ÷ AC01 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 0020 ÷ 1F1E6 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 0020 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 0020 ÷ 261D ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 0020 × 0308 ÷ 261D ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 0020 ÷ 1F3FB ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 0020 × 0308 ÷ 1F3FB ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER 
FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 0020 × 200D ÷ # ÷ [0.2] SPACE (Other) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0020 × 0308 × 200D ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0020 ÷ 2764 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 0020 × 0308 ÷ 2764 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 0020 ÷ 1F466 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 0020 × 0308 ÷ 1F466 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 0020 ÷ 0378 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] (Other) ÷ [0.3] +÷ 0020 × 0308 ÷ 0378 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 0020 ÷ D800 ÷ # ÷ [0.2] SPACE (Other) ÷ [5.0] (Control) ÷ [0.3] +÷ 0020 × 0308 ÷ D800 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 000D ÷ 0020 ÷ # ÷ [0.2] (CR) ÷ [4.0] SPACE (Other) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 000D ÷ 000D ÷ # ÷ [0.2] (CR) ÷ [4.0] (CR) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 000D × 000A ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 000D ÷ 0001 ÷ # ÷ [0.2] (CR) ÷ [4.0] (Control) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 000D ÷ 0300 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 000D ÷ 0308 × 0300 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 000D ÷ 0600 ÷ # ÷ [0.2] (CR) ÷ [4.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 0600 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 000D ÷ 0903 ÷ # ÷ [0.2] (CR) ÷ [4.0] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 000D ÷ 0308 × 0903 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 000D ÷ 1100 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 1100 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 000D ÷ 1160 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 1160 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 000D ÷ 11A8 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 11A8 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 000D ÷ AC00 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 000D ÷ 0308 ÷ AC00 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 000D ÷ AC01 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 000D ÷ 0308 ÷ AC01 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 000D ÷ 1F1E6 ÷ # ÷ [0.2] (CR) ÷ [4.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 000D ÷ 261D ÷ # ÷ [0.2] (CR) ÷ [4.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 
000D ÷ 0308 ÷ 261D ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 000D ÷ 1F3FB ÷ # ÷ [0.2] (CR) ÷ [4.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 1F3FB ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 000D ÷ 200D ÷ # ÷ [0.2] (CR) ÷ [4.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 000D ÷ 0308 × 200D ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 000D ÷ 2764 ÷ # ÷ [0.2] (CR) ÷ [4.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 2764 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 000D ÷ 1F466 ÷ # ÷ [0.2] (CR) ÷ [4.0] BOY (EBG) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 1F466 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 000D ÷ 0378 ÷ # ÷ [0.2] (CR) ÷ [4.0] (Other) ÷ [0.3] +÷ 000D ÷ 0308 ÷ 0378 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 000A ÷ 0020 ÷ # ÷ [0.2] (LF) ÷ [4.0] SPACE (Other) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 000A ÷ 000D ÷ # ÷ [0.2] (LF) ÷ [4.0] (CR) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 000A ÷ 000A ÷ # ÷ [0.2] (LF) ÷ [4.0] (LF) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 000A ÷ 0001 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Control) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 000A ÷ 0300 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 000A ÷ 0308 × 0300 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 000A ÷ 0600 ÷ # ÷ [0.2] (LF) ÷ [4.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 0600 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 000A ÷ 0903 ÷ # ÷ [0.2] (LF) ÷ [4.0] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 000A ÷ 0308 × 0903 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 000A ÷ 1100 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 1100 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 000A ÷ 1160 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 1160 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 000A ÷ 11A8 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 11A8 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 000A ÷ AC00 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 000A ÷ 0308 ÷ AC00 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 000A ÷ AC01 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 000A ÷ 0308 ÷ AC01 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 000A ÷ 1F1E6 ÷ # ÷ [0.2] (LF) ÷ [4.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 000A ÷ 261D ÷ 
# ÷ [0.2] (LF) ÷ [4.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 261D ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 000A ÷ 1F3FB ÷ # ÷ [0.2] (LF) ÷ [4.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 1F3FB ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 000A ÷ 200D ÷ # ÷ [0.2] (LF) ÷ [4.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 000A ÷ 0308 × 200D ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 000A ÷ 2764 ÷ # ÷ [0.2] (LF) ÷ [4.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 2764 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 000A ÷ 1F466 ÷ # ÷ [0.2] (LF) ÷ [4.0] BOY (EBG) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 1F466 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 000A ÷ 0378 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Other) ÷ [0.3] +÷ 000A ÷ 0308 ÷ 0378 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 000A ÷ D800 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Control) ÷ [0.3] +÷ 000A ÷ 0308 ÷ D800 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0001 ÷ 0020 ÷ # ÷ [0.2] (Control) ÷ [4.0] SPACE (Other) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 0001 ÷ 000D ÷ # ÷ [0.2] (Control) ÷ [4.0] (CR) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 0001 ÷ 000A ÷ # ÷ [0.2] (Control) ÷ [4.0] (LF) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 0001 ÷ 0001 ÷ # ÷ [0.2] (Control) ÷ [4.0] (Control) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0001 ÷ 0300 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 0001 ÷ 0308 × 0300 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 0001 ÷ 0600 ÷ # ÷ [0.2] (Control) ÷ [4.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 0600 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 0001 ÷ 0903 ÷ # ÷ [0.2] (Control) ÷ [4.0] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 0001 ÷ 0308 × 0903 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 0001 ÷ 1100 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 1100 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 0001 ÷ 1160 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 1160 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 0001 ÷ 11A8 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 11A8 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 0001 ÷ AC00 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ AC00 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 0001 ÷ AC01 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 
AC01 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 0001 ÷ 1F1E6 ÷ # ÷ [0.2] (Control) ÷ [4.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 0001 ÷ 261D ÷ # ÷ [0.2] (Control) ÷ [4.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 261D ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 0001 ÷ 1F3FB ÷ # ÷ [0.2] (Control) ÷ [4.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 1F3FB ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 0001 ÷ 200D ÷ # ÷ [0.2] (Control) ÷ [4.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0001 ÷ 0308 × 200D ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0001 ÷ 2764 ÷ # ÷ [0.2] (Control) ÷ [4.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 2764 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 0001 ÷ 1F466 ÷ # ÷ [0.2] (Control) ÷ [4.0] BOY (EBG) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 1F466 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 0001 ÷ 0378 ÷ # ÷ [0.2] (Control) ÷ [4.0] (Other) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ 0378 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 0001 ÷ D800 ÷ # ÷ [0.2] (Control) ÷ [4.0] (Control) ÷ [0.3] +÷ 0001 ÷ 0308 ÷ D800 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0300 ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 0300 × 0308 ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 0300 ÷ 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 0300 × 0308 ÷ 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 0300 ÷ 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 0300 × 0308 ÷ 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 0300 ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0300 × 0308 ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0300 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 0300 × 0308 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 0300 ÷ 0600 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 0300 × 0308 ÷ 0600 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 0300 × 0903 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 0300 × 0308 × 0903 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 0300 ÷ 1100 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 0300 × 0308 ÷ 1100 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] 
COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 0300 ÷ 1160 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 0300 × 0308 ÷ 1160 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 0300 ÷ 11A8 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 0300 × 0308 ÷ 11A8 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 0300 ÷ AC00 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 0300 × 0308 ÷ AC00 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 0300 ÷ AC01 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 0300 × 0308 ÷ AC01 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 0300 ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 0300 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 0300 ÷ 261D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 0300 × 0308 ÷ 261D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 0300 ÷ 1F3FB ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 0300 × 0308 ÷ 1F3FB ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 0300 × 200D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0300 × 0308 × 200D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0300 ÷ 2764 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 0300 × 0308 ÷ 2764 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 0300 ÷ 1F466 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 0300 × 0308 ÷ 1F466 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 0300 ÷ 0378 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 0300 × 0308 ÷ 0378 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 0300 ÷ D800 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0300 × 0308 ÷ D800 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0600 × 0020 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] SPACE (Other) ÷ [0.3] +÷ 0600 × 0308 ÷ 0020 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 0600 ÷ 000D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) ÷ [5.0] (CR) ÷ [0.3] +÷ 0600 × 0308 ÷ 000D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 0600 ÷ 000A ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) ÷ 
[5.0] (LF) ÷ [0.3] +÷ 0600 × 0308 ÷ 000A ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 0600 ÷ 0001 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0600 × 0308 ÷ 0001 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0600 × 0300 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 0600 × 0308 × 0300 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 0600 × 0600 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 0600 × 0308 ÷ 0600 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 0600 × 0903 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 0600 × 0308 × 0903 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 0600 × 1100 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 0600 × 0308 ÷ 1100 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 0600 × 1160 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 0600 × 0308 ÷ 1160 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 0600 × 11A8 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 0600 × 0308 ÷ 11A8 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 0600 × AC00 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 0600 × 0308 ÷ AC00 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 0600 × AC01 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 0600 × 0308 ÷ AC01 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 0600 × 1F1E6 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 0600 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 0600 × 261D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 0600 × 0308 ÷ 261D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 0600 × 1F3FB ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 0600 × 0308 ÷ 1F3FB ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 0600 × 200D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0600 × 0308 × 200D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0600 × 2764 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 0600 × 0308 ÷ 2764 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × 
[9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 0600 × 1F466 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] BOY (EBG) ÷ [0.3] +÷ 0600 × 0308 ÷ 1F466 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 0600 × 0378 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] (Other) ÷ [0.3] +÷ 0600 × 0308 ÷ 0378 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 0600 ÷ D800 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0600 × 0308 ÷ D800 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0903 ÷ 0020 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 0903 × 0308 ÷ 0020 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 0903 ÷ 000D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [5.0] (CR) ÷ [0.3] +÷ 0903 × 0308 ÷ 000D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 0903 ÷ 000A ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [5.0] (LF) ÷ [0.3] +÷ 0903 × 0308 ÷ 000A ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 0903 ÷ 0001 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [5.0] (Control) ÷ [0.3] +÷ 0903 × 0308 ÷ 0001 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0903 × 0300 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 0903 × 0308 × 0300 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 0903 ÷ 0600 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 0903 × 0308 ÷ 0600 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 0903 × 0903 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 0903 × 0308 × 0903 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 0903 ÷ 1100 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 0903 × 0308 ÷ 1100 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 0903 ÷ 1160 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 0903 × 0308 ÷ 1160 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 0903 ÷ 11A8 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 0903 × 0308 ÷ 11A8 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 0903 ÷ AC00 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 0903 × 0308 ÷ AC00 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 0903 ÷ AC01 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ 
[999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 0903 × 0308 ÷ AC01 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 0903 ÷ 1F1E6 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 0903 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 0903 ÷ 261D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 0903 × 0308 ÷ 261D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 0903 ÷ 1F3FB ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 0903 × 0308 ÷ 1F3FB ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 0903 × 200D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0903 × 0308 × 200D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0903 ÷ 2764 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 0903 × 0308 ÷ 2764 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 0903 ÷ 1F466 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 0903 × 0308 ÷ 1F466 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 0903 ÷ 0378 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] (Other) ÷ [0.3] +÷ 0903 × 0308 ÷ 0378 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 0903 ÷ D800 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [5.0] (Control) ÷ [0.3] +÷ 0903 × 0308 ÷ D800 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 1100 ÷ 0020 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 1100 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 1100 ÷ 000D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [5.0] (CR) ÷ [0.3] +÷ 1100 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 1100 ÷ 000A ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [5.0] (LF) ÷ [0.3] +÷ 1100 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 1100 ÷ 0001 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [5.0] (Control) ÷ [0.3] +÷ 1100 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 1100 × 0300 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 1100 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 1100 ÷ 0600 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 1100 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ 
[999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 1100 × 0903 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 1100 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 1100 × 1100 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 1100 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 1100 × 1160 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 1100 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 1100 ÷ 11A8 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 1100 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 1100 × AC00 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 1100 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 1100 × AC01 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 1100 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 1100 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 1100 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 1100 ÷ 261D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 1100 × 0308 ÷ 261D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 1100 ÷ 1F3FB ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 1100 × 0308 ÷ 1F3FB ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 1100 × 200D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 1100 × 0308 × 200D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 1100 ÷ 2764 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 1100 × 0308 ÷ 2764 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 1100 ÷ 1F466 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 1100 × 0308 ÷ 1F466 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 1100 ÷ 0378 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] (Other) ÷ [0.3] +÷ 1100 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 1100 ÷ D800 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [5.0] (Control) ÷ [0.3] +÷ 1100 × 0308 ÷ D800 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 1160 ÷ 0020 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 1160 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL 
JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 1160 ÷ 000D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [5.0] (CR) ÷ [0.3] +÷ 1160 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 1160 ÷ 000A ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [5.0] (LF) ÷ [0.3] +÷ 1160 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 1160 ÷ 0001 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [5.0] (Control) ÷ [0.3] +÷ 1160 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 1160 × 0300 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 1160 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 1160 ÷ 0600 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 1160 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 1160 × 0903 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 1160 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 1160 ÷ 1100 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 1160 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 1160 × 1160 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [7.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 1160 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 1160 × 11A8 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [7.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 1160 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 1160 ÷ AC00 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 1160 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 1160 ÷ AC01 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 1160 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 1160 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 1160 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 1160 ÷ 261D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 1160 × 0308 ÷ 261D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 1160 ÷ 1F3FB ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 1160 × 0308 ÷ 1F3FB ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 1160 × 200D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] ZERO WIDTH 
JOINER (ZWJ) ÷ [0.3] +÷ 1160 × 0308 × 200D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 1160 ÷ 2764 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 1160 × 0308 ÷ 2764 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 1160 ÷ 1F466 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 1160 × 0308 ÷ 1F466 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 1160 ÷ 0378 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] (Other) ÷ [0.3] +÷ 1160 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 1160 ÷ D800 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [5.0] (Control) ÷ [0.3] +÷ 1160 × 0308 ÷ D800 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 11A8 ÷ 0020 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 11A8 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 11A8 ÷ 000D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [5.0] (CR) ÷ [0.3] +÷ 11A8 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 11A8 ÷ 000A ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [5.0] (LF) ÷ [0.3] +÷ 11A8 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 11A8 ÷ 0001 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [5.0] (Control) ÷ [0.3] +÷ 11A8 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 11A8 × 0300 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 11A8 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 11A8 ÷ 0600 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 11A8 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 11A8 × 0903 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 11A8 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 11A8 ÷ 1100 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 11A8 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 11A8 ÷ 1160 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 11A8 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 11A8 × 11A8 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [8.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 11A8 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 11A8 ÷ AC00 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 11A8 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ 
[999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 11A8 ÷ AC01 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 11A8 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 11A8 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 11A8 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 11A8 ÷ 261D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 11A8 × 0308 ÷ 261D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 11A8 ÷ 1F3FB ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 11A8 × 0308 ÷ 1F3FB ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 11A8 × 200D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 11A8 × 0308 × 200D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 11A8 ÷ 2764 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 11A8 × 0308 ÷ 2764 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 11A8 ÷ 1F466 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 11A8 × 0308 ÷ 1F466 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 11A8 ÷ 0378 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] (Other) ÷ [0.3] +÷ 11A8 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 11A8 ÷ D800 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [5.0] (Control) ÷ [0.3] +÷ 11A8 × 0308 ÷ D800 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ AC00 ÷ 0020 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ AC00 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ AC00 ÷ 000D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [5.0] (CR) ÷ [0.3] +÷ AC00 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ AC00 ÷ 000A ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [5.0] (LF) ÷ [0.3] +÷ AC00 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ AC00 ÷ 0001 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [5.0] (Control) ÷ [0.3] +÷ AC00 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ AC00 × 0300 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ AC00 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ AC00 ÷ 0600 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ AC00 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ AC00 × 0903 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.1] DEVANAGARI 
SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ AC00 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ AC00 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ AC00 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ AC00 × 1160 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [7.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ AC00 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ AC00 × 11A8 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [7.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ AC00 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ AC00 ÷ AC00 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ AC00 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ AC00 ÷ AC01 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ AC00 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ AC00 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ AC00 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ AC00 ÷ 261D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ AC00 × 0308 ÷ 261D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ AC00 ÷ 1F3FB ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ AC00 × 0308 ÷ 1F3FB ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ AC00 × 200D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ AC00 × 0308 × 200D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ AC00 ÷ 2764 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ AC00 × 0308 ÷ 2764 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ AC00 ÷ 1F466 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ AC00 × 0308 ÷ 1F466 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ AC00 ÷ 0378 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] (Other) ÷ [0.3] +÷ AC00 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ AC00 ÷ D800 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [5.0] (Control) ÷ [0.3] +÷ AC00 × 0308 ÷ D800 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ AC01 ÷ 0020 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ AC01 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ AC01 ÷ 000D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [5.0] (CR) ÷ [0.3] +÷ AC01 × 0308 ÷ 000D ÷ # ÷ [0.2] 
HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ AC01 ÷ 000A ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [5.0] (LF) ÷ [0.3] +÷ AC01 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ AC01 ÷ 0001 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [5.0] (Control) ÷ [0.3] +÷ AC01 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ AC01 × 0300 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ AC01 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ AC01 ÷ 0600 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ AC01 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ AC01 × 0903 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ AC01 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ AC01 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ AC01 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ AC01 ÷ 1160 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ AC01 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ AC01 × 11A8 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [8.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ AC01 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ AC01 ÷ AC00 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ AC01 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ AC01 ÷ AC01 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ AC01 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ AC01 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ AC01 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ AC01 ÷ 261D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ AC01 × 0308 ÷ 261D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ AC01 ÷ 1F3FB ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ AC01 × 0308 ÷ 1F3FB ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ AC01 × 200D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ AC01 × 0308 × 200D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ AC01 ÷ 2764 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) 
÷ [0.3] +÷ AC01 × 0308 ÷ 2764 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ AC01 ÷ 1F466 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ AC01 × 0308 ÷ 1F466 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ AC01 ÷ 0378 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] (Other) ÷ [0.3] +÷ AC01 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ AC01 ÷ D800 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [5.0] (Control) ÷ [0.3] +÷ AC01 × 0308 ÷ D800 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 1F1E6 ÷ 0020 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 0020 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 1F1E6 ÷ 000D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [5.0] (CR) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 000D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 1F1E6 ÷ 000A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [5.0] (LF) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 000A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 1F1E6 ÷ 0001 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [5.0] (Control) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 0001 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 1F1E6 × 0300 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 1F1E6 × 0308 × 0300 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 1F1E6 ÷ 0600 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 0600 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 1F1E6 × 0903 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 1F1E6 × 0308 × 0903 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 1F1E6 ÷ 1100 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 1100 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 1F1E6 ÷ 1160 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 1160 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 1F1E6 ÷ 11A8 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 11A8 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 1F1E6 ÷ AC00 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ AC00 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS 
(Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 1F1E6 ÷ AC01 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ AC01 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 1F1E6 × 1F1E6 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [12.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 1F1E6 ÷ 261D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 261D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 1F1E6 ÷ 1F3FB ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 1F3FB ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 1F1E6 × 200D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 1F1E6 × 0308 × 200D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 1F1E6 ÷ 2764 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 2764 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 1F1E6 ÷ 1F466 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 1F466 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 1F1E6 ÷ 0378 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] (Other) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ 0378 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 1F1E6 ÷ D800 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [5.0] (Control) ÷ [0.3] +÷ 1F1E6 × 0308 ÷ D800 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 261D ÷ 0020 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 261D × 0308 ÷ 0020 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 261D ÷ 000D ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [5.0] (CR) ÷ [0.3] +÷ 261D × 0308 ÷ 000D ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 261D ÷ 000A ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [5.0] (LF) ÷ [0.3] +÷ 261D × 0308 ÷ 000A ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 261D ÷ 0001 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [5.0] (Control) ÷ [0.3] +÷ 261D × 0308 ÷ 0001 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 261D × 0300 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 261D × 0308 × 0300 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE 
ACCENT (Extend) ÷ [0.3] +÷ 261D ÷ 0600 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 261D × 0308 ÷ 0600 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 261D × 0903 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 261D × 0308 × 0903 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 261D ÷ 1100 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 261D × 0308 ÷ 1100 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 261D ÷ 1160 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 261D × 0308 ÷ 1160 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 261D ÷ 11A8 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 261D × 0308 ÷ 11A8 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 261D ÷ AC00 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 261D × 0308 ÷ AC00 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 261D ÷ AC01 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 261D × 0308 ÷ AC01 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 261D ÷ 1F1E6 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 261D × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 261D ÷ 261D ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 261D × 0308 ÷ 261D ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 261D × 1F3FB ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [10.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 261D × 0308 × 1F3FB ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) × [10.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 261D × 200D ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 261D × 0308 × 200D ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 261D ÷ 2764 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 261D × 0308 ÷ 2764 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 261D ÷ 1F466 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 261D × 0308 ÷ 1F466 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 261D ÷ 0378 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [999.0] (Other) ÷ [0.3] +÷ 261D × 0308 ÷ 0378 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) 
× [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 261D ÷ D800 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) ÷ [5.0] (Control) ÷ [0.3] +÷ 261D × 0308 ÷ D800 ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 1F3FB ÷ 0020 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 0020 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 1F3FB ÷ 000D ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [5.0] (CR) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 000D ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 1F3FB ÷ 000A ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [5.0] (LF) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 000A ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 1F3FB ÷ 0001 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [5.0] (Control) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 0001 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 1F3FB × 0300 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 1F3FB × 0308 × 0300 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 1F3FB ÷ 0600 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 0600 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 1F3FB × 0903 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 1F3FB × 0308 × 0903 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 1F3FB ÷ 1100 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 1100 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 1F3FB ÷ 1160 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 1160 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 1F3FB ÷ 11A8 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 11A8 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 1F3FB ÷ AC00 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 1F3FB × 0308 ÷ AC00 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 1F3FB ÷ AC01 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 1F3FB × 0308 ÷ AC01 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) 
÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 1F3FB ÷ 1F1E6 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 1F3FB ÷ 261D ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 261D ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 1F3FB ÷ 1F3FB ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 1F3FB ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 1F3FB × 200D ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 1F3FB × 0308 × 200D ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 1F3FB ÷ 2764 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 2764 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 1F3FB ÷ 1F466 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 1F466 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 1F3FB ÷ 0378 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] (Other) ÷ [0.3] +÷ 1F3FB × 0308 ÷ 0378 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 1F3FB ÷ D800 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [5.0] (Control) ÷ [0.3] +÷ 1F3FB × 0308 ÷ D800 ÷ # ÷ [0.2] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 200D ÷ 0020 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 200D × 0308 ÷ 0020 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 200D ÷ 000D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [5.0] (CR) ÷ [0.3] +÷ 200D × 0308 ÷ 000D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 200D ÷ 000A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [5.0] (LF) ÷ [0.3] +÷ 200D × 0308 ÷ 000A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 200D ÷ 0001 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [5.0] (Control) ÷ [0.3] +÷ 200D × 0308 ÷ 0001 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 200D × 0300 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 200D × 0308 × 0300 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 200D ÷ 0600 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 200D × 0308 ÷ 0600 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ 
[999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 200D × 0903 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 200D × 0308 × 0903 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 200D ÷ 1100 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 200D × 0308 ÷ 1100 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 200D ÷ 1160 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 200D × 0308 ÷ 1160 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 200D ÷ 11A8 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 200D × 0308 ÷ 11A8 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 200D ÷ AC00 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 200D × 0308 ÷ AC00 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 200D ÷ AC01 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 200D × 0308 ÷ AC01 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 200D ÷ 1F1E6 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 200D × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 200D ÷ 261D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 200D × 0308 ÷ 261D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 200D ÷ 1F3FB ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 200D × 0308 ÷ 1F3FB ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 200D × 200D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 200D × 0308 × 200D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 200D × 2764 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [11.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 200D × 0308 ÷ 2764 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 200D × 1F466 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [11.0] BOY (EBG) ÷ [0.3] +÷ 200D × 0308 ÷ 1F466 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 200D ÷ 0378 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] (Other) ÷ [0.3] +÷ 200D × 0308 ÷ 0378 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 200D ÷ D800 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) ÷ [5.0] (Control) ÷ [0.3] +÷ 200D × 0308 ÷ D800 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 2764 ÷ 0020 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 2764 × 0308 ÷ 0020 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] 
SPACE (Other) ÷ [0.3] +÷ 2764 ÷ 000D ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [5.0] (CR) ÷ [0.3] +÷ 2764 × 0308 ÷ 000D ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 2764 ÷ 000A ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [5.0] (LF) ÷ [0.3] +÷ 2764 × 0308 ÷ 000A ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 2764 ÷ 0001 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [5.0] (Control) ÷ [0.3] +÷ 2764 × 0308 ÷ 0001 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 2764 × 0300 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 2764 × 0308 × 0300 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 2764 ÷ 0600 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 2764 × 0308 ÷ 0600 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 2764 × 0903 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 2764 × 0308 × 0903 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 2764 ÷ 1100 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 2764 × 0308 ÷ 1100 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 2764 ÷ 1160 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 2764 × 0308 ÷ 1160 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 2764 ÷ 11A8 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 2764 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 2764 ÷ AC00 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 2764 × 0308 ÷ AC00 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 2764 ÷ AC01 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 2764 × 0308 ÷ AC01 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 2764 ÷ 1F1E6 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 2764 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 2764 ÷ 261D ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 2764 × 0308 ÷ 261D ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 2764 ÷ 1F3FB ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 2764 × 0308 ÷ 1F3FB ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] 
EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 2764 × 200D ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 2764 × 0308 × 200D ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 2764 ÷ 2764 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 2764 × 0308 ÷ 2764 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 2764 ÷ 1F466 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 2764 × 0308 ÷ 1F466 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 2764 ÷ 0378 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [999.0] (Other) ÷ [0.3] +÷ 2764 × 0308 ÷ 0378 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 2764 ÷ D800 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [5.0] (Control) ÷ [0.3] +÷ 2764 × 0308 ÷ D800 ÷ # ÷ [0.2] HEAVY BLACK HEART (Glue_After_Zwj) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 1F466 ÷ 0020 ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 1F466 × 0308 ÷ 0020 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 1F466 ÷ 000D ÷ # ÷ [0.2] BOY (EBG) ÷ [5.0] (CR) ÷ [0.3] +÷ 1F466 × 0308 ÷ 000D ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 1F466 ÷ 000A ÷ # ÷ [0.2] BOY (EBG) ÷ [5.0] (LF) ÷ [0.3] +÷ 1F466 × 0308 ÷ 000A ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 1F466 ÷ 0001 ÷ # ÷ [0.2] BOY (EBG) ÷ [5.0] (Control) ÷ [0.3] +÷ 1F466 × 0308 ÷ 0001 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 1F466 × 0300 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 1F466 × 0308 × 0300 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 1F466 ÷ 0600 ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 1F466 × 0308 ÷ 0600 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 1F466 × 0903 ÷ # ÷ [0.2] BOY (EBG) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 1F466 × 0308 × 0903 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 1F466 ÷ 1100 ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 1F466 × 0308 ÷ 1100 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 1F466 ÷ 1160 ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 1F466 × 0308 ÷ 1160 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 1F466 ÷ 11A8 ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 1F466 × 0308 ÷ 11A8 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 1F466 ÷ AC00 ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 1F466 × 0308 ÷ AC00 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 1F466 ÷ AC01 ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 1F466 × 0308 ÷ AC01 ÷ # ÷ [0.2] BOY (EBG) × 
[9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 1F466 ÷ 1F1E6 ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 1F466 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 1F466 ÷ 261D ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 1F466 × 0308 ÷ 261D ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 1F466 × 1F3FB ÷ # ÷ [0.2] BOY (EBG) × [10.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 1F466 × 0308 × 1F3FB ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) × [10.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 1F466 × 200D ÷ # ÷ [0.2] BOY (EBG) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 1F466 × 0308 × 200D ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 1F466 ÷ 2764 ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 1F466 × 0308 ÷ 2764 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 1F466 ÷ 1F466 ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 1F466 × 0308 ÷ 1F466 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 1F466 ÷ 0378 ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] (Other) ÷ [0.3] +÷ 1F466 × 0308 ÷ 0378 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 1F466 ÷ D800 ÷ # ÷ [0.2] BOY (EBG) ÷ [5.0] (Control) ÷ [0.3] +÷ 1F466 × 0308 ÷ D800 ÷ # ÷ [0.2] BOY (EBG) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0378 ÷ 0020 ÷ # ÷ [0.2] (Other) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 0378 × 0308 ÷ 0020 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 0378 ÷ 000D ÷ # ÷ [0.2] (Other) ÷ [5.0] (CR) ÷ [0.3] +÷ 0378 × 0308 ÷ 000D ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ 0378 ÷ 000A ÷ # ÷ [0.2] (Other) ÷ [5.0] (LF) ÷ [0.3] +÷ 0378 × 0308 ÷ 000A ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ 0378 ÷ 0001 ÷ # ÷ [0.2] (Other) ÷ [5.0] (Control) ÷ [0.3] +÷ 0378 × 0308 ÷ 0001 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 0378 × 0300 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 0378 × 0308 × 0300 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ 0378 ÷ 0600 ÷ # ÷ [0.2] (Other) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 0378 × 0308 ÷ 0600 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ 0378 × 0903 ÷ # ÷ [0.2] (Other) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 0378 × 0308 × 0903 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ 0378 ÷ 1100 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 0378 × 0308 ÷ 1100 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 0378 ÷ 1160 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 0378 × 0308 ÷ 1160 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ 0378 ÷ 11A8 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 0378 × 0308 ÷ 11A8 ÷ # 
÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ 0378 ÷ AC00 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 0378 × 0308 ÷ AC00 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ 0378 ÷ AC01 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 0378 × 0308 ÷ AC01 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ 0378 ÷ 1F1E6 ÷ # ÷ [0.2] (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 0378 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ 0378 ÷ 261D ÷ # ÷ [0.2] (Other) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 0378 × 0308 ÷ 261D ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 0378 ÷ 1F3FB ÷ # ÷ [0.2] (Other) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 0378 × 0308 ÷ 1F3FB ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 0378 × 200D ÷ # ÷ [0.2] (Other) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0378 × 0308 × 200D ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0378 ÷ 2764 ÷ # ÷ [0.2] (Other) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 0378 × 0308 ÷ 2764 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 0378 ÷ 1F466 ÷ # ÷ [0.2] (Other) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 0378 × 0308 ÷ 1F466 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ 0378 ÷ 0378 ÷ # ÷ [0.2] (Other) ÷ [999.0] (Other) ÷ [0.3] +÷ 0378 × 0308 ÷ 0378 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ 0378 ÷ D800 ÷ # ÷ [0.2] (Other) ÷ [5.0] (Control) ÷ [0.3] +÷ 0378 × 0308 ÷ D800 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ D800 ÷ 0020 ÷ # ÷ [0.2] (Control) ÷ [4.0] SPACE (Other) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ D800 ÷ 000D ÷ # ÷ [0.2] (Control) ÷ [4.0] (CR) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (CR) ÷ [0.3] +÷ D800 ÷ 000A ÷ # ÷ [0.2] (Control) ÷ [4.0] (LF) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (LF) ÷ [0.3] +÷ D800 ÷ 0001 ÷ # ÷ [0.2] (Control) ÷ [4.0] (Control) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ D800 ÷ 0300 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ D800 ÷ 0308 × 0300 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend) ÷ [0.3] +÷ D800 ÷ 0600 ÷ # ÷ [0.2] (Control) ÷ [4.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 0600 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] +÷ D800 ÷ 0903 ÷ # ÷ [0.2] (Control) ÷ [4.0] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ D800 ÷ 0308 × 0903 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] +÷ D800 ÷ 1100 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 1100 ÷ # ÷ [0.2] 
(Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ D800 ÷ 1160 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 1160 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] +÷ D800 ÷ 11A8 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 11A8 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] +÷ D800 ÷ AC00 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ D800 ÷ 0308 ÷ AC00 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] +÷ D800 ÷ AC01 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ D800 ÷ 0308 ÷ AC01 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] +÷ D800 ÷ 1F1E6 ÷ # ÷ [0.2] (Control) ÷ [4.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] +÷ D800 ÷ 261D ÷ # ÷ [0.2] (Control) ÷ [4.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 261D ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ D800 ÷ 1F3FB ÷ # ÷ [0.2] (Control) ÷ [4.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 1F3FB ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ D800 ÷ 200D ÷ # ÷ [0.2] (Control) ÷ [4.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ D800 ÷ 0308 × 200D ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ D800 ÷ 2764 ÷ # ÷ [0.2] (Control) ÷ [4.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 2764 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ D800 ÷ 1F466 ÷ # ÷ [0.2] (Control) ÷ [4.0] BOY (EBG) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 1F466 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] BOY (EBG) ÷ [0.3] +÷ D800 ÷ 0378 ÷ # ÷ [0.2] (Control) ÷ [4.0] (Other) ÷ [0.3] +÷ D800 ÷ 0308 ÷ 0378 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [999.0] (Other) ÷ [0.3] +÷ D800 ÷ D800 ÷ # ÷ [0.2] (Control) ÷ [4.0] (Control) ÷ [0.3] +÷ D800 ÷ 0308 ÷ D800 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [5.0] (Control) ÷ [0.3] +÷ 000D × 000A ÷ 0061 ÷ 000A ÷ 0308 ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [4.0] LATIN SMALL LETTER A (Other) ÷ [5.0] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend) ÷ [0.3] +÷ 0061 × 0308 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [0.3] +÷ 0020 × 200D ÷ 0646 ÷ # ÷ [0.2] SPACE (Other) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] ARABIC LETTER NOON (Other) ÷ [0.3] +÷ 0646 × 200D ÷ 0020 ÷ # ÷ [0.2] ARABIC LETTER NOON (Other) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] SPACE (Other) ÷ [0.3] +÷ 1100 × 1100 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ AC00 × 11A8 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [7.0] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ AC01 × 11A8 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [8.0] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] +÷ 1F1E6 × 1F1E7 ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [12.0] REGIONAL INDICATOR 
SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] +÷ 0061 ÷ 1F1E6 × 1F1E7 ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] +÷ 0061 ÷ 1F1E6 × 1F1E7 × 200D ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] +÷ 0061 ÷ 1F1E6 × 200D ÷ 1F1E7 × 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] +÷ 0061 ÷ 1F1E6 × 1F1E7 ÷ 1F1E8 × 1F1E9 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER D (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] +÷ 0061 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] ZERO WIDTH JOINER (ZWJ) ÷ [0.3] +÷ 0061 × 0308 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] COMBINING DIAERESIS (Extend) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] +÷ 0061 × 0903 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] +÷ 0061 ÷ 0600 × 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) × [9.2] LATIN SMALL LETTER B (Other) ÷ [0.3] +÷ 261D × 1F3FB ÷ 261D ÷ # ÷ [0.2] WHITE UP POINTING INDEX (E_Base) × [10.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [999.0] WHITE UP POINTING INDEX (E_Base) ÷ [0.3] +÷ 1F466 × 1F3FB ÷ # ÷ [0.2] BOY (EBG) × [10.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 200D × 1F466 × 1F3FB ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [11.0] BOY (EBG) × [10.0] EMOJI MODIFIER FITZPATRICK TYPE-1-2 (E_Modifier) ÷ [0.3] +÷ 200D × 2764 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [11.0] HEAVY BLACK HEART (Glue_After_Zwj) ÷ [0.3] +÷ 200D × 1F466 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ) × [11.0] BOY (EBG) ÷ [0.3] +÷ 1F466 ÷ 1F466 ÷ # ÷ [0.2] BOY (EBG) ÷ [999.0] BOY (EBG) ÷ [0.3] +# +# Lines: 822 +# +# EOF diff --git a/lib/elixir/unicode/PropList.txt b/lib/elixir/unicode/PropList.txt new file mode 100644 index 00000000000..32579bc00b6 --- /dev/null +++ b/lib/elixir/unicode/PropList.txt @@ -0,0 +1,1579 @@ +# PropList-9.0.0.txt +# Date: 2016-06-01, 10:34:30 GMT +# © 2016 Unicode®, Inc. +# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. +# For terms of use, see http://www.unicode.org/terms_of_use.html +# +# Unicode Character Database +# For documentation, see http://www.unicode.org/reports/tr44/ + +# ================================================ + +0009..000D ; White_Space # Cc [5] .. 
+0020 ; White_Space # Zs SPACE +0085 ; White_Space # Cc +00A0 ; White_Space # Zs NO-BREAK SPACE +1680 ; White_Space # Zs OGHAM SPACE MARK +2000..200A ; White_Space # Zs [11] EN QUAD..HAIR SPACE +2028 ; White_Space # Zl LINE SEPARATOR +2029 ; White_Space # Zp PARAGRAPH SEPARATOR +202F ; White_Space # Zs NARROW NO-BREAK SPACE +205F ; White_Space # Zs MEDIUM MATHEMATICAL SPACE +3000 ; White_Space # Zs IDEOGRAPHIC SPACE + +# Total code points: 25 + +# ================================================ + +061C ; Bidi_Control # Cf ARABIC LETTER MARK +200E..200F ; Bidi_Control # Cf [2] LEFT-TO-RIGHT MARK..RIGHT-TO-LEFT MARK +202A..202E ; Bidi_Control # Cf [5] LEFT-TO-RIGHT EMBEDDING..RIGHT-TO-LEFT OVERRIDE +2066..2069 ; Bidi_Control # Cf [4] LEFT-TO-RIGHT ISOLATE..POP DIRECTIONAL ISOLATE + +# Total code points: 12 + +# ================================================ + +200C..200D ; Join_Control # Cf [2] ZERO WIDTH NON-JOINER..ZERO WIDTH JOINER + +# Total code points: 2 + +# ================================================ + +002D ; Dash # Pd HYPHEN-MINUS +058A ; Dash # Pd ARMENIAN HYPHEN +05BE ; Dash # Pd HEBREW PUNCTUATION MAQAF +1400 ; Dash # Pd CANADIAN SYLLABICS HYPHEN +1806 ; Dash # Pd MONGOLIAN TODO SOFT HYPHEN +2010..2015 ; Dash # Pd [6] HYPHEN..HORIZONTAL BAR +2053 ; Dash # Po SWUNG DASH +207B ; Dash # Sm SUPERSCRIPT MINUS +208B ; Dash # Sm SUBSCRIPT MINUS +2212 ; Dash # Sm MINUS SIGN +2E17 ; Dash # Pd DOUBLE OBLIQUE HYPHEN +2E1A ; Dash # Pd HYPHEN WITH DIAERESIS +2E3A..2E3B ; Dash # Pd [2] TWO-EM DASH..THREE-EM DASH +2E40 ; Dash # Pd DOUBLE HYPHEN +301C ; Dash # Pd WAVE DASH +3030 ; Dash # Pd WAVY DASH +30A0 ; Dash # Pd KATAKANA-HIRAGANA DOUBLE HYPHEN +FE31..FE32 ; Dash # Pd [2] PRESENTATION FORM FOR VERTICAL EM DASH..PRESENTATION FORM FOR VERTICAL EN DASH +FE58 ; Dash # Pd SMALL EM DASH +FE63 ; Dash # Pd SMALL HYPHEN-MINUS +FF0D ; Dash # Pd FULLWIDTH HYPHEN-MINUS + +# Total code points: 28 + +# ================================================ + +002D ; Hyphen # Pd HYPHEN-MINUS +00AD ; Hyphen # Cf SOFT HYPHEN +058A ; Hyphen # Pd ARMENIAN HYPHEN +1806 ; Hyphen # Pd MONGOLIAN TODO SOFT HYPHEN +2010..2011 ; Hyphen # Pd [2] HYPHEN..NON-BREAKING HYPHEN +2E17 ; Hyphen # Pd DOUBLE OBLIQUE HYPHEN +30FB ; Hyphen # Po KATAKANA MIDDLE DOT +FE63 ; Hyphen # Pd SMALL HYPHEN-MINUS +FF0D ; Hyphen # Pd FULLWIDTH HYPHEN-MINUS +FF65 ; Hyphen # Po HALFWIDTH KATAKANA MIDDLE DOT + +# Total code points: 11 + +# ================================================ + +0022 ; Quotation_Mark # Po QUOTATION MARK +0027 ; Quotation_Mark # Po APOSTROPHE +00AB ; Quotation_Mark # Pi LEFT-POINTING DOUBLE ANGLE QUOTATION MARK +00BB ; Quotation_Mark # Pf RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK +2018 ; Quotation_Mark # Pi LEFT SINGLE QUOTATION MARK +2019 ; Quotation_Mark # Pf RIGHT SINGLE QUOTATION MARK +201A ; Quotation_Mark # Ps SINGLE LOW-9 QUOTATION MARK +201B..201C ; Quotation_Mark # Pi [2] SINGLE HIGH-REVERSED-9 QUOTATION MARK..LEFT DOUBLE QUOTATION MARK +201D ; Quotation_Mark # Pf RIGHT DOUBLE QUOTATION MARK +201E ; Quotation_Mark # Ps DOUBLE LOW-9 QUOTATION MARK +201F ; Quotation_Mark # Pi DOUBLE HIGH-REVERSED-9 QUOTATION MARK +2039 ; Quotation_Mark # Pi SINGLE LEFT-POINTING ANGLE QUOTATION MARK +203A ; Quotation_Mark # Pf SINGLE RIGHT-POINTING ANGLE QUOTATION MARK +2E42 ; Quotation_Mark # Ps DOUBLE LOW-REVERSED-9 QUOTATION MARK +300C ; Quotation_Mark # Ps LEFT CORNER BRACKET +300D ; Quotation_Mark # Pe RIGHT CORNER BRACKET +300E ; Quotation_Mark # Ps LEFT WHITE CORNER BRACKET +300F ; Quotation_Mark # Pe RIGHT WHITE 
CORNER BRACKET +301D ; Quotation_Mark # Ps REVERSED DOUBLE PRIME QUOTATION MARK +301E..301F ; Quotation_Mark # Pe [2] DOUBLE PRIME QUOTATION MARK..LOW DOUBLE PRIME QUOTATION MARK +FE41 ; Quotation_Mark # Ps PRESENTATION FORM FOR VERTICAL LEFT CORNER BRACKET +FE42 ; Quotation_Mark # Pe PRESENTATION FORM FOR VERTICAL RIGHT CORNER BRACKET +FE43 ; Quotation_Mark # Ps PRESENTATION FORM FOR VERTICAL LEFT WHITE CORNER BRACKET +FE44 ; Quotation_Mark # Pe PRESENTATION FORM FOR VERTICAL RIGHT WHITE CORNER BRACKET +FF02 ; Quotation_Mark # Po FULLWIDTH QUOTATION MARK +FF07 ; Quotation_Mark # Po FULLWIDTH APOSTROPHE +FF62 ; Quotation_Mark # Ps HALFWIDTH LEFT CORNER BRACKET +FF63 ; Quotation_Mark # Pe HALFWIDTH RIGHT CORNER BRACKET + +# Total code points: 30 + +# ================================================ + +0021 ; Terminal_Punctuation # Po EXCLAMATION MARK +002C ; Terminal_Punctuation # Po COMMA +002E ; Terminal_Punctuation # Po FULL STOP +003A..003B ; Terminal_Punctuation # Po [2] COLON..SEMICOLON +003F ; Terminal_Punctuation # Po QUESTION MARK +037E ; Terminal_Punctuation # Po GREEK QUESTION MARK +0387 ; Terminal_Punctuation # Po GREEK ANO TELEIA +0589 ; Terminal_Punctuation # Po ARMENIAN FULL STOP +05C3 ; Terminal_Punctuation # Po HEBREW PUNCTUATION SOF PASUQ +060C ; Terminal_Punctuation # Po ARABIC COMMA +061B ; Terminal_Punctuation # Po ARABIC SEMICOLON +061F ; Terminal_Punctuation # Po ARABIC QUESTION MARK +06D4 ; Terminal_Punctuation # Po ARABIC FULL STOP +0700..070A ; Terminal_Punctuation # Po [11] SYRIAC END OF PARAGRAPH..SYRIAC CONTRACTION +070C ; Terminal_Punctuation # Po SYRIAC HARKLEAN METOBELUS +07F8..07F9 ; Terminal_Punctuation # Po [2] NKO COMMA..NKO EXCLAMATION MARK +0830..083E ; Terminal_Punctuation # Po [15] SAMARITAN PUNCTUATION NEQUDAA..SAMARITAN PUNCTUATION ANNAAU +085E ; Terminal_Punctuation # Po MANDAIC PUNCTUATION +0964..0965 ; Terminal_Punctuation # Po [2] DEVANAGARI DANDA..DEVANAGARI DOUBLE DANDA +0E5A..0E5B ; Terminal_Punctuation # Po [2] THAI CHARACTER ANGKHANKHU..THAI CHARACTER KHOMUT +0F08 ; Terminal_Punctuation # Po TIBETAN MARK SBRUL SHAD +0F0D..0F12 ; Terminal_Punctuation # Po [6] TIBETAN MARK SHAD..TIBETAN MARK RGYA GRAM SHAD +104A..104B ; Terminal_Punctuation # Po [2] MYANMAR SIGN LITTLE SECTION..MYANMAR SIGN SECTION +1361..1368 ; Terminal_Punctuation # Po [8] ETHIOPIC WORDSPACE..ETHIOPIC PARAGRAPH SEPARATOR +166D..166E ; Terminal_Punctuation # Po [2] CANADIAN SYLLABICS CHI SIGN..CANADIAN SYLLABICS FULL STOP +16EB..16ED ; Terminal_Punctuation # Po [3] RUNIC SINGLE PUNCTUATION..RUNIC CROSS PUNCTUATION +1735..1736 ; Terminal_Punctuation # Po [2] PHILIPPINE SINGLE PUNCTUATION..PHILIPPINE DOUBLE PUNCTUATION +17D4..17D6 ; Terminal_Punctuation # Po [3] KHMER SIGN KHAN..KHMER SIGN CAMNUC PII KUUH +17DA ; Terminal_Punctuation # Po KHMER SIGN KOOMUUT +1802..1805 ; Terminal_Punctuation # Po [4] MONGOLIAN COMMA..MONGOLIAN FOUR DOTS +1808..1809 ; Terminal_Punctuation # Po [2] MONGOLIAN MANCHU COMMA..MONGOLIAN MANCHU FULL STOP +1944..1945 ; Terminal_Punctuation # Po [2] LIMBU EXCLAMATION MARK..LIMBU QUESTION MARK +1AA8..1AAB ; Terminal_Punctuation # Po [4] TAI THAM SIGN KAAN..TAI THAM SIGN SATKAANKUU +1B5A..1B5B ; Terminal_Punctuation # Po [2] BALINESE PANTI..BALINESE PAMADA +1B5D..1B5F ; Terminal_Punctuation # Po [3] BALINESE CARIK PAMUNGKAH..BALINESE CARIK PAREREN +1C3B..1C3F ; Terminal_Punctuation # Po [5] LEPCHA PUNCTUATION TA-ROL..LEPCHA PUNCTUATION TSHOOK +1C7E..1C7F ; Terminal_Punctuation # Po [2] OL CHIKI PUNCTUATION MUCAAD..OL CHIKI PUNCTUATION DOUBLE MUCAAD 
+203C..203D ; Terminal_Punctuation # Po [2] DOUBLE EXCLAMATION MARK..INTERROBANG +2047..2049 ; Terminal_Punctuation # Po [3] DOUBLE QUESTION MARK..EXCLAMATION QUESTION MARK +2E2E ; Terminal_Punctuation # Po REVERSED QUESTION MARK +2E3C ; Terminal_Punctuation # Po STENOGRAPHIC FULL STOP +2E41 ; Terminal_Punctuation # Po REVERSED COMMA +3001..3002 ; Terminal_Punctuation # Po [2] IDEOGRAPHIC COMMA..IDEOGRAPHIC FULL STOP +A4FE..A4FF ; Terminal_Punctuation # Po [2] LISU PUNCTUATION COMMA..LISU PUNCTUATION FULL STOP +A60D..A60F ; Terminal_Punctuation # Po [3] VAI COMMA..VAI QUESTION MARK +A6F3..A6F7 ; Terminal_Punctuation # Po [5] BAMUM FULL STOP..BAMUM QUESTION MARK +A876..A877 ; Terminal_Punctuation # Po [2] PHAGS-PA MARK SHAD..PHAGS-PA MARK DOUBLE SHAD +A8CE..A8CF ; Terminal_Punctuation # Po [2] SAURASHTRA DANDA..SAURASHTRA DOUBLE DANDA +A92F ; Terminal_Punctuation # Po KAYAH LI SIGN SHYA +A9C7..A9C9 ; Terminal_Punctuation # Po [3] JAVANESE PADA PANGKAT..JAVANESE PADA LUNGSI +AA5D..AA5F ; Terminal_Punctuation # Po [3] CHAM PUNCTUATION DANDA..CHAM PUNCTUATION TRIPLE DANDA +AADF ; Terminal_Punctuation # Po TAI VIET SYMBOL KOI KOI +AAF0..AAF1 ; Terminal_Punctuation # Po [2] MEETEI MAYEK CHEIKHAN..MEETEI MAYEK AHANG KHUDAM +ABEB ; Terminal_Punctuation # Po MEETEI MAYEK CHEIKHEI +FE50..FE52 ; Terminal_Punctuation # Po [3] SMALL COMMA..SMALL FULL STOP +FE54..FE57 ; Terminal_Punctuation # Po [4] SMALL SEMICOLON..SMALL EXCLAMATION MARK +FF01 ; Terminal_Punctuation # Po FULLWIDTH EXCLAMATION MARK +FF0C ; Terminal_Punctuation # Po FULLWIDTH COMMA +FF0E ; Terminal_Punctuation # Po FULLWIDTH FULL STOP +FF1A..FF1B ; Terminal_Punctuation # Po [2] FULLWIDTH COLON..FULLWIDTH SEMICOLON +FF1F ; Terminal_Punctuation # Po FULLWIDTH QUESTION MARK +FF61 ; Terminal_Punctuation # Po HALFWIDTH IDEOGRAPHIC FULL STOP +FF64 ; Terminal_Punctuation # Po HALFWIDTH IDEOGRAPHIC COMMA +1039F ; Terminal_Punctuation # Po UGARITIC WORD DIVIDER +103D0 ; Terminal_Punctuation # Po OLD PERSIAN WORD DIVIDER +10857 ; Terminal_Punctuation # Po IMPERIAL ARAMAIC SECTION SIGN +1091F ; Terminal_Punctuation # Po PHOENICIAN WORD SEPARATOR +10A56..10A57 ; Terminal_Punctuation # Po [2] KHAROSHTHI PUNCTUATION DANDA..KHAROSHTHI PUNCTUATION DOUBLE DANDA +10AF0..10AF5 ; Terminal_Punctuation # Po [6] MANICHAEAN PUNCTUATION STAR..MANICHAEAN PUNCTUATION TWO DOTS +10B3A..10B3F ; Terminal_Punctuation # Po [6] TINY TWO DOTS OVER ONE DOT PUNCTUATION..LARGE ONE RING OVER TWO RINGS PUNCTUATION +10B99..10B9C ; Terminal_Punctuation # Po [4] PSALTER PAHLAVI SECTION MARK..PSALTER PAHLAVI FOUR DOTS WITH DOT +11047..1104D ; Terminal_Punctuation # Po [7] BRAHMI DANDA..BRAHMI PUNCTUATION LOTUS +110BE..110C1 ; Terminal_Punctuation # Po [4] KAITHI SECTION MARK..KAITHI DOUBLE DANDA +11141..11143 ; Terminal_Punctuation # Po [3] CHAKMA DANDA..CHAKMA QUESTION MARK +111C5..111C6 ; Terminal_Punctuation # Po [2] SHARADA DANDA..SHARADA DOUBLE DANDA +111CD ; Terminal_Punctuation # Po SHARADA SUTRA MARK +111DE..111DF ; Terminal_Punctuation # Po [2] SHARADA SECTION MARK-1..SHARADA SECTION MARK-2 +11238..1123C ; Terminal_Punctuation # Po [5] KHOJKI DANDA..KHOJKI DOUBLE SECTION MARK +112A9 ; Terminal_Punctuation # Po MULTANI SECTION MARK +1144B..1144D ; Terminal_Punctuation # Po [3] NEWA DANDA..NEWA COMMA +1145B ; Terminal_Punctuation # Po NEWA PLACEHOLDER MARK +115C2..115C5 ; Terminal_Punctuation # Po [4] SIDDHAM DANDA..SIDDHAM SEPARATOR BAR +115C9..115D7 ; Terminal_Punctuation # Po [15] SIDDHAM END OF TEXT MARK..SIDDHAM SECTION MARK WITH CIRCLES AND FOUR ENCLOSURES 
+11641..11642 ; Terminal_Punctuation # Po [2] MODI DANDA..MODI DOUBLE DANDA +1173C..1173E ; Terminal_Punctuation # Po [3] AHOM SIGN SMALL SECTION..AHOM SIGN RULAI +11C41..11C43 ; Terminal_Punctuation # Po [3] BHAIKSUKI DANDA..BHAIKSUKI WORD SEPARATOR +11C71 ; Terminal_Punctuation # Po MARCHEN MARK SHAD +12470..12474 ; Terminal_Punctuation # Po [5] CUNEIFORM PUNCTUATION SIGN OLD ASSYRIAN WORD DIVIDER..CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON +16A6E..16A6F ; Terminal_Punctuation # Po [2] MRO DANDA..MRO DOUBLE DANDA +16AF5 ; Terminal_Punctuation # Po BASSA VAH FULL STOP +16B37..16B39 ; Terminal_Punctuation # Po [3] PAHAWH HMONG SIGN VOS THOM..PAHAWH HMONG SIGN CIM CHEEM +16B44 ; Terminal_Punctuation # Po PAHAWH HMONG SIGN XAUS +1BC9F ; Terminal_Punctuation # Po DUPLOYAN PUNCTUATION CHINOOK FULL STOP +1DA87..1DA8A ; Terminal_Punctuation # Po [4] SIGNWRITING COMMA..SIGNWRITING COLON + +# Total code points: 246 + +# ================================================ + +005E ; Other_Math # Sk CIRCUMFLEX ACCENT +03D0..03D2 ; Other_Math # L& [3] GREEK BETA SYMBOL..GREEK UPSILON WITH HOOK SYMBOL +03D5 ; Other_Math # L& GREEK PHI SYMBOL +03F0..03F1 ; Other_Math # L& [2] GREEK KAPPA SYMBOL..GREEK RHO SYMBOL +03F4..03F5 ; Other_Math # L& [2] GREEK CAPITAL THETA SYMBOL..GREEK LUNATE EPSILON SYMBOL +2016 ; Other_Math # Po DOUBLE VERTICAL LINE +2032..2034 ; Other_Math # Po [3] PRIME..TRIPLE PRIME +2040 ; Other_Math # Pc CHARACTER TIE +2061..2064 ; Other_Math # Cf [4] FUNCTION APPLICATION..INVISIBLE PLUS +207D ; Other_Math # Ps SUPERSCRIPT LEFT PARENTHESIS +207E ; Other_Math # Pe SUPERSCRIPT RIGHT PARENTHESIS +208D ; Other_Math # Ps SUBSCRIPT LEFT PARENTHESIS +208E ; Other_Math # Pe SUBSCRIPT RIGHT PARENTHESIS +20D0..20DC ; Other_Math # Mn [13] COMBINING LEFT HARPOON ABOVE..COMBINING FOUR DOTS ABOVE +20E1 ; Other_Math # Mn COMBINING LEFT RIGHT ARROW ABOVE +20E5..20E6 ; Other_Math # Mn [2] COMBINING REVERSE SOLIDUS OVERLAY..COMBINING DOUBLE VERTICAL STROKE OVERLAY +20EB..20EF ; Other_Math # Mn [5] COMBINING LONG DOUBLE SOLIDUS OVERLAY..COMBINING RIGHT ARROW BELOW +2102 ; Other_Math # L& DOUBLE-STRUCK CAPITAL C +2107 ; Other_Math # L& EULER CONSTANT +210A..2113 ; Other_Math # L& [10] SCRIPT SMALL G..SCRIPT SMALL L +2115 ; Other_Math # L& DOUBLE-STRUCK CAPITAL N +2119..211D ; Other_Math # L& [5] DOUBLE-STRUCK CAPITAL P..DOUBLE-STRUCK CAPITAL R +2124 ; Other_Math # L& DOUBLE-STRUCK CAPITAL Z +2128 ; Other_Math # L& BLACK-LETTER CAPITAL Z +2129 ; Other_Math # So TURNED GREEK SMALL LETTER IOTA +212C..212D ; Other_Math # L& [2] SCRIPT CAPITAL B..BLACK-LETTER CAPITAL C +212F..2131 ; Other_Math # L& [3] SCRIPT SMALL E..SCRIPT CAPITAL F +2133..2134 ; Other_Math # L& [2] SCRIPT CAPITAL M..SCRIPT SMALL O +2135..2138 ; Other_Math # Lo [4] ALEF SYMBOL..DALET SYMBOL +213C..213F ; Other_Math # L& [4] DOUBLE-STRUCK SMALL PI..DOUBLE-STRUCK CAPITAL PI +2145..2149 ; Other_Math # L& [5] DOUBLE-STRUCK ITALIC CAPITAL D..DOUBLE-STRUCK ITALIC SMALL J +2195..2199 ; Other_Math # So [5] UP DOWN ARROW..SOUTH WEST ARROW +219C..219F ; Other_Math # So [4] LEFTWARDS WAVE ARROW..UPWARDS TWO HEADED ARROW +21A1..21A2 ; Other_Math # So [2] DOWNWARDS TWO HEADED ARROW..LEFTWARDS ARROW WITH TAIL +21A4..21A5 ; Other_Math # So [2] LEFTWARDS ARROW FROM BAR..UPWARDS ARROW FROM BAR +21A7 ; Other_Math # So DOWNWARDS ARROW FROM BAR +21A9..21AD ; Other_Math # So [5] LEFTWARDS ARROW WITH HOOK..LEFT RIGHT WAVE ARROW +21B0..21B1 ; Other_Math # So [2] UPWARDS ARROW WITH TIP LEFTWARDS..UPWARDS ARROW WITH TIP RIGHTWARDS +21B6..21B7 ; Other_Math # So [2] 
ANTICLOCKWISE TOP SEMICIRCLE ARROW..CLOCKWISE TOP SEMICIRCLE ARROW +21BC..21CD ; Other_Math # So [18] LEFTWARDS HARPOON WITH BARB UPWARDS..LEFTWARDS DOUBLE ARROW WITH STROKE +21D0..21D1 ; Other_Math # So [2] LEFTWARDS DOUBLE ARROW..UPWARDS DOUBLE ARROW +21D3 ; Other_Math # So DOWNWARDS DOUBLE ARROW +21D5..21DB ; Other_Math # So [7] UP DOWN DOUBLE ARROW..RIGHTWARDS TRIPLE ARROW +21DD ; Other_Math # So RIGHTWARDS SQUIGGLE ARROW +21E4..21E5 ; Other_Math # So [2] LEFTWARDS ARROW TO BAR..RIGHTWARDS ARROW TO BAR +2308 ; Other_Math # Ps LEFT CEILING +2309 ; Other_Math # Pe RIGHT CEILING +230A ; Other_Math # Ps LEFT FLOOR +230B ; Other_Math # Pe RIGHT FLOOR +23B4..23B5 ; Other_Math # So [2] TOP SQUARE BRACKET..BOTTOM SQUARE BRACKET +23B7 ; Other_Math # So RADICAL SYMBOL BOTTOM +23D0 ; Other_Math # So VERTICAL LINE EXTENSION +23E2 ; Other_Math # So WHITE TRAPEZIUM +25A0..25A1 ; Other_Math # So [2] BLACK SQUARE..WHITE SQUARE +25AE..25B6 ; Other_Math # So [9] BLACK VERTICAL RECTANGLE..BLACK RIGHT-POINTING TRIANGLE +25BC..25C0 ; Other_Math # So [5] BLACK DOWN-POINTING TRIANGLE..BLACK LEFT-POINTING TRIANGLE +25C6..25C7 ; Other_Math # So [2] BLACK DIAMOND..WHITE DIAMOND +25CA..25CB ; Other_Math # So [2] LOZENGE..WHITE CIRCLE +25CF..25D3 ; Other_Math # So [5] BLACK CIRCLE..CIRCLE WITH UPPER HALF BLACK +25E2 ; Other_Math # So BLACK LOWER RIGHT TRIANGLE +25E4 ; Other_Math # So BLACK UPPER LEFT TRIANGLE +25E7..25EC ; Other_Math # So [6] SQUARE WITH LEFT HALF BLACK..WHITE UP-POINTING TRIANGLE WITH DOT +2605..2606 ; Other_Math # So [2] BLACK STAR..WHITE STAR +2640 ; Other_Math # So FEMALE SIGN +2642 ; Other_Math # So MALE SIGN +2660..2663 ; Other_Math # So [4] BLACK SPADE SUIT..BLACK CLUB SUIT +266D..266E ; Other_Math # So [2] MUSIC FLAT SIGN..MUSIC NATURAL SIGN +27C5 ; Other_Math # Ps LEFT S-SHAPED BAG DELIMITER +27C6 ; Other_Math # Pe RIGHT S-SHAPED BAG DELIMITER +27E6 ; Other_Math # Ps MATHEMATICAL LEFT WHITE SQUARE BRACKET +27E7 ; Other_Math # Pe MATHEMATICAL RIGHT WHITE SQUARE BRACKET +27E8 ; Other_Math # Ps MATHEMATICAL LEFT ANGLE BRACKET +27E9 ; Other_Math # Pe MATHEMATICAL RIGHT ANGLE BRACKET +27EA ; Other_Math # Ps MATHEMATICAL LEFT DOUBLE ANGLE BRACKET +27EB ; Other_Math # Pe MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET +27EC ; Other_Math # Ps MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET +27ED ; Other_Math # Pe MATHEMATICAL RIGHT WHITE TORTOISE SHELL BRACKET +27EE ; Other_Math # Ps MATHEMATICAL LEFT FLATTENED PARENTHESIS +27EF ; Other_Math # Pe MATHEMATICAL RIGHT FLATTENED PARENTHESIS +2983 ; Other_Math # Ps LEFT WHITE CURLY BRACKET +2984 ; Other_Math # Pe RIGHT WHITE CURLY BRACKET +2985 ; Other_Math # Ps LEFT WHITE PARENTHESIS +2986 ; Other_Math # Pe RIGHT WHITE PARENTHESIS +2987 ; Other_Math # Ps Z NOTATION LEFT IMAGE BRACKET +2988 ; Other_Math # Pe Z NOTATION RIGHT IMAGE BRACKET +2989 ; Other_Math # Ps Z NOTATION LEFT BINDING BRACKET +298A ; Other_Math # Pe Z NOTATION RIGHT BINDING BRACKET +298B ; Other_Math # Ps LEFT SQUARE BRACKET WITH UNDERBAR +298C ; Other_Math # Pe RIGHT SQUARE BRACKET WITH UNDERBAR +298D ; Other_Math # Ps LEFT SQUARE BRACKET WITH TICK IN TOP CORNER +298E ; Other_Math # Pe RIGHT SQUARE BRACKET WITH TICK IN BOTTOM CORNER +298F ; Other_Math # Ps LEFT SQUARE BRACKET WITH TICK IN BOTTOM CORNER +2990 ; Other_Math # Pe RIGHT SQUARE BRACKET WITH TICK IN TOP CORNER +2991 ; Other_Math # Ps LEFT ANGLE BRACKET WITH DOT +2992 ; Other_Math # Pe RIGHT ANGLE BRACKET WITH DOT +2993 ; Other_Math # Ps LEFT ARC LESS-THAN BRACKET +2994 ; Other_Math # Pe RIGHT ARC GREATER-THAN BRACKET +2995 ; 
Other_Math # Ps DOUBLE LEFT ARC GREATER-THAN BRACKET +2996 ; Other_Math # Pe DOUBLE RIGHT ARC LESS-THAN BRACKET +2997 ; Other_Math # Ps LEFT BLACK TORTOISE SHELL BRACKET +2998 ; Other_Math # Pe RIGHT BLACK TORTOISE SHELL BRACKET +29D8 ; Other_Math # Ps LEFT WIGGLY FENCE +29D9 ; Other_Math # Pe RIGHT WIGGLY FENCE +29DA ; Other_Math # Ps LEFT DOUBLE WIGGLY FENCE +29DB ; Other_Math # Pe RIGHT DOUBLE WIGGLY FENCE +29FC ; Other_Math # Ps LEFT-POINTING CURVED ANGLE BRACKET +29FD ; Other_Math # Pe RIGHT-POINTING CURVED ANGLE BRACKET +FE61 ; Other_Math # Po SMALL ASTERISK +FE63 ; Other_Math # Pd SMALL HYPHEN-MINUS +FE68 ; Other_Math # Po SMALL REVERSE SOLIDUS +FF3C ; Other_Math # Po FULLWIDTH REVERSE SOLIDUS +FF3E ; Other_Math # Sk FULLWIDTH CIRCUMFLEX ACCENT +1D400..1D454 ; Other_Math # L& [85] MATHEMATICAL BOLD CAPITAL A..MATHEMATICAL ITALIC SMALL G +1D456..1D49C ; Other_Math # L& [71] MATHEMATICAL ITALIC SMALL I..MATHEMATICAL SCRIPT CAPITAL A +1D49E..1D49F ; Other_Math # L& [2] MATHEMATICAL SCRIPT CAPITAL C..MATHEMATICAL SCRIPT CAPITAL D +1D4A2 ; Other_Math # L& MATHEMATICAL SCRIPT CAPITAL G +1D4A5..1D4A6 ; Other_Math # L& [2] MATHEMATICAL SCRIPT CAPITAL J..MATHEMATICAL SCRIPT CAPITAL K +1D4A9..1D4AC ; Other_Math # L& [4] MATHEMATICAL SCRIPT CAPITAL N..MATHEMATICAL SCRIPT CAPITAL Q +1D4AE..1D4B9 ; Other_Math # L& [12] MATHEMATICAL SCRIPT CAPITAL S..MATHEMATICAL SCRIPT SMALL D +1D4BB ; Other_Math # L& MATHEMATICAL SCRIPT SMALL F +1D4BD..1D4C3 ; Other_Math # L& [7] MATHEMATICAL SCRIPT SMALL H..MATHEMATICAL SCRIPT SMALL N +1D4C5..1D505 ; Other_Math # L& [65] MATHEMATICAL SCRIPT SMALL P..MATHEMATICAL FRAKTUR CAPITAL B +1D507..1D50A ; Other_Math # L& [4] MATHEMATICAL FRAKTUR CAPITAL D..MATHEMATICAL FRAKTUR CAPITAL G +1D50D..1D514 ; Other_Math # L& [8] MATHEMATICAL FRAKTUR CAPITAL J..MATHEMATICAL FRAKTUR CAPITAL Q +1D516..1D51C ; Other_Math # L& [7] MATHEMATICAL FRAKTUR CAPITAL S..MATHEMATICAL FRAKTUR CAPITAL Y +1D51E..1D539 ; Other_Math # L& [28] MATHEMATICAL FRAKTUR SMALL A..MATHEMATICAL DOUBLE-STRUCK CAPITAL B +1D53B..1D53E ; Other_Math # L& [4] MATHEMATICAL DOUBLE-STRUCK CAPITAL D..MATHEMATICAL DOUBLE-STRUCK CAPITAL G +1D540..1D544 ; Other_Math # L& [5] MATHEMATICAL DOUBLE-STRUCK CAPITAL I..MATHEMATICAL DOUBLE-STRUCK CAPITAL M +1D546 ; Other_Math # L& MATHEMATICAL DOUBLE-STRUCK CAPITAL O +1D54A..1D550 ; Other_Math # L& [7] MATHEMATICAL DOUBLE-STRUCK CAPITAL S..MATHEMATICAL DOUBLE-STRUCK CAPITAL Y +1D552..1D6A5 ; Other_Math # L& [340] MATHEMATICAL DOUBLE-STRUCK SMALL A..MATHEMATICAL ITALIC SMALL DOTLESS J +1D6A8..1D6C0 ; Other_Math # L& [25] MATHEMATICAL BOLD CAPITAL ALPHA..MATHEMATICAL BOLD CAPITAL OMEGA +1D6C2..1D6DA ; Other_Math # L& [25] MATHEMATICAL BOLD SMALL ALPHA..MATHEMATICAL BOLD SMALL OMEGA +1D6DC..1D6FA ; Other_Math # L& [31] MATHEMATICAL BOLD EPSILON SYMBOL..MATHEMATICAL ITALIC CAPITAL OMEGA +1D6FC..1D714 ; Other_Math # L& [25] MATHEMATICAL ITALIC SMALL ALPHA..MATHEMATICAL ITALIC SMALL OMEGA +1D716..1D734 ; Other_Math # L& [31] MATHEMATICAL ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD ITALIC CAPITAL OMEGA +1D736..1D74E ; Other_Math # L& [25] MATHEMATICAL BOLD ITALIC SMALL ALPHA..MATHEMATICAL BOLD ITALIC SMALL OMEGA +1D750..1D76E ; Other_Math # L& [31] MATHEMATICAL BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD CAPITAL OMEGA +1D770..1D788 ; Other_Math # L& [25] MATHEMATICAL SANS-SERIF BOLD SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD SMALL OMEGA +1D78A..1D7A8 ; Other_Math # L& [31] MATHEMATICAL SANS-SERIF BOLD EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMEGA 
+1D7AA..1D7C2 ; Other_Math # L& [25] MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMEGA +1D7C4..1D7CB ; Other_Math # L& [8] MATHEMATICAL SANS-SERIF BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD SMALL DIGAMMA +1D7CE..1D7FF ; Other_Math # Nd [50] MATHEMATICAL BOLD DIGIT ZERO..MATHEMATICAL MONOSPACE DIGIT NINE +1EE00..1EE03 ; Other_Math # Lo [4] ARABIC MATHEMATICAL ALEF..ARABIC MATHEMATICAL DAL +1EE05..1EE1F ; Other_Math # Lo [27] ARABIC MATHEMATICAL WAW..ARABIC MATHEMATICAL DOTLESS QAF +1EE21..1EE22 ; Other_Math # Lo [2] ARABIC MATHEMATICAL INITIAL BEH..ARABIC MATHEMATICAL INITIAL JEEM +1EE24 ; Other_Math # Lo ARABIC MATHEMATICAL INITIAL HEH +1EE27 ; Other_Math # Lo ARABIC MATHEMATICAL INITIAL HAH +1EE29..1EE32 ; Other_Math # Lo [10] ARABIC MATHEMATICAL INITIAL YEH..ARABIC MATHEMATICAL INITIAL QAF +1EE34..1EE37 ; Other_Math # Lo [4] ARABIC MATHEMATICAL INITIAL SHEEN..ARABIC MATHEMATICAL INITIAL KHAH +1EE39 ; Other_Math # Lo ARABIC MATHEMATICAL INITIAL DAD +1EE3B ; Other_Math # Lo ARABIC MATHEMATICAL INITIAL GHAIN +1EE42 ; Other_Math # Lo ARABIC MATHEMATICAL TAILED JEEM +1EE47 ; Other_Math # Lo ARABIC MATHEMATICAL TAILED HAH +1EE49 ; Other_Math # Lo ARABIC MATHEMATICAL TAILED YEH +1EE4B ; Other_Math # Lo ARABIC MATHEMATICAL TAILED LAM +1EE4D..1EE4F ; Other_Math # Lo [3] ARABIC MATHEMATICAL TAILED NOON..ARABIC MATHEMATICAL TAILED AIN +1EE51..1EE52 ; Other_Math # Lo [2] ARABIC MATHEMATICAL TAILED SAD..ARABIC MATHEMATICAL TAILED QAF +1EE54 ; Other_Math # Lo ARABIC MATHEMATICAL TAILED SHEEN +1EE57 ; Other_Math # Lo ARABIC MATHEMATICAL TAILED KHAH +1EE59 ; Other_Math # Lo ARABIC MATHEMATICAL TAILED DAD +1EE5B ; Other_Math # Lo ARABIC MATHEMATICAL TAILED GHAIN +1EE5D ; Other_Math # Lo ARABIC MATHEMATICAL TAILED DOTLESS NOON +1EE5F ; Other_Math # Lo ARABIC MATHEMATICAL TAILED DOTLESS QAF +1EE61..1EE62 ; Other_Math # Lo [2] ARABIC MATHEMATICAL STRETCHED BEH..ARABIC MATHEMATICAL STRETCHED JEEM +1EE64 ; Other_Math # Lo ARABIC MATHEMATICAL STRETCHED HEH +1EE67..1EE6A ; Other_Math # Lo [4] ARABIC MATHEMATICAL STRETCHED HAH..ARABIC MATHEMATICAL STRETCHED KAF +1EE6C..1EE72 ; Other_Math # Lo [7] ARABIC MATHEMATICAL STRETCHED MEEM..ARABIC MATHEMATICAL STRETCHED QAF +1EE74..1EE77 ; Other_Math # Lo [4] ARABIC MATHEMATICAL STRETCHED SHEEN..ARABIC MATHEMATICAL STRETCHED KHAH +1EE79..1EE7C ; Other_Math # Lo [4] ARABIC MATHEMATICAL STRETCHED DAD..ARABIC MATHEMATICAL STRETCHED DOTLESS BEH +1EE7E ; Other_Math # Lo ARABIC MATHEMATICAL STRETCHED DOTLESS FEH +1EE80..1EE89 ; Other_Math # Lo [10] ARABIC MATHEMATICAL LOOPED ALEF..ARABIC MATHEMATICAL LOOPED YEH +1EE8B..1EE9B ; Other_Math # Lo [17] ARABIC MATHEMATICAL LOOPED LAM..ARABIC MATHEMATICAL LOOPED GHAIN +1EEA1..1EEA3 ; Other_Math # Lo [3] ARABIC MATHEMATICAL DOUBLE-STRUCK BEH..ARABIC MATHEMATICAL DOUBLE-STRUCK DAL +1EEA5..1EEA9 ; Other_Math # Lo [5] ARABIC MATHEMATICAL DOUBLE-STRUCK WAW..ARABIC MATHEMATICAL DOUBLE-STRUCK YEH +1EEAB..1EEBB ; Other_Math # Lo [17] ARABIC MATHEMATICAL DOUBLE-STRUCK LAM..ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN + +# Total code points: 1362 + +# ================================================ + +0030..0039 ; Hex_Digit # Nd [10] DIGIT ZERO..DIGIT NINE +0041..0046 ; Hex_Digit # L& [6] LATIN CAPITAL LETTER A..LATIN CAPITAL LETTER F +0061..0066 ; Hex_Digit # L& [6] LATIN SMALL LETTER A..LATIN SMALL LETTER F +FF10..FF19 ; Hex_Digit # Nd [10] FULLWIDTH DIGIT ZERO..FULLWIDTH DIGIT NINE +FF21..FF26 ; Hex_Digit # L& [6] FULLWIDTH LATIN CAPITAL LETTER A..FULLWIDTH LATIN CAPITAL LETTER F +FF41..FF46 ; 
Hex_Digit # L& [6] FULLWIDTH LATIN SMALL LETTER A..FULLWIDTH LATIN SMALL LETTER F + +# Total code points: 44 + +# ================================================ + +0030..0039 ; ASCII_Hex_Digit # Nd [10] DIGIT ZERO..DIGIT NINE +0041..0046 ; ASCII_Hex_Digit # L& [6] LATIN CAPITAL LETTER A..LATIN CAPITAL LETTER F +0061..0066 ; ASCII_Hex_Digit # L& [6] LATIN SMALL LETTER A..LATIN SMALL LETTER F + +# Total code points: 22 + +# ================================================ + +0345 ; Other_Alphabetic # Mn COMBINING GREEK YPOGEGRAMMENI +05B0..05BD ; Other_Alphabetic # Mn [14] HEBREW POINT SHEVA..HEBREW POINT METEG +05BF ; Other_Alphabetic # Mn HEBREW POINT RAFE +05C1..05C2 ; Other_Alphabetic # Mn [2] HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT +05C4..05C5 ; Other_Alphabetic # Mn [2] HEBREW MARK UPPER DOT..HEBREW MARK LOWER DOT +05C7 ; Other_Alphabetic # Mn HEBREW POINT QAMATS QATAN +0610..061A ; Other_Alphabetic # Mn [11] ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL KASRA +064B..0657 ; Other_Alphabetic # Mn [13] ARABIC FATHATAN..ARABIC INVERTED DAMMA +0659..065F ; Other_Alphabetic # Mn [7] ARABIC ZWARAKAY..ARABIC WAVY HAMZA BELOW +0670 ; Other_Alphabetic # Mn ARABIC LETTER SUPERSCRIPT ALEF +06D6..06DC ; Other_Alphabetic # Mn [7] ARABIC SMALL HIGH LIGATURE SAD WITH LAM WITH ALEF MAKSURA..ARABIC SMALL HIGH SEEN +06E1..06E4 ; Other_Alphabetic # Mn [4] ARABIC SMALL HIGH DOTLESS HEAD OF KHAH..ARABIC SMALL HIGH MADDA +06E7..06E8 ; Other_Alphabetic # Mn [2] ARABIC SMALL HIGH YEH..ARABIC SMALL HIGH NOON +06ED ; Other_Alphabetic # Mn ARABIC SMALL LOW MEEM +0711 ; Other_Alphabetic # Mn SYRIAC LETTER SUPERSCRIPT ALAPH +0730..073F ; Other_Alphabetic # Mn [16] SYRIAC PTHAHA ABOVE..SYRIAC RWAHA +07A6..07B0 ; Other_Alphabetic # Mn [11] THAANA ABAFILI..THAANA SUKUN +0816..0817 ; Other_Alphabetic # Mn [2] SAMARITAN MARK IN..SAMARITAN MARK IN-ALAF +081B..0823 ; Other_Alphabetic # Mn [9] SAMARITAN MARK EPENTHETIC YUT..SAMARITAN VOWEL SIGN A +0825..0827 ; Other_Alphabetic # Mn [3] SAMARITAN VOWEL SIGN SHORT A..SAMARITAN VOWEL SIGN U +0829..082C ; Other_Alphabetic # Mn [4] SAMARITAN VOWEL SIGN LONG I..SAMARITAN VOWEL SIGN SUKUN +08D4..08DF ; Other_Alphabetic # Mn [12] ARABIC SMALL HIGH WORD AR-RUB..ARABIC SMALL HIGH WORD WAQFA +08E3..08E9 ; Other_Alphabetic # Mn [7] ARABIC TURNED DAMMA BELOW..ARABIC CURLY KASRATAN +08F0..0902 ; Other_Alphabetic # Mn [19] ARABIC OPEN FATHATAN..DEVANAGARI SIGN ANUSVARA +0903 ; Other_Alphabetic # Mc DEVANAGARI SIGN VISARGA +093A ; Other_Alphabetic # Mn DEVANAGARI VOWEL SIGN OE +093B ; Other_Alphabetic # Mc DEVANAGARI VOWEL SIGN OOE +093E..0940 ; Other_Alphabetic # Mc [3] DEVANAGARI VOWEL SIGN AA..DEVANAGARI VOWEL SIGN II +0941..0948 ; Other_Alphabetic # Mn [8] DEVANAGARI VOWEL SIGN U..DEVANAGARI VOWEL SIGN AI +0949..094C ; Other_Alphabetic # Mc [4] DEVANAGARI VOWEL SIGN CANDRA O..DEVANAGARI VOWEL SIGN AU +094E..094F ; Other_Alphabetic # Mc [2] DEVANAGARI VOWEL SIGN PRISHTHAMATRA E..DEVANAGARI VOWEL SIGN AW +0955..0957 ; Other_Alphabetic # Mn [3] DEVANAGARI VOWEL SIGN CANDRA LONG E..DEVANAGARI VOWEL SIGN UUE +0962..0963 ; Other_Alphabetic # Mn [2] DEVANAGARI VOWEL SIGN VOCALIC L..DEVANAGARI VOWEL SIGN VOCALIC LL +0981 ; Other_Alphabetic # Mn BENGALI SIGN CANDRABINDU +0982..0983 ; Other_Alphabetic # Mc [2] BENGALI SIGN ANUSVARA..BENGALI SIGN VISARGA +09BE..09C0 ; Other_Alphabetic # Mc [3] BENGALI VOWEL SIGN AA..BENGALI VOWEL SIGN II +09C1..09C4 ; Other_Alphabetic # Mn [4] BENGALI VOWEL SIGN U..BENGALI VOWEL SIGN VOCALIC RR +09C7..09C8 ; Other_Alphabetic # Mc [2] BENGALI 
VOWEL SIGN E..BENGALI VOWEL SIGN AI +09CB..09CC ; Other_Alphabetic # Mc [2] BENGALI VOWEL SIGN O..BENGALI VOWEL SIGN AU +09D7 ; Other_Alphabetic # Mc BENGALI AU LENGTH MARK +09E2..09E3 ; Other_Alphabetic # Mn [2] BENGALI VOWEL SIGN VOCALIC L..BENGALI VOWEL SIGN VOCALIC LL +0A01..0A02 ; Other_Alphabetic # Mn [2] GURMUKHI SIGN ADAK BINDI..GURMUKHI SIGN BINDI +0A03 ; Other_Alphabetic # Mc GURMUKHI SIGN VISARGA +0A3E..0A40 ; Other_Alphabetic # Mc [3] GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN II +0A41..0A42 ; Other_Alphabetic # Mn [2] GURMUKHI VOWEL SIGN U..GURMUKHI VOWEL SIGN UU +0A47..0A48 ; Other_Alphabetic # Mn [2] GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI +0A4B..0A4C ; Other_Alphabetic # Mn [2] GURMUKHI VOWEL SIGN OO..GURMUKHI VOWEL SIGN AU +0A51 ; Other_Alphabetic # Mn GURMUKHI SIGN UDAAT +0A70..0A71 ; Other_Alphabetic # Mn [2] GURMUKHI TIPPI..GURMUKHI ADDAK +0A75 ; Other_Alphabetic # Mn GURMUKHI SIGN YAKASH +0A81..0A82 ; Other_Alphabetic # Mn [2] GUJARATI SIGN CANDRABINDU..GUJARATI SIGN ANUSVARA +0A83 ; Other_Alphabetic # Mc GUJARATI SIGN VISARGA +0ABE..0AC0 ; Other_Alphabetic # Mc [3] GUJARATI VOWEL SIGN AA..GUJARATI VOWEL SIGN II +0AC1..0AC5 ; Other_Alphabetic # Mn [5] GUJARATI VOWEL SIGN U..GUJARATI VOWEL SIGN CANDRA E +0AC7..0AC8 ; Other_Alphabetic # Mn [2] GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN AI +0AC9 ; Other_Alphabetic # Mc GUJARATI VOWEL SIGN CANDRA O +0ACB..0ACC ; Other_Alphabetic # Mc [2] GUJARATI VOWEL SIGN O..GUJARATI VOWEL SIGN AU +0AE2..0AE3 ; Other_Alphabetic # Mn [2] GUJARATI VOWEL SIGN VOCALIC L..GUJARATI VOWEL SIGN VOCALIC LL +0B01 ; Other_Alphabetic # Mn ORIYA SIGN CANDRABINDU +0B02..0B03 ; Other_Alphabetic # Mc [2] ORIYA SIGN ANUSVARA..ORIYA SIGN VISARGA +0B3E ; Other_Alphabetic # Mc ORIYA VOWEL SIGN AA +0B3F ; Other_Alphabetic # Mn ORIYA VOWEL SIGN I +0B40 ; Other_Alphabetic # Mc ORIYA VOWEL SIGN II +0B41..0B44 ; Other_Alphabetic # Mn [4] ORIYA VOWEL SIGN U..ORIYA VOWEL SIGN VOCALIC RR +0B47..0B48 ; Other_Alphabetic # Mc [2] ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI +0B4B..0B4C ; Other_Alphabetic # Mc [2] ORIYA VOWEL SIGN O..ORIYA VOWEL SIGN AU +0B56 ; Other_Alphabetic # Mn ORIYA AI LENGTH MARK +0B57 ; Other_Alphabetic # Mc ORIYA AU LENGTH MARK +0B62..0B63 ; Other_Alphabetic # Mn [2] ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL +0B82 ; Other_Alphabetic # Mn TAMIL SIGN ANUSVARA +0BBE..0BBF ; Other_Alphabetic # Mc [2] TAMIL VOWEL SIGN AA..TAMIL VOWEL SIGN I +0BC0 ; Other_Alphabetic # Mn TAMIL VOWEL SIGN II +0BC1..0BC2 ; Other_Alphabetic # Mc [2] TAMIL VOWEL SIGN U..TAMIL VOWEL SIGN UU +0BC6..0BC8 ; Other_Alphabetic # Mc [3] TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI +0BCA..0BCC ; Other_Alphabetic # Mc [3] TAMIL VOWEL SIGN O..TAMIL VOWEL SIGN AU +0BD7 ; Other_Alphabetic # Mc TAMIL AU LENGTH MARK +0C00 ; Other_Alphabetic # Mn TELUGU SIGN COMBINING CANDRABINDU ABOVE +0C01..0C03 ; Other_Alphabetic # Mc [3] TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA +0C3E..0C40 ; Other_Alphabetic # Mn [3] TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN II +0C41..0C44 ; Other_Alphabetic # Mc [4] TELUGU VOWEL SIGN U..TELUGU VOWEL SIGN VOCALIC RR +0C46..0C48 ; Other_Alphabetic # Mn [3] TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI +0C4A..0C4C ; Other_Alphabetic # Mn [3] TELUGU VOWEL SIGN O..TELUGU VOWEL SIGN AU +0C55..0C56 ; Other_Alphabetic # Mn [2] TELUGU LENGTH MARK..TELUGU AI LENGTH MARK +0C62..0C63 ; Other_Alphabetic # Mn [2] TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL +0C81 ; Other_Alphabetic # Mn KANNADA SIGN CANDRABINDU +0C82..0C83 ; Other_Alphabetic # Mc [2] 
KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA +0CBE ; Other_Alphabetic # Mc KANNADA VOWEL SIGN AA +0CBF ; Other_Alphabetic # Mn KANNADA VOWEL SIGN I +0CC0..0CC4 ; Other_Alphabetic # Mc [5] KANNADA VOWEL SIGN II..KANNADA VOWEL SIGN VOCALIC RR +0CC6 ; Other_Alphabetic # Mn KANNADA VOWEL SIGN E +0CC7..0CC8 ; Other_Alphabetic # Mc [2] KANNADA VOWEL SIGN EE..KANNADA VOWEL SIGN AI +0CCA..0CCB ; Other_Alphabetic # Mc [2] KANNADA VOWEL SIGN O..KANNADA VOWEL SIGN OO +0CCC ; Other_Alphabetic # Mn KANNADA VOWEL SIGN AU +0CD5..0CD6 ; Other_Alphabetic # Mc [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK +0CE2..0CE3 ; Other_Alphabetic # Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL +0D01 ; Other_Alphabetic # Mn MALAYALAM SIGN CANDRABINDU +0D02..0D03 ; Other_Alphabetic # Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA +0D3E..0D40 ; Other_Alphabetic # Mc [3] MALAYALAM VOWEL SIGN AA..MALAYALAM VOWEL SIGN II +0D41..0D44 ; Other_Alphabetic # Mn [4] MALAYALAM VOWEL SIGN U..MALAYALAM VOWEL SIGN VOCALIC RR +0D46..0D48 ; Other_Alphabetic # Mc [3] MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI +0D4A..0D4C ; Other_Alphabetic # Mc [3] MALAYALAM VOWEL SIGN O..MALAYALAM VOWEL SIGN AU +0D57 ; Other_Alphabetic # Mc MALAYALAM AU LENGTH MARK +0D62..0D63 ; Other_Alphabetic # Mn [2] MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL +0D82..0D83 ; Other_Alphabetic # Mc [2] SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA +0DCF..0DD1 ; Other_Alphabetic # Mc [3] SINHALA VOWEL SIGN AELA-PILLA..SINHALA VOWEL SIGN DIGA AEDA-PILLA +0DD2..0DD4 ; Other_Alphabetic # Mn [3] SINHALA VOWEL SIGN KETTI IS-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA +0DD6 ; Other_Alphabetic # Mn SINHALA VOWEL SIGN DIGA PAA-PILLA +0DD8..0DDF ; Other_Alphabetic # Mc [8] SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN GAYANUKITTA +0DF2..0DF3 ; Other_Alphabetic # Mc [2] SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA +0E31 ; Other_Alphabetic # Mn THAI CHARACTER MAI HAN-AKAT +0E34..0E3A ; Other_Alphabetic # Mn [7] THAI CHARACTER SARA I..THAI CHARACTER PHINTHU +0E4D ; Other_Alphabetic # Mn THAI CHARACTER NIKHAHIT +0EB1 ; Other_Alphabetic # Mn LAO VOWEL SIGN MAI KAN +0EB4..0EB9 ; Other_Alphabetic # Mn [6] LAO VOWEL SIGN I..LAO VOWEL SIGN UU +0EBB..0EBC ; Other_Alphabetic # Mn [2] LAO VOWEL SIGN MAI KON..LAO SEMIVOWEL SIGN LO +0ECD ; Other_Alphabetic # Mn LAO NIGGAHITA +0F71..0F7E ; Other_Alphabetic # Mn [14] TIBETAN VOWEL SIGN AA..TIBETAN SIGN RJES SU NGA RO +0F7F ; Other_Alphabetic # Mc TIBETAN SIGN RNAM BCAD +0F80..0F81 ; Other_Alphabetic # Mn [2] TIBETAN VOWEL SIGN REVERSED I..TIBETAN VOWEL SIGN REVERSED II +0F8D..0F97 ; Other_Alphabetic # Mn [11] TIBETAN SUBJOINED SIGN LCE TSA CAN..TIBETAN SUBJOINED LETTER JA +0F99..0FBC ; Other_Alphabetic # Mn [36] TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER FIXED-FORM RA +102B..102C ; Other_Alphabetic # Mc [2] MYANMAR VOWEL SIGN TALL AA..MYANMAR VOWEL SIGN AA +102D..1030 ; Other_Alphabetic # Mn [4] MYANMAR VOWEL SIGN I..MYANMAR VOWEL SIGN UU +1031 ; Other_Alphabetic # Mc MYANMAR VOWEL SIGN E +1032..1036 ; Other_Alphabetic # Mn [5] MYANMAR VOWEL SIGN AI..MYANMAR SIGN ANUSVARA +1038 ; Other_Alphabetic # Mc MYANMAR SIGN VISARGA +103B..103C ; Other_Alphabetic # Mc [2] MYANMAR CONSONANT SIGN MEDIAL YA..MYANMAR CONSONANT SIGN MEDIAL RA +103D..103E ; Other_Alphabetic # Mn [2] MYANMAR CONSONANT SIGN MEDIAL WA..MYANMAR CONSONANT SIGN MEDIAL HA +1056..1057 ; Other_Alphabetic # Mc [2] MYANMAR VOWEL SIGN VOCALIC R..MYANMAR VOWEL SIGN VOCALIC RR +1058..1059 ; 
Other_Alphabetic # Mn [2] MYANMAR VOWEL SIGN VOCALIC L..MYANMAR VOWEL SIGN VOCALIC LL +105E..1060 ; Other_Alphabetic # Mn [3] MYANMAR CONSONANT SIGN MON MEDIAL NA..MYANMAR CONSONANT SIGN MON MEDIAL LA +1062 ; Other_Alphabetic # Mc MYANMAR VOWEL SIGN SGAW KAREN EU +1067..1068 ; Other_Alphabetic # Mc [2] MYANMAR VOWEL SIGN WESTERN PWO KAREN EU..MYANMAR VOWEL SIGN WESTERN PWO KAREN UE +1071..1074 ; Other_Alphabetic # Mn [4] MYANMAR VOWEL SIGN GEBA KAREN I..MYANMAR VOWEL SIGN KAYAH EE +1082 ; Other_Alphabetic # Mn MYANMAR CONSONANT SIGN SHAN MEDIAL WA +1083..1084 ; Other_Alphabetic # Mc [2] MYANMAR VOWEL SIGN SHAN AA..MYANMAR VOWEL SIGN SHAN E +1085..1086 ; Other_Alphabetic # Mn [2] MYANMAR VOWEL SIGN SHAN E ABOVE..MYANMAR VOWEL SIGN SHAN FINAL Y +109C ; Other_Alphabetic # Mc MYANMAR VOWEL SIGN AITON A +109D ; Other_Alphabetic # Mn MYANMAR VOWEL SIGN AITON AI +135F ; Other_Alphabetic # Mn ETHIOPIC COMBINING GEMINATION MARK +1712..1713 ; Other_Alphabetic # Mn [2] TAGALOG VOWEL SIGN I..TAGALOG VOWEL SIGN U +1732..1733 ; Other_Alphabetic # Mn [2] HANUNOO VOWEL SIGN I..HANUNOO VOWEL SIGN U +1752..1753 ; Other_Alphabetic # Mn [2] BUHID VOWEL SIGN I..BUHID VOWEL SIGN U +1772..1773 ; Other_Alphabetic # Mn [2] TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U +17B6 ; Other_Alphabetic # Mc KHMER VOWEL SIGN AA +17B7..17BD ; Other_Alphabetic # Mn [7] KHMER VOWEL SIGN I..KHMER VOWEL SIGN UA +17BE..17C5 ; Other_Alphabetic # Mc [8] KHMER VOWEL SIGN OE..KHMER VOWEL SIGN AU +17C6 ; Other_Alphabetic # Mn KHMER SIGN NIKAHIT +17C7..17C8 ; Other_Alphabetic # Mc [2] KHMER SIGN REAHMUK..KHMER SIGN YUUKALEAPINTU +1885..1886 ; Other_Alphabetic # Mn [2] MONGOLIAN LETTER ALI GALI BALUDA..MONGOLIAN LETTER ALI GALI THREE BALUDA +18A9 ; Other_Alphabetic # Mn MONGOLIAN LETTER ALI GALI DAGALGA +1920..1922 ; Other_Alphabetic # Mn [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SIGN U +1923..1926 ; Other_Alphabetic # Mc [4] LIMBU VOWEL SIGN EE..LIMBU VOWEL SIGN AU +1927..1928 ; Other_Alphabetic # Mn [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SIGN O +1929..192B ; Other_Alphabetic # Mc [3] LIMBU SUBJOINED LETTER YA..LIMBU SUBJOINED LETTER WA +1930..1931 ; Other_Alphabetic # Mc [2] LIMBU SMALL LETTER KA..LIMBU SMALL LETTER NGA +1932 ; Other_Alphabetic # Mn LIMBU SMALL LETTER ANUSVARA +1933..1938 ; Other_Alphabetic # Mc [6] LIMBU SMALL LETTER TA..LIMBU SMALL LETTER LA +1A17..1A18 ; Other_Alphabetic # Mn [2] BUGINESE VOWEL SIGN I..BUGINESE VOWEL SIGN U +1A19..1A1A ; Other_Alphabetic # Mc [2] BUGINESE VOWEL SIGN E..BUGINESE VOWEL SIGN O +1A1B ; Other_Alphabetic # Mn BUGINESE VOWEL SIGN AE +1A55 ; Other_Alphabetic # Mc TAI THAM CONSONANT SIGN MEDIAL RA +1A56 ; Other_Alphabetic # Mn TAI THAM CONSONANT SIGN MEDIAL LA +1A57 ; Other_Alphabetic # Mc TAI THAM CONSONANT SIGN LA TANG LAI +1A58..1A5E ; Other_Alphabetic # Mn [7] TAI THAM SIGN MAI KANG LAI..TAI THAM CONSONANT SIGN SA +1A61 ; Other_Alphabetic # Mc TAI THAM VOWEL SIGN A +1A62 ; Other_Alphabetic # Mn TAI THAM VOWEL SIGN MAI SAT +1A63..1A64 ; Other_Alphabetic # Mc [2] TAI THAM VOWEL SIGN AA..TAI THAM VOWEL SIGN TALL AA +1A65..1A6C ; Other_Alphabetic # Mn [8] TAI THAM VOWEL SIGN I..TAI THAM VOWEL SIGN OA BELOW +1A6D..1A72 ; Other_Alphabetic # Mc [6] TAI THAM VOWEL SIGN OY..TAI THAM VOWEL SIGN THAM AI +1A73..1A74 ; Other_Alphabetic # Mn [2] TAI THAM VOWEL SIGN OA ABOVE..TAI THAM SIGN MAI KANG +1B00..1B03 ; Other_Alphabetic # Mn [4] BALINESE SIGN ULU RICEM..BALINESE SIGN SURANG +1B04 ; Other_Alphabetic # Mc BALINESE SIGN BISAH +1B35 ; Other_Alphabetic # Mc BALINESE VOWEL SIGN TEDUNG +1B36..1B3A ; 
Other_Alphabetic # Mn [5] BALINESE VOWEL SIGN ULU..BALINESE VOWEL SIGN RA REPA +1B3B ; Other_Alphabetic # Mc BALINESE VOWEL SIGN RA REPA TEDUNG +1B3C ; Other_Alphabetic # Mn BALINESE VOWEL SIGN LA LENGA +1B3D..1B41 ; Other_Alphabetic # Mc [5] BALINESE VOWEL SIGN LA LENGA TEDUNG..BALINESE VOWEL SIGN TALING REPA TEDUNG +1B42 ; Other_Alphabetic # Mn BALINESE VOWEL SIGN PEPET +1B43 ; Other_Alphabetic # Mc BALINESE VOWEL SIGN PEPET TEDUNG +1B80..1B81 ; Other_Alphabetic # Mn [2] SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PANGLAYAR +1B82 ; Other_Alphabetic # Mc SUNDANESE SIGN PANGWISAD +1BA1 ; Other_Alphabetic # Mc SUNDANESE CONSONANT SIGN PAMINGKAL +1BA2..1BA5 ; Other_Alphabetic # Mn [4] SUNDANESE CONSONANT SIGN PANYAKRA..SUNDANESE VOWEL SIGN PANYUKU +1BA6..1BA7 ; Other_Alphabetic # Mc [2] SUNDANESE VOWEL SIGN PANAELAENG..SUNDANESE VOWEL SIGN PANOLONG +1BA8..1BA9 ; Other_Alphabetic # Mn [2] SUNDANESE VOWEL SIGN PAMEPET..SUNDANESE VOWEL SIGN PANEULEUNG +1BAC..1BAD ; Other_Alphabetic # Mn [2] SUNDANESE CONSONANT SIGN PASANGAN MA..SUNDANESE CONSONANT SIGN PASANGAN WA +1BE7 ; Other_Alphabetic # Mc BATAK VOWEL SIGN E +1BE8..1BE9 ; Other_Alphabetic # Mn [2] BATAK VOWEL SIGN PAKPAK E..BATAK VOWEL SIGN EE +1BEA..1BEC ; Other_Alphabetic # Mc [3] BATAK VOWEL SIGN I..BATAK VOWEL SIGN O +1BED ; Other_Alphabetic # Mn BATAK VOWEL SIGN KARO O +1BEE ; Other_Alphabetic # Mc BATAK VOWEL SIGN U +1BEF..1BF1 ; Other_Alphabetic # Mn [3] BATAK VOWEL SIGN U FOR SIMALUNGUN SA..BATAK CONSONANT SIGN H +1C24..1C2B ; Other_Alphabetic # Mc [8] LEPCHA SUBJOINED LETTER YA..LEPCHA VOWEL SIGN UU +1C2C..1C33 ; Other_Alphabetic # Mn [8] LEPCHA VOWEL SIGN E..LEPCHA CONSONANT SIGN T +1C34..1C35 ; Other_Alphabetic # Mc [2] LEPCHA CONSONANT SIGN NYIN-DO..LEPCHA CONSONANT SIGN KANG +1CF2..1CF3 ; Other_Alphabetic # Mc [2] VEDIC SIGN ARDHAVISARGA..VEDIC SIGN ROTATED ARDHAVISARGA +1DE7..1DF4 ; Other_Alphabetic # Mn [14] COMBINING LATIN SMALL LETTER ALPHA..COMBINING LATIN SMALL LETTER U WITH DIAERESIS +24B6..24E9 ; Other_Alphabetic # So [52] CIRCLED LATIN CAPITAL LETTER A..CIRCLED LATIN SMALL LETTER Z +2DE0..2DFF ; Other_Alphabetic # Mn [32] COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS +A674..A67B ; Other_Alphabetic # Mn [8] COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC LETTER OMEGA +A69E..A69F ; Other_Alphabetic # Mn [2] COMBINING CYRILLIC LETTER EF..COMBINING CYRILLIC LETTER IOTIFIED E +A823..A824 ; Other_Alphabetic # Mc [2] SYLOTI NAGRI VOWEL SIGN A..SYLOTI NAGRI VOWEL SIGN I +A825..A826 ; Other_Alphabetic # Mn [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI NAGRI VOWEL SIGN E +A827 ; Other_Alphabetic # Mc SYLOTI NAGRI VOWEL SIGN OO +A880..A881 ; Other_Alphabetic # Mc [2] SAURASHTRA SIGN ANUSVARA..SAURASHTRA SIGN VISARGA +A8B4..A8C3 ; Other_Alphabetic # Mc [16] SAURASHTRA CONSONANT SIGN HAARU..SAURASHTRA VOWEL SIGN AU +A8C5 ; Other_Alphabetic # Mn SAURASHTRA SIGN CANDRABINDU +A926..A92A ; Other_Alphabetic # Mn [5] KAYAH LI VOWEL UE..KAYAH LI VOWEL O +A947..A951 ; Other_Alphabetic # Mn [11] REJANG VOWEL SIGN I..REJANG CONSONANT SIGN R +A952 ; Other_Alphabetic # Mc REJANG CONSONANT SIGN H +A980..A982 ; Other_Alphabetic # Mn [3] JAVANESE SIGN PANYANGGA..JAVANESE SIGN LAYAR +A983 ; Other_Alphabetic # Mc JAVANESE SIGN WIGNYAN +A9B4..A9B5 ; Other_Alphabetic # Mc [2] JAVANESE VOWEL SIGN TARUNG..JAVANESE VOWEL SIGN TOLONG +A9B6..A9B9 ; Other_Alphabetic # Mn [4] JAVANESE VOWEL SIGN WULU..JAVANESE VOWEL SIGN SUKU MENDUT +A9BA..A9BB ; Other_Alphabetic # Mc [2] JAVANESE VOWEL SIGN TALING..JAVANESE VOWEL SIGN DIRGA MURE 
+A9BC ; Other_Alphabetic # Mn JAVANESE VOWEL SIGN PEPET +A9BD..A9BF ; Other_Alphabetic # Mc [3] JAVANESE CONSONANT SIGN KERET..JAVANESE CONSONANT SIGN CAKRA +AA29..AA2E ; Other_Alphabetic # Mn [6] CHAM VOWEL SIGN AA..CHAM VOWEL SIGN OE +AA2F..AA30 ; Other_Alphabetic # Mc [2] CHAM VOWEL SIGN O..CHAM VOWEL SIGN AI +AA31..AA32 ; Other_Alphabetic # Mn [2] CHAM VOWEL SIGN AU..CHAM VOWEL SIGN UE +AA33..AA34 ; Other_Alphabetic # Mc [2] CHAM CONSONANT SIGN YA..CHAM CONSONANT SIGN RA +AA35..AA36 ; Other_Alphabetic # Mn [2] CHAM CONSONANT SIGN LA..CHAM CONSONANT SIGN WA +AA43 ; Other_Alphabetic # Mn CHAM CONSONANT SIGN FINAL NG +AA4C ; Other_Alphabetic # Mn CHAM CONSONANT SIGN FINAL M +AA4D ; Other_Alphabetic # Mc CHAM CONSONANT SIGN FINAL H +AAB0 ; Other_Alphabetic # Mn TAI VIET MAI KANG +AAB2..AAB4 ; Other_Alphabetic # Mn [3] TAI VIET VOWEL I..TAI VIET VOWEL U +AAB7..AAB8 ; Other_Alphabetic # Mn [2] TAI VIET MAI KHIT..TAI VIET VOWEL IA +AABE ; Other_Alphabetic # Mn TAI VIET VOWEL AM +AAEB ; Other_Alphabetic # Mc MEETEI MAYEK VOWEL SIGN II +AAEC..AAED ; Other_Alphabetic # Mn [2] MEETEI MAYEK VOWEL SIGN UU..MEETEI MAYEK VOWEL SIGN AAI +AAEE..AAEF ; Other_Alphabetic # Mc [2] MEETEI MAYEK VOWEL SIGN AU..MEETEI MAYEK VOWEL SIGN AAU +AAF5 ; Other_Alphabetic # Mc MEETEI MAYEK VOWEL SIGN VISARGA +ABE3..ABE4 ; Other_Alphabetic # Mc [2] MEETEI MAYEK VOWEL SIGN ONAP..MEETEI MAYEK VOWEL SIGN INAP +ABE5 ; Other_Alphabetic # Mn MEETEI MAYEK VOWEL SIGN ANAP +ABE6..ABE7 ; Other_Alphabetic # Mc [2] MEETEI MAYEK VOWEL SIGN YENAP..MEETEI MAYEK VOWEL SIGN SOUNAP +ABE8 ; Other_Alphabetic # Mn MEETEI MAYEK VOWEL SIGN UNAP +ABE9..ABEA ; Other_Alphabetic # Mc [2] MEETEI MAYEK VOWEL SIGN CHEINAP..MEETEI MAYEK VOWEL SIGN NUNG +FB1E ; Other_Alphabetic # Mn HEBREW POINT JUDEO-SPANISH VARIKA +10376..1037A ; Other_Alphabetic # Mn [5] COMBINING OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII +10A01..10A03 ; Other_Alphabetic # Mn [3] KHAROSHTHI VOWEL SIGN I..KHAROSHTHI VOWEL SIGN VOCALIC R +10A05..10A06 ; Other_Alphabetic # Mn [2] KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O +10A0C..10A0F ; Other_Alphabetic # Mn [4] KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI SIGN VISARGA +11000 ; Other_Alphabetic # Mc BRAHMI SIGN CANDRABINDU +11001 ; Other_Alphabetic # Mn BRAHMI SIGN ANUSVARA +11002 ; Other_Alphabetic # Mc BRAHMI SIGN VISARGA +11038..11045 ; Other_Alphabetic # Mn [14] BRAHMI VOWEL SIGN AA..BRAHMI VOWEL SIGN AU +11082 ; Other_Alphabetic # Mc KAITHI SIGN VISARGA +110B0..110B2 ; Other_Alphabetic # Mc [3] KAITHI VOWEL SIGN AA..KAITHI VOWEL SIGN II +110B3..110B6 ; Other_Alphabetic # Mn [4] KAITHI VOWEL SIGN U..KAITHI VOWEL SIGN AI +110B7..110B8 ; Other_Alphabetic # Mc [2] KAITHI VOWEL SIGN O..KAITHI VOWEL SIGN AU +11100..11102 ; Other_Alphabetic # Mn [3] CHAKMA SIGN CANDRABINDU..CHAKMA SIGN VISARGA +11127..1112B ; Other_Alphabetic # Mn [5] CHAKMA VOWEL SIGN A..CHAKMA VOWEL SIGN UU +1112C ; Other_Alphabetic # Mc CHAKMA VOWEL SIGN E +1112D..11132 ; Other_Alphabetic # Mn [6] CHAKMA VOWEL SIGN AI..CHAKMA AU MARK +11180..11181 ; Other_Alphabetic # Mn [2] SHARADA SIGN CANDRABINDU..SHARADA SIGN ANUSVARA +11182 ; Other_Alphabetic # Mc SHARADA SIGN VISARGA +111B3..111B5 ; Other_Alphabetic # Mc [3] SHARADA VOWEL SIGN AA..SHARADA VOWEL SIGN II +111B6..111BE ; Other_Alphabetic # Mn [9] SHARADA VOWEL SIGN U..SHARADA VOWEL SIGN O +111BF ; Other_Alphabetic # Mc SHARADA VOWEL SIGN AU +1122C..1122E ; Other_Alphabetic # Mc [3] KHOJKI VOWEL SIGN AA..KHOJKI VOWEL SIGN II +1122F..11231 ; Other_Alphabetic # Mn [3] KHOJKI VOWEL SIGN U..KHOJKI VOWEL 
SIGN AI +11232..11233 ; Other_Alphabetic # Mc [2] KHOJKI VOWEL SIGN O..KHOJKI VOWEL SIGN AU +11234 ; Other_Alphabetic # Mn KHOJKI SIGN ANUSVARA +11237 ; Other_Alphabetic # Mn KHOJKI SIGN SHADDA +1123E ; Other_Alphabetic # Mn KHOJKI SIGN SUKUN +112DF ; Other_Alphabetic # Mn KHUDAWADI SIGN ANUSVARA +112E0..112E2 ; Other_Alphabetic # Mc [3] KHUDAWADI VOWEL SIGN AA..KHUDAWADI VOWEL SIGN II +112E3..112E8 ; Other_Alphabetic # Mn [6] KHUDAWADI VOWEL SIGN U..KHUDAWADI VOWEL SIGN AU +11300..11301 ; Other_Alphabetic # Mn [2] GRANTHA SIGN COMBINING ANUSVARA ABOVE..GRANTHA SIGN CANDRABINDU +11302..11303 ; Other_Alphabetic # Mc [2] GRANTHA SIGN ANUSVARA..GRANTHA SIGN VISARGA +1133E..1133F ; Other_Alphabetic # Mc [2] GRANTHA VOWEL SIGN AA..GRANTHA VOWEL SIGN I +11340 ; Other_Alphabetic # Mn GRANTHA VOWEL SIGN II +11341..11344 ; Other_Alphabetic # Mc [4] GRANTHA VOWEL SIGN U..GRANTHA VOWEL SIGN VOCALIC RR +11347..11348 ; Other_Alphabetic # Mc [2] GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI +1134B..1134C ; Other_Alphabetic # Mc [2] GRANTHA VOWEL SIGN OO..GRANTHA VOWEL SIGN AU +11357 ; Other_Alphabetic # Mc GRANTHA AU LENGTH MARK +11362..11363 ; Other_Alphabetic # Mc [2] GRANTHA VOWEL SIGN VOCALIC L..GRANTHA VOWEL SIGN VOCALIC LL +11435..11437 ; Other_Alphabetic # Mc [3] NEWA VOWEL SIGN AA..NEWA VOWEL SIGN II +11438..1143F ; Other_Alphabetic # Mn [8] NEWA VOWEL SIGN U..NEWA VOWEL SIGN AI +11440..11441 ; Other_Alphabetic # Mc [2] NEWA VOWEL SIGN O..NEWA VOWEL SIGN AU +11443..11444 ; Other_Alphabetic # Mn [2] NEWA SIGN CANDRABINDU..NEWA SIGN ANUSVARA +11445 ; Other_Alphabetic # Mc NEWA SIGN VISARGA +114B0..114B2 ; Other_Alphabetic # Mc [3] TIRHUTA VOWEL SIGN AA..TIRHUTA VOWEL SIGN II +114B3..114B8 ; Other_Alphabetic # Mn [6] TIRHUTA VOWEL SIGN U..TIRHUTA VOWEL SIGN VOCALIC LL +114B9 ; Other_Alphabetic # Mc TIRHUTA VOWEL SIGN E +114BA ; Other_Alphabetic # Mn TIRHUTA VOWEL SIGN SHORT E +114BB..114BE ; Other_Alphabetic # Mc [4] TIRHUTA VOWEL SIGN AI..TIRHUTA VOWEL SIGN AU +114BF..114C0 ; Other_Alphabetic # Mn [2] TIRHUTA SIGN CANDRABINDU..TIRHUTA SIGN ANUSVARA +114C1 ; Other_Alphabetic # Mc TIRHUTA SIGN VISARGA +115AF..115B1 ; Other_Alphabetic # Mc [3] SIDDHAM VOWEL SIGN AA..SIDDHAM VOWEL SIGN II +115B2..115B5 ; Other_Alphabetic # Mn [4] SIDDHAM VOWEL SIGN U..SIDDHAM VOWEL SIGN VOCALIC RR +115B8..115BB ; Other_Alphabetic # Mc [4] SIDDHAM VOWEL SIGN E..SIDDHAM VOWEL SIGN AU +115BC..115BD ; Other_Alphabetic # Mn [2] SIDDHAM SIGN CANDRABINDU..SIDDHAM SIGN ANUSVARA +115BE ; Other_Alphabetic # Mc SIDDHAM SIGN VISARGA +115DC..115DD ; Other_Alphabetic # Mn [2] SIDDHAM VOWEL SIGN ALTERNATE U..SIDDHAM VOWEL SIGN ALTERNATE UU +11630..11632 ; Other_Alphabetic # Mc [3] MODI VOWEL SIGN AA..MODI VOWEL SIGN II +11633..1163A ; Other_Alphabetic # Mn [8] MODI VOWEL SIGN U..MODI VOWEL SIGN AI +1163B..1163C ; Other_Alphabetic # Mc [2] MODI VOWEL SIGN O..MODI VOWEL SIGN AU +1163D ; Other_Alphabetic # Mn MODI SIGN ANUSVARA +1163E ; Other_Alphabetic # Mc MODI SIGN VISARGA +11640 ; Other_Alphabetic # Mn MODI SIGN ARDHACANDRA +116AB ; Other_Alphabetic # Mn TAKRI SIGN ANUSVARA +116AC ; Other_Alphabetic # Mc TAKRI SIGN VISARGA +116AD ; Other_Alphabetic # Mn TAKRI VOWEL SIGN AA +116AE..116AF ; Other_Alphabetic # Mc [2] TAKRI VOWEL SIGN I..TAKRI VOWEL SIGN II +116B0..116B5 ; Other_Alphabetic # Mn [6] TAKRI VOWEL SIGN U..TAKRI VOWEL SIGN AU +1171D..1171F ; Other_Alphabetic # Mn [3] AHOM CONSONANT SIGN MEDIAL LA..AHOM CONSONANT SIGN MEDIAL LIGATING RA +11720..11721 ; Other_Alphabetic # Mc [2] AHOM VOWEL SIGN A..AHOM VOWEL SIGN AA 
+11722..11725 ; Other_Alphabetic # Mn [4] AHOM VOWEL SIGN I..AHOM VOWEL SIGN UU +11726 ; Other_Alphabetic # Mc AHOM VOWEL SIGN E +11727..1172A ; Other_Alphabetic # Mn [4] AHOM VOWEL SIGN AW..AHOM VOWEL SIGN AM +11C2F ; Other_Alphabetic # Mc BHAIKSUKI VOWEL SIGN AA +11C30..11C36 ; Other_Alphabetic # Mn [7] BHAIKSUKI VOWEL SIGN I..BHAIKSUKI VOWEL SIGN VOCALIC L +11C38..11C3D ; Other_Alphabetic # Mn [6] BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN ANUSVARA +11C3E ; Other_Alphabetic # Mc BHAIKSUKI SIGN VISARGA +11C92..11CA7 ; Other_Alphabetic # Mn [22] MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA +11CA9 ; Other_Alphabetic # Mc MARCHEN SUBJOINED LETTER YA +11CAA..11CB0 ; Other_Alphabetic # Mn [7] MARCHEN SUBJOINED LETTER RA..MARCHEN VOWEL SIGN AA +11CB1 ; Other_Alphabetic # Mc MARCHEN VOWEL SIGN I +11CB2..11CB3 ; Other_Alphabetic # Mn [2] MARCHEN VOWEL SIGN U..MARCHEN VOWEL SIGN E +11CB4 ; Other_Alphabetic # Mc MARCHEN VOWEL SIGN O +11CB5..11CB6 ; Other_Alphabetic # Mn [2] MARCHEN SIGN ANUSVARA..MARCHEN SIGN CANDRABINDU +16B30..16B36 ; Other_Alphabetic # Mn [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM +16F51..16F7E ; Other_Alphabetic # Mc [46] MIAO SIGN ASPIRATION..MIAO VOWEL SIGN NG +1BC9E ; Other_Alphabetic # Mn DUPLOYAN DOUBLE MARK +1E000..1E006 ; Other_Alphabetic # Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE +1E008..1E018 ; Other_Alphabetic # Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU +1E01B..1E021 ; Other_Alphabetic # Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI +1E023..1E024 ; Other_Alphabetic # Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS +1E026..1E02A ; Other_Alphabetic # Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA +1E947 ; Other_Alphabetic # Mn ADLAM HAMZA +1F130..1F149 ; Other_Alphabetic # So [26] SQUARED LATIN CAPITAL LETTER A..SQUARED LATIN CAPITAL LETTER Z +1F150..1F169 ; Other_Alphabetic # So [26] NEGATIVE CIRCLED LATIN CAPITAL LETTER A..NEGATIVE CIRCLED LATIN CAPITAL LETTER Z +1F170..1F189 ; Other_Alphabetic # So [26] NEGATIVE SQUARED LATIN CAPITAL LETTER A..NEGATIVE SQUARED LATIN CAPITAL LETTER Z + +# Total code points: 1238 + +# ================================================ + +3006 ; Ideographic # Lo IDEOGRAPHIC CLOSING MARK +3007 ; Ideographic # Nl IDEOGRAPHIC NUMBER ZERO +3021..3029 ; Ideographic # Nl [9] HANGZHOU NUMERAL ONE..HANGZHOU NUMERAL NINE +3038..303A ; Ideographic # Nl [3] HANGZHOU NUMERAL TEN..HANGZHOU NUMERAL THIRTY +3400..4DB5 ; Ideographic # Lo [6582] CJK UNIFIED IDEOGRAPH-3400..CJK UNIFIED IDEOGRAPH-4DB5 +4E00..9FD5 ; Ideographic # Lo [20950] CJK UNIFIED IDEOGRAPH-4E00..CJK UNIFIED IDEOGRAPH-9FD5 +F900..FA6D ; Ideographic # Lo [366] CJK COMPATIBILITY IDEOGRAPH-F900..CJK COMPATIBILITY IDEOGRAPH-FA6D +FA70..FAD9 ; Ideographic # Lo [106] CJK COMPATIBILITY IDEOGRAPH-FA70..CJK COMPATIBILITY IDEOGRAPH-FAD9 +17000..187EC ; Ideographic # Lo [6125] TANGUT IDEOGRAPH-17000..TANGUT IDEOGRAPH-187EC +18800..18AF2 ; Ideographic # Lo [755] TANGUT COMPONENT-001..TANGUT COMPONENT-755 +20000..2A6D6 ; Ideographic # Lo [42711] CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6D6 +2A700..2B734 ; Ideographic # Lo [4149] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B734 +2B740..2B81D ; Ideographic # Lo [222] CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D +2B820..2CEA1 ; Ideographic # Lo [5762] CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1 +2F800..2FA1D ; Ideographic # Lo [542] 
CJK COMPATIBILITY IDEOGRAPH-2F800..CJK COMPATIBILITY IDEOGRAPH-2FA1D + +# Total code points: 88284 + +# ================================================ + +005E ; Diacritic # Sk CIRCUMFLEX ACCENT +0060 ; Diacritic # Sk GRAVE ACCENT +00A8 ; Diacritic # Sk DIAERESIS +00AF ; Diacritic # Sk MACRON +00B4 ; Diacritic # Sk ACUTE ACCENT +00B7 ; Diacritic # Po MIDDLE DOT +00B8 ; Diacritic # Sk CEDILLA +02B0..02C1 ; Diacritic # Lm [18] MODIFIER LETTER SMALL H..MODIFIER LETTER REVERSED GLOTTAL STOP +02C2..02C5 ; Diacritic # Sk [4] MODIFIER LETTER LEFT ARROWHEAD..MODIFIER LETTER DOWN ARROWHEAD +02C6..02D1 ; Diacritic # Lm [12] MODIFIER LETTER CIRCUMFLEX ACCENT..MODIFIER LETTER HALF TRIANGULAR COLON +02D2..02DF ; Diacritic # Sk [14] MODIFIER LETTER CENTRED RIGHT HALF RING..MODIFIER LETTER CROSS ACCENT +02E0..02E4 ; Diacritic # Lm [5] MODIFIER LETTER SMALL GAMMA..MODIFIER LETTER SMALL REVERSED GLOTTAL STOP +02E5..02EB ; Diacritic # Sk [7] MODIFIER LETTER EXTRA-HIGH TONE BAR..MODIFIER LETTER YANG DEPARTING TONE MARK +02EC ; Diacritic # Lm MODIFIER LETTER VOICING +02ED ; Diacritic # Sk MODIFIER LETTER UNASPIRATED +02EE ; Diacritic # Lm MODIFIER LETTER DOUBLE APOSTROPHE +02EF..02FF ; Diacritic # Sk [17] MODIFIER LETTER LOW DOWN ARROWHEAD..MODIFIER LETTER LOW LEFT ARROW +0300..034E ; Diacritic # Mn [79] COMBINING GRAVE ACCENT..COMBINING UPWARDS ARROW BELOW +0350..0357 ; Diacritic # Mn [8] COMBINING RIGHT ARROWHEAD ABOVE..COMBINING RIGHT HALF RING ABOVE +035D..0362 ; Diacritic # Mn [6] COMBINING DOUBLE BREVE..COMBINING DOUBLE RIGHTWARDS ARROW BELOW +0374 ; Diacritic # Lm GREEK NUMERAL SIGN +0375 ; Diacritic # Sk GREEK LOWER NUMERAL SIGN +037A ; Diacritic # Lm GREEK YPOGEGRAMMENI +0384..0385 ; Diacritic # Sk [2] GREEK TONOS..GREEK DIALYTIKA TONOS +0483..0487 ; Diacritic # Mn [5] COMBINING CYRILLIC TITLO..COMBINING CYRILLIC POKRYTIE +0559 ; Diacritic # Lm ARMENIAN MODIFIER LETTER LEFT HALF RING +0591..05A1 ; Diacritic # Mn [17] HEBREW ACCENT ETNAHTA..HEBREW ACCENT PAZER +05A3..05BD ; Diacritic # Mn [27] HEBREW ACCENT MUNAH..HEBREW POINT METEG +05BF ; Diacritic # Mn HEBREW POINT RAFE +05C1..05C2 ; Diacritic # Mn [2] HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT +05C4 ; Diacritic # Mn HEBREW MARK UPPER DOT +064B..0652 ; Diacritic # Mn [8] ARABIC FATHATAN..ARABIC SUKUN +0657..0658 ; Diacritic # Mn [2] ARABIC INVERTED DAMMA..ARABIC MARK NOON GHUNNA +06DF..06E0 ; Diacritic # Mn [2] ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH UPRIGHT RECTANGULAR ZERO +06E5..06E6 ; Diacritic # Lm [2] ARABIC SMALL WAW..ARABIC SMALL YEH +06EA..06EC ; Diacritic # Mn [3] ARABIC EMPTY CENTRE LOW STOP..ARABIC ROUNDED HIGH STOP WITH FILLED CENTRE +0730..074A ; Diacritic # Mn [27] SYRIAC PTHAHA ABOVE..SYRIAC BARREKH +07A6..07B0 ; Diacritic # Mn [11] THAANA ABAFILI..THAANA SUKUN +07EB..07F3 ; Diacritic # Mn [9] NKO COMBINING SHORT HIGH TONE..NKO COMBINING DOUBLE DOT ABOVE +07F4..07F5 ; Diacritic # Lm [2] NKO HIGH TONE APOSTROPHE..NKO LOW TONE APOSTROPHE +0818..0819 ; Diacritic # Mn [2] SAMARITAN MARK OCCLUSION..SAMARITAN MARK DAGESH +08E3..08FE ; Diacritic # Mn [28] ARABIC TURNED DAMMA BELOW..ARABIC DAMMA WITH DOT +093C ; Diacritic # Mn DEVANAGARI SIGN NUKTA +094D ; Diacritic # Mn DEVANAGARI SIGN VIRAMA +0951..0954 ; Diacritic # Mn [4] DEVANAGARI STRESS SIGN UDATTA..DEVANAGARI ACUTE ACCENT +0971 ; Diacritic # Lm DEVANAGARI SIGN HIGH SPACING DOT +09BC ; Diacritic # Mn BENGALI SIGN NUKTA +09CD ; Diacritic # Mn BENGALI SIGN VIRAMA +0A3C ; Diacritic # Mn GURMUKHI SIGN NUKTA +0A4D ; Diacritic # Mn GURMUKHI SIGN VIRAMA +0ABC ; Diacritic # Mn 
GUJARATI SIGN NUKTA +0ACD ; Diacritic # Mn GUJARATI SIGN VIRAMA +0B3C ; Diacritic # Mn ORIYA SIGN NUKTA +0B4D ; Diacritic # Mn ORIYA SIGN VIRAMA +0BCD ; Diacritic # Mn TAMIL SIGN VIRAMA +0C4D ; Diacritic # Mn TELUGU SIGN VIRAMA +0CBC ; Diacritic # Mn KANNADA SIGN NUKTA +0CCD ; Diacritic # Mn KANNADA SIGN VIRAMA +0D4D ; Diacritic # Mn MALAYALAM SIGN VIRAMA +0DCA ; Diacritic # Mn SINHALA SIGN AL-LAKUNA +0E47..0E4C ; Diacritic # Mn [6] THAI CHARACTER MAITAIKHU..THAI CHARACTER THANTHAKHAT +0E4E ; Diacritic # Mn THAI CHARACTER YAMAKKAN +0EC8..0ECC ; Diacritic # Mn [5] LAO TONE MAI EK..LAO CANCELLATION MARK +0F18..0F19 ; Diacritic # Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS +0F35 ; Diacritic # Mn TIBETAN MARK NGAS BZUNG NYI ZLA +0F37 ; Diacritic # Mn TIBETAN MARK NGAS BZUNG SGOR RTAGS +0F39 ; Diacritic # Mn TIBETAN MARK TSA -PHRU +0F3E..0F3F ; Diacritic # Mc [2] TIBETAN SIGN YAR TSHES..TIBETAN SIGN MAR TSHES +0F82..0F84 ; Diacritic # Mn [3] TIBETAN SIGN NYI ZLA NAA DA..TIBETAN MARK HALANTA +0F86..0F87 ; Diacritic # Mn [2] TIBETAN SIGN LCI RTAGS..TIBETAN SIGN YANG RTAGS +0FC6 ; Diacritic # Mn TIBETAN SYMBOL PADMA GDAN +1037 ; Diacritic # Mn MYANMAR SIGN DOT BELOW +1039..103A ; Diacritic # Mn [2] MYANMAR SIGN VIRAMA..MYANMAR SIGN ASAT +1087..108C ; Diacritic # Mc [6] MYANMAR SIGN SHAN TONE-2..MYANMAR SIGN SHAN COUNCIL TONE-3 +108D ; Diacritic # Mn MYANMAR SIGN SHAN COUNCIL EMPHATIC TONE +108F ; Diacritic # Mc MYANMAR SIGN RUMAI PALAUNG TONE-5 +109A..109B ; Diacritic # Mc [2] MYANMAR SIGN KHAMTI TONE-1..MYANMAR SIGN KHAMTI TONE-3 +17C9..17D3 ; Diacritic # Mn [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN BATHAMASAT +17DD ; Diacritic # Mn KHMER SIGN ATTHACAN +1939..193B ; Diacritic # Mn [3] LIMBU SIGN MUKPHRENG..LIMBU SIGN SA-I +1A75..1A7C ; Diacritic # Mn [8] TAI THAM SIGN TONE-1..TAI THAM SIGN KHUEN-LUE KARAN +1A7F ; Diacritic # Mn TAI THAM COMBINING CRYPTOGRAMMIC DOT +1AB0..1ABD ; Diacritic # Mn [14] COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW +1B34 ; Diacritic # Mn BALINESE SIGN REREKAN +1B44 ; Diacritic # Mc BALINESE ADEG ADEG +1B6B..1B73 ; Diacritic # Mn [9] BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG +1BAA ; Diacritic # Mc SUNDANESE SIGN PAMAAEH +1BAB ; Diacritic # Mn SUNDANESE SIGN VIRAMA +1C36..1C37 ; Diacritic # Mn [2] LEPCHA SIGN RAN..LEPCHA SIGN NUKTA +1C78..1C7D ; Diacritic # Lm [6] OL CHIKI MU TTUDDAG..OL CHIKI AHAD +1CD0..1CD2 ; Diacritic # Mn [3] VEDIC TONE KARSHANA..VEDIC TONE PRENKHA +1CD3 ; Diacritic # Po VEDIC SIGN NIHSHVASA +1CD4..1CE0 ; Diacritic # Mn [13] VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC TONE RIGVEDIC KASHMIRI INDEPENDENT SVARITA +1CE1 ; Diacritic # Mc VEDIC TONE ATHARVAVEDIC INDEPENDENT SVARITA +1CE2..1CE8 ; Diacritic # Mn [7] VEDIC SIGN VISARGA SVARITA..VEDIC SIGN VISARGA ANUDATTA WITH TAIL +1CED ; Diacritic # Mn VEDIC SIGN TIRYAK +1CF4 ; Diacritic # Mn VEDIC TONE CANDRA ABOVE +1CF8..1CF9 ; Diacritic # Mn [2] VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE +1D2C..1D6A ; Diacritic # Lm [63] MODIFIER LETTER CAPITAL A..GREEK SUBSCRIPT SMALL LETTER CHI +1DC4..1DCF ; Diacritic # Mn [12] COMBINING MACRON-ACUTE..COMBINING ZIGZAG BELOW +1DF5 ; Diacritic # Mn COMBINING UP TACK ABOVE +1DFD..1DFF ; Diacritic # Mn [3] COMBINING ALMOST EQUAL TO BELOW..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW +1FBD ; Diacritic # Sk GREEK KORONIS +1FBF..1FC1 ; Diacritic # Sk [3] GREEK PSILI..GREEK DIALYTIKA AND PERISPOMENI +1FCD..1FCF ; Diacritic # Sk [3] GREEK PSILI AND VARIA..GREEK PSILI 
AND PERISPOMENI +1FDD..1FDF ; Diacritic # Sk [3] GREEK DASIA AND VARIA..GREEK DASIA AND PERISPOMENI +1FED..1FEF ; Diacritic # Sk [3] GREEK DIALYTIKA AND VARIA..GREEK VARIA +1FFD..1FFE ; Diacritic # Sk [2] GREEK OXIA..GREEK DASIA +2CEF..2CF1 ; Diacritic # Mn [3] COPTIC COMBINING NI ABOVE..COPTIC COMBINING SPIRITUS LENIS +2E2F ; Diacritic # Lm VERTICAL TILDE +302A..302D ; Diacritic # Mn [4] IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK +302E..302F ; Diacritic # Mc [2] HANGUL SINGLE DOT TONE MARK..HANGUL DOUBLE DOT TONE MARK +3099..309A ; Diacritic # Mn [2] COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK +309B..309C ; Diacritic # Sk [2] KATAKANA-HIRAGANA VOICED SOUND MARK..KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK +30FC ; Diacritic # Lm KATAKANA-HIRAGANA PROLONGED SOUND MARK +A66F ; Diacritic # Mn COMBINING CYRILLIC VZMET +A67C..A67D ; Diacritic # Mn [2] COMBINING CYRILLIC KAVYKA..COMBINING CYRILLIC PAYEROK +A67F ; Diacritic # Lm CYRILLIC PAYEROK +A69C..A69D ; Diacritic # Lm [2] MODIFIER LETTER CYRILLIC HARD SIGN..MODIFIER LETTER CYRILLIC SOFT SIGN +A6F0..A6F1 ; Diacritic # Mn [2] BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS +A717..A71F ; Diacritic # Lm [9] MODIFIER LETTER DOT VERTICAL BAR..MODIFIER LETTER LOW INVERTED EXCLAMATION MARK +A720..A721 ; Diacritic # Sk [2] MODIFIER LETTER STRESS AND HIGH TONE..MODIFIER LETTER STRESS AND LOW TONE +A788 ; Diacritic # Lm MODIFIER LETTER LOW CIRCUMFLEX ACCENT +A7F8..A7F9 ; Diacritic # Lm [2] MODIFIER LETTER CAPITAL H WITH STROKE..MODIFIER LETTER SMALL LIGATURE OE +A8C4 ; Diacritic # Mn SAURASHTRA SIGN VIRAMA +A8E0..A8F1 ; Diacritic # Mn [18] COMBINING DEVANAGARI DIGIT ZERO..COMBINING DEVANAGARI SIGN AVAGRAHA +A92B..A92D ; Diacritic # Mn [3] KAYAH LI TONE PLOPHU..KAYAH LI TONE CALYA PLOPHU +A92E ; Diacritic # Po KAYAH LI SIGN CWI +A953 ; Diacritic # Mc REJANG VIRAMA +A9B3 ; Diacritic # Mn JAVANESE SIGN CECAK TELU +A9C0 ; Diacritic # Mc JAVANESE PANGKON +A9E5 ; Diacritic # Mn MYANMAR SIGN SHAN SAW +AA7B ; Diacritic # Mc MYANMAR SIGN PAO KAREN TONE +AA7C ; Diacritic # Mn MYANMAR SIGN TAI LAING TONE-2 +AA7D ; Diacritic # Mc MYANMAR SIGN TAI LAING TONE-5 +AABF ; Diacritic # Mn TAI VIET TONE MAI EK +AAC0 ; Diacritic # Lo TAI VIET TONE MAI NUENG +AAC1 ; Diacritic # Mn TAI VIET TONE MAI THO +AAC2 ; Diacritic # Lo TAI VIET TONE MAI SONG +AAF6 ; Diacritic # Mn MEETEI MAYEK VIRAMA +AB5B ; Diacritic # Sk MODIFIER BREVE WITH INVERTED BREVE +AB5C..AB5F ; Diacritic # Lm [4] MODIFIER LETTER SMALL HENG..MODIFIER LETTER SMALL U WITH LEFT HOOK +ABEC ; Diacritic # Mc MEETEI MAYEK LUM IYEK +ABED ; Diacritic # Mn MEETEI MAYEK APUN IYEK +FB1E ; Diacritic # Mn HEBREW POINT JUDEO-SPANISH VARIKA +FE20..FE2F ; Diacritic # Mn [16] COMBINING LIGATURE LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF +FF3E ; Diacritic # Sk FULLWIDTH CIRCUMFLEX ACCENT +FF40 ; Diacritic # Sk FULLWIDTH GRAVE ACCENT +FF70 ; Diacritic # Lm HALFWIDTH KATAKANA-HIRAGANA PROLONGED SOUND MARK +FF9E..FF9F ; Diacritic # Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK +FFE3 ; Diacritic # Sk FULLWIDTH MACRON +102E0 ; Diacritic # Mn COPTIC EPACT THOUSANDS MARK +10AE5..10AE6 ; Diacritic # Mn [2] MANICHAEAN ABBREVIATION MARK ABOVE..MANICHAEAN ABBREVIATION MARK BELOW +110B9..110BA ; Diacritic # Mn [2] KAITHI SIGN VIRAMA..KAITHI SIGN NUKTA +11133..11134 ; Diacritic # Mn [2] CHAKMA VIRAMA..CHAKMA MAAYYAA +11173 ; Diacritic # Mn MAHAJANI SIGN NUKTA +111C0 ; Diacritic # Mc SHARADA SIGN VIRAMA +111CA..111CC 
; Diacritic # Mn [3] SHARADA SIGN NUKTA..SHARADA EXTRA SHORT VOWEL MARK +11235 ; Diacritic # Mc KHOJKI SIGN VIRAMA +11236 ; Diacritic # Mn KHOJKI SIGN NUKTA +112E9..112EA ; Diacritic # Mn [2] KHUDAWADI SIGN NUKTA..KHUDAWADI SIGN VIRAMA +1133C ; Diacritic # Mn GRANTHA SIGN NUKTA +1134D ; Diacritic # Mc GRANTHA SIGN VIRAMA +11366..1136C ; Diacritic # Mn [7] COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX +11370..11374 ; Diacritic # Mn [5] COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA +11442 ; Diacritic # Mn NEWA SIGN VIRAMA +11446 ; Diacritic # Mn NEWA SIGN NUKTA +114C2..114C3 ; Diacritic # Mn [2] TIRHUTA SIGN VIRAMA..TIRHUTA SIGN NUKTA +115BF..115C0 ; Diacritic # Mn [2] SIDDHAM SIGN VIRAMA..SIDDHAM SIGN NUKTA +1163F ; Diacritic # Mn MODI SIGN VIRAMA +116B6 ; Diacritic # Mc TAKRI SIGN VIRAMA +116B7 ; Diacritic # Mn TAKRI SIGN NUKTA +1172B ; Diacritic # Mn AHOM SIGN KILLER +11C3F ; Diacritic # Mn BHAIKSUKI SIGN VIRAMA +16AF0..16AF4 ; Diacritic # Mn [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE +16F8F..16F92 ; Diacritic # Mn [4] MIAO TONE RIGHT..MIAO TONE BELOW +16F93..16F9F ; Diacritic # Lm [13] MIAO LETTER TONE-2..MIAO LETTER REFORMED TONE-8 +1D167..1D169 ; Diacritic # Mn [3] MUSICAL SYMBOL COMBINING TREMOLO-1..MUSICAL SYMBOL COMBINING TREMOLO-3 +1D16D..1D172 ; Diacritic # Mc [6] MUSICAL SYMBOL COMBINING AUGMENTATION DOT..MUSICAL SYMBOL COMBINING FLAG-5 +1D17B..1D182 ; Diacritic # Mn [8] MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL COMBINING LOURE +1D185..1D18B ; Diacritic # Mn [7] MUSICAL SYMBOL COMBINING DOIT..MUSICAL SYMBOL COMBINING TRIPLE TONGUE +1D1AA..1D1AD ; Diacritic # Mn [4] MUSICAL SYMBOL COMBINING DOWN BOW..MUSICAL SYMBOL COMBINING SNAP PIZZICATO +1E8D0..1E8D6 ; Diacritic # Mn [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS +1E944..1E946 ; Diacritic # Mn [3] ADLAM ALIF LENGTHENER..ADLAM GEMINATION MARK +1E948..1E94A ; Diacritic # Mn [3] ADLAM CONSONANT MODIFIER..ADLAM NUKTA + +# Total code points: 782 + +# ================================================ + +00B7 ; Extender # Po MIDDLE DOT +02D0..02D1 ; Extender # Lm [2] MODIFIER LETTER TRIANGULAR COLON..MODIFIER LETTER HALF TRIANGULAR COLON +0640 ; Extender # Lm ARABIC TATWEEL +07FA ; Extender # Lm NKO LAJANYALAN +0E46 ; Extender # Lm THAI CHARACTER MAIYAMOK +0EC6 ; Extender # Lm LAO KO LA +180A ; Extender # Po MONGOLIAN NIRUGU +1843 ; Extender # Lm MONGOLIAN LETTER TODO LONG VOWEL SIGN +1AA7 ; Extender # Lm TAI THAM SIGN MAI YAMOK +1C36 ; Extender # Mn LEPCHA SIGN RAN +1C7B ; Extender # Lm OL CHIKI RELAA +3005 ; Extender # Lm IDEOGRAPHIC ITERATION MARK +3031..3035 ; Extender # Lm [5] VERTICAL KANA REPEAT MARK..VERTICAL KANA REPEAT MARK LOWER HALF +309D..309E ; Extender # Lm [2] HIRAGANA ITERATION MARK..HIRAGANA VOICED ITERATION MARK +30FC..30FE ; Extender # Lm [3] KATAKANA-HIRAGANA PROLONGED SOUND MARK..KATAKANA VOICED ITERATION MARK +A015 ; Extender # Lm YI SYLLABLE WU +A60C ; Extender # Lm VAI SYLLABLE LENGTHENER +A9CF ; Extender # Lm JAVANESE PANGRANGKEP +A9E6 ; Extender # Lm MYANMAR MODIFIER LETTER SHAN REDUPLICATION +AA70 ; Extender # Lm MYANMAR MODIFIER LETTER KHAMTI REDUPLICATION +AADD ; Extender # Lm TAI VIET SYMBOL SAM +AAF3..AAF4 ; Extender # Lm [2] MEETEI MAYEK SYLLABLE REPETITION MARK..MEETEI MAYEK WORD REPETITION MARK +FF70 ; Extender # Lm HALFWIDTH KATAKANA-HIRAGANA PROLONGED SOUND MARK +1135D ; Extender # Lo GRANTHA SIGN PLUTA +115C6..115C8 ; Extender # Po [3] SIDDHAM REPETITION MARK-1..SIDDHAM REPETITION MARK-3 +16B42..16B43 ; 
Extender # Lm [2] PAHAWH HMONG SIGN VOS NRUA..PAHAWH HMONG SIGN IB YAM +16FE0 ; Extender # Lm TANGUT ITERATION MARK +1E944..1E946 ; Extender # Mn [3] ADLAM ALIF LENGTHENER..ADLAM GEMINATION MARK + +# Total code points: 42 + +# ================================================ + +00AA ; Other_Lowercase # Lo FEMININE ORDINAL INDICATOR +00BA ; Other_Lowercase # Lo MASCULINE ORDINAL INDICATOR +02B0..02B8 ; Other_Lowercase # Lm [9] MODIFIER LETTER SMALL H..MODIFIER LETTER SMALL Y +02C0..02C1 ; Other_Lowercase # Lm [2] MODIFIER LETTER GLOTTAL STOP..MODIFIER LETTER REVERSED GLOTTAL STOP +02E0..02E4 ; Other_Lowercase # Lm [5] MODIFIER LETTER SMALL GAMMA..MODIFIER LETTER SMALL REVERSED GLOTTAL STOP +0345 ; Other_Lowercase # Mn COMBINING GREEK YPOGEGRAMMENI +037A ; Other_Lowercase # Lm GREEK YPOGEGRAMMENI +1D2C..1D6A ; Other_Lowercase # Lm [63] MODIFIER LETTER CAPITAL A..GREEK SUBSCRIPT SMALL LETTER CHI +1D78 ; Other_Lowercase # Lm MODIFIER LETTER CYRILLIC EN +1D9B..1DBF ; Other_Lowercase # Lm [37] MODIFIER LETTER SMALL TURNED ALPHA..MODIFIER LETTER SMALL THETA +2071 ; Other_Lowercase # Lm SUPERSCRIPT LATIN SMALL LETTER I +207F ; Other_Lowercase # Lm SUPERSCRIPT LATIN SMALL LETTER N +2090..209C ; Other_Lowercase # Lm [13] LATIN SUBSCRIPT SMALL LETTER A..LATIN SUBSCRIPT SMALL LETTER T +2170..217F ; Other_Lowercase # Nl [16] SMALL ROMAN NUMERAL ONE..SMALL ROMAN NUMERAL ONE THOUSAND +24D0..24E9 ; Other_Lowercase # So [26] CIRCLED LATIN SMALL LETTER A..CIRCLED LATIN SMALL LETTER Z +2C7C..2C7D ; Other_Lowercase # Lm [2] LATIN SUBSCRIPT SMALL LETTER J..MODIFIER LETTER CAPITAL V +A69C..A69D ; Other_Lowercase # Lm [2] MODIFIER LETTER CYRILLIC HARD SIGN..MODIFIER LETTER CYRILLIC SOFT SIGN +A770 ; Other_Lowercase # Lm MODIFIER LETTER US +A7F8..A7F9 ; Other_Lowercase # Lm [2] MODIFIER LETTER CAPITAL H WITH STROKE..MODIFIER LETTER SMALL LIGATURE OE +AB5C..AB5F ; Other_Lowercase # Lm [4] MODIFIER LETTER SMALL HENG..MODIFIER LETTER SMALL U WITH LEFT HOOK + +# Total code points: 189 + +# ================================================ + +2160..216F ; Other_Uppercase # Nl [16] ROMAN NUMERAL ONE..ROMAN NUMERAL ONE THOUSAND +24B6..24CF ; Other_Uppercase # So [26] CIRCLED LATIN CAPITAL LETTER A..CIRCLED LATIN CAPITAL LETTER Z +1F130..1F149 ; Other_Uppercase # So [26] SQUARED LATIN CAPITAL LETTER A..SQUARED LATIN CAPITAL LETTER Z +1F150..1F169 ; Other_Uppercase # So [26] NEGATIVE CIRCLED LATIN CAPITAL LETTER A..NEGATIVE CIRCLED LATIN CAPITAL LETTER Z +1F170..1F189 ; Other_Uppercase # So [26] NEGATIVE SQUARED LATIN CAPITAL LETTER A..NEGATIVE SQUARED LATIN CAPITAL LETTER Z + +# Total code points: 120 + +# ================================================ + +FDD0..FDEF ; Noncharacter_Code_Point # Cn [32] .. +FFFE..FFFF ; Noncharacter_Code_Point # Cn [2] .. +1FFFE..1FFFF ; Noncharacter_Code_Point # Cn [2] .. +2FFFE..2FFFF ; Noncharacter_Code_Point # Cn [2] .. +3FFFE..3FFFF ; Noncharacter_Code_Point # Cn [2] .. +4FFFE..4FFFF ; Noncharacter_Code_Point # Cn [2] .. +5FFFE..5FFFF ; Noncharacter_Code_Point # Cn [2] .. +6FFFE..6FFFF ; Noncharacter_Code_Point # Cn [2] .. +7FFFE..7FFFF ; Noncharacter_Code_Point # Cn [2] .. +8FFFE..8FFFF ; Noncharacter_Code_Point # Cn [2] .. +9FFFE..9FFFF ; Noncharacter_Code_Point # Cn [2] .. +AFFFE..AFFFF ; Noncharacter_Code_Point # Cn [2] .. +BFFFE..BFFFF ; Noncharacter_Code_Point # Cn [2] .. +CFFFE..CFFFF ; Noncharacter_Code_Point # Cn [2] .. +DFFFE..DFFFF ; Noncharacter_Code_Point # Cn [2] .. +EFFFE..EFFFF ; Noncharacter_Code_Point # Cn [2] .. +FFFFE..FFFFF ; Noncharacter_Code_Point # Cn [2] .. 
+10FFFE..10FFFF; Noncharacter_Code_Point # Cn [2] .. + +# Total code points: 66 + +# ================================================ + +09BE ; Other_Grapheme_Extend # Mc BENGALI VOWEL SIGN AA +09D7 ; Other_Grapheme_Extend # Mc BENGALI AU LENGTH MARK +0B3E ; Other_Grapheme_Extend # Mc ORIYA VOWEL SIGN AA +0B57 ; Other_Grapheme_Extend # Mc ORIYA AU LENGTH MARK +0BBE ; Other_Grapheme_Extend # Mc TAMIL VOWEL SIGN AA +0BD7 ; Other_Grapheme_Extend # Mc TAMIL AU LENGTH MARK +0CC2 ; Other_Grapheme_Extend # Mc KANNADA VOWEL SIGN UU +0CD5..0CD6 ; Other_Grapheme_Extend # Mc [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK +0D3E ; Other_Grapheme_Extend # Mc MALAYALAM VOWEL SIGN AA +0D57 ; Other_Grapheme_Extend # Mc MALAYALAM AU LENGTH MARK +0DCF ; Other_Grapheme_Extend # Mc SINHALA VOWEL SIGN AELA-PILLA +0DDF ; Other_Grapheme_Extend # Mc SINHALA VOWEL SIGN GAYANUKITTA +200C ; Other_Grapheme_Extend # Cf ZERO WIDTH NON-JOINER +302E..302F ; Other_Grapheme_Extend # Mc [2] HANGUL SINGLE DOT TONE MARK..HANGUL DOUBLE DOT TONE MARK +FF9E..FF9F ; Other_Grapheme_Extend # Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK +1133E ; Other_Grapheme_Extend # Mc GRANTHA VOWEL SIGN AA +11357 ; Other_Grapheme_Extend # Mc GRANTHA AU LENGTH MARK +114B0 ; Other_Grapheme_Extend # Mc TIRHUTA VOWEL SIGN AA +114BD ; Other_Grapheme_Extend # Mc TIRHUTA VOWEL SIGN SHORT O +115AF ; Other_Grapheme_Extend # Mc SIDDHAM VOWEL SIGN AA +1D165 ; Other_Grapheme_Extend # Mc MUSICAL SYMBOL COMBINING STEM +1D16E..1D172 ; Other_Grapheme_Extend # Mc [5] MUSICAL SYMBOL COMBINING FLAG-1..MUSICAL SYMBOL COMBINING FLAG-5 +E0020..E007F ; Other_Grapheme_Extend # Cf [96] TAG SPACE..CANCEL TAG + +# Total code points: 125 + +# ================================================ + +2FF0..2FF1 ; IDS_Binary_Operator # So [2] IDEOGRAPHIC DESCRIPTION CHARACTER LEFT TO RIGHT..IDEOGRAPHIC DESCRIPTION CHARACTER ABOVE TO BELOW +2FF4..2FFB ; IDS_Binary_Operator # So [8] IDEOGRAPHIC DESCRIPTION CHARACTER FULL SURROUND..IDEOGRAPHIC DESCRIPTION CHARACTER OVERLAID + +# Total code points: 10 + +# ================================================ + +2FF2..2FF3 ; IDS_Trinary_Operator # So [2] IDEOGRAPHIC DESCRIPTION CHARACTER LEFT TO MIDDLE AND RIGHT..IDEOGRAPHIC DESCRIPTION CHARACTER ABOVE TO MIDDLE AND BELOW + +# Total code points: 2 + +# ================================================ + +2E80..2E99 ; Radical # So [26] CJK RADICAL REPEAT..CJK RADICAL RAP +2E9B..2EF3 ; Radical # So [89] CJK RADICAL CHOKE..CJK RADICAL C-SIMPLIFIED TURTLE +2F00..2FD5 ; Radical # So [214] KANGXI RADICAL ONE..KANGXI RADICAL FLUTE + +# Total code points: 329 + +# ================================================ + +3400..4DB5 ; Unified_Ideograph # Lo [6582] CJK UNIFIED IDEOGRAPH-3400..CJK UNIFIED IDEOGRAPH-4DB5 +4E00..9FD5 ; Unified_Ideograph # Lo [20950] CJK UNIFIED IDEOGRAPH-4E00..CJK UNIFIED IDEOGRAPH-9FD5 +FA0E..FA0F ; Unified_Ideograph # Lo [2] CJK COMPATIBILITY IDEOGRAPH-FA0E..CJK COMPATIBILITY IDEOGRAPH-FA0F +FA11 ; Unified_Ideograph # Lo CJK COMPATIBILITY IDEOGRAPH-FA11 +FA13..FA14 ; Unified_Ideograph # Lo [2] CJK COMPATIBILITY IDEOGRAPH-FA13..CJK COMPATIBILITY IDEOGRAPH-FA14 +FA1F ; Unified_Ideograph # Lo CJK COMPATIBILITY IDEOGRAPH-FA1F +FA21 ; Unified_Ideograph # Lo CJK COMPATIBILITY IDEOGRAPH-FA21 +FA23..FA24 ; Unified_Ideograph # Lo [2] CJK COMPATIBILITY IDEOGRAPH-FA23..CJK COMPATIBILITY IDEOGRAPH-FA24 +FA27..FA29 ; Unified_Ideograph # Lo [3] CJK COMPATIBILITY IDEOGRAPH-FA27..CJK COMPATIBILITY IDEOGRAPH-FA29 +20000..2A6D6 ; Unified_Ideograph # 
Lo [42711] CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6D6 +2A700..2B734 ; Unified_Ideograph # Lo [4149] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B734 +2B740..2B81D ; Unified_Ideograph # Lo [222] CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D +2B820..2CEA1 ; Unified_Ideograph # Lo [5762] CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1 + +# Total code points: 80388 + +# ================================================ + +034F ; Other_Default_Ignorable_Code_Point # Mn COMBINING GRAPHEME JOINER +115F..1160 ; Other_Default_Ignorable_Code_Point # Lo [2] HANGUL CHOSEONG FILLER..HANGUL JUNGSEONG FILLER +17B4..17B5 ; Other_Default_Ignorable_Code_Point # Mn [2] KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA +2065 ; Other_Default_Ignorable_Code_Point # Cn +3164 ; Other_Default_Ignorable_Code_Point # Lo HANGUL FILLER +FFA0 ; Other_Default_Ignorable_Code_Point # Lo HALFWIDTH HANGUL FILLER +FFF0..FFF8 ; Other_Default_Ignorable_Code_Point # Cn [9] .. +E0000 ; Other_Default_Ignorable_Code_Point # Cn +E0002..E001F ; Other_Default_Ignorable_Code_Point # Cn [30] .. +E0080..E00FF ; Other_Default_Ignorable_Code_Point # Cn [128] .. +E01F0..E0FFF ; Other_Default_Ignorable_Code_Point # Cn [3600] .. + +# Total code points: 3776 + +# ================================================ + +0149 ; Deprecated # L& LATIN SMALL LETTER N PRECEDED BY APOSTROPHE +0673 ; Deprecated # Lo ARABIC LETTER ALEF WITH WAVY HAMZA BELOW +0F77 ; Deprecated # Mn TIBETAN VOWEL SIGN VOCALIC RR +0F79 ; Deprecated # Mn TIBETAN VOWEL SIGN VOCALIC LL +17A3..17A4 ; Deprecated # Lo [2] KHMER INDEPENDENT VOWEL QAQ..KHMER INDEPENDENT VOWEL QAA +206A..206F ; Deprecated # Cf [6] INHIBIT SYMMETRIC SWAPPING..NOMINAL DIGIT SHAPES +2329 ; Deprecated # Ps LEFT-POINTING ANGLE BRACKET +232A ; Deprecated # Pe RIGHT-POINTING ANGLE BRACKET +E0001 ; Deprecated # Cf LANGUAGE TAG + +# Total code points: 15 + +# ================================================ + +0069..006A ; Soft_Dotted # L& [2] LATIN SMALL LETTER I..LATIN SMALL LETTER J +012F ; Soft_Dotted # L& LATIN SMALL LETTER I WITH OGONEK +0249 ; Soft_Dotted # L& LATIN SMALL LETTER J WITH STROKE +0268 ; Soft_Dotted # L& LATIN SMALL LETTER I WITH STROKE +029D ; Soft_Dotted # L& LATIN SMALL LETTER J WITH CROSSED-TAIL +02B2 ; Soft_Dotted # Lm MODIFIER LETTER SMALL J +03F3 ; Soft_Dotted # L& GREEK LETTER YOT +0456 ; Soft_Dotted # L& CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I +0458 ; Soft_Dotted # L& CYRILLIC SMALL LETTER JE +1D62 ; Soft_Dotted # Lm LATIN SUBSCRIPT SMALL LETTER I +1D96 ; Soft_Dotted # L& LATIN SMALL LETTER I WITH RETROFLEX HOOK +1DA4 ; Soft_Dotted # Lm MODIFIER LETTER SMALL I WITH STROKE +1DA8 ; Soft_Dotted # Lm MODIFIER LETTER SMALL J WITH CROSSED-TAIL +1E2D ; Soft_Dotted # L& LATIN SMALL LETTER I WITH TILDE BELOW +1ECB ; Soft_Dotted # L& LATIN SMALL LETTER I WITH DOT BELOW +2071 ; Soft_Dotted # Lm SUPERSCRIPT LATIN SMALL LETTER I +2148..2149 ; Soft_Dotted # L& [2] DOUBLE-STRUCK ITALIC SMALL I..DOUBLE-STRUCK ITALIC SMALL J +2C7C ; Soft_Dotted # Lm LATIN SUBSCRIPT SMALL LETTER J +1D422..1D423 ; Soft_Dotted # L& [2] MATHEMATICAL BOLD SMALL I..MATHEMATICAL BOLD SMALL J +1D456..1D457 ; Soft_Dotted # L& [2] MATHEMATICAL ITALIC SMALL I..MATHEMATICAL ITALIC SMALL J +1D48A..1D48B ; Soft_Dotted # L& [2] MATHEMATICAL BOLD ITALIC SMALL I..MATHEMATICAL BOLD ITALIC SMALL J +1D4BE..1D4BF ; Soft_Dotted # L& [2] MATHEMATICAL SCRIPT SMALL I..MATHEMATICAL SCRIPT SMALL J +1D4F2..1D4F3 ; Soft_Dotted # L& [2] MATHEMATICAL BOLD SCRIPT SMALL I..MATHEMATICAL BOLD SCRIPT 
SMALL J +1D526..1D527 ; Soft_Dotted # L& [2] MATHEMATICAL FRAKTUR SMALL I..MATHEMATICAL FRAKTUR SMALL J +1D55A..1D55B ; Soft_Dotted # L& [2] MATHEMATICAL DOUBLE-STRUCK SMALL I..MATHEMATICAL DOUBLE-STRUCK SMALL J +1D58E..1D58F ; Soft_Dotted # L& [2] MATHEMATICAL BOLD FRAKTUR SMALL I..MATHEMATICAL BOLD FRAKTUR SMALL J +1D5C2..1D5C3 ; Soft_Dotted # L& [2] MATHEMATICAL SANS-SERIF SMALL I..MATHEMATICAL SANS-SERIF SMALL J +1D5F6..1D5F7 ; Soft_Dotted # L& [2] MATHEMATICAL SANS-SERIF BOLD SMALL I..MATHEMATICAL SANS-SERIF BOLD SMALL J +1D62A..1D62B ; Soft_Dotted # L& [2] MATHEMATICAL SANS-SERIF ITALIC SMALL I..MATHEMATICAL SANS-SERIF ITALIC SMALL J +1D65E..1D65F ; Soft_Dotted # L& [2] MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL I..MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL J +1D692..1D693 ; Soft_Dotted # L& [2] MATHEMATICAL MONOSPACE SMALL I..MATHEMATICAL MONOSPACE SMALL J + +# Total code points: 46 + +# ================================================ + +0E40..0E44 ; Logical_Order_Exception # Lo [5] THAI CHARACTER SARA E..THAI CHARACTER SARA AI MAIMALAI +0EC0..0EC4 ; Logical_Order_Exception # Lo [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI +19B5..19B7 ; Logical_Order_Exception # Lo [3] NEW TAI LUE VOWEL SIGN E..NEW TAI LUE VOWEL SIGN O +19BA ; Logical_Order_Exception # Lo NEW TAI LUE VOWEL SIGN AY +AAB5..AAB6 ; Logical_Order_Exception # Lo [2] TAI VIET VOWEL E..TAI VIET VOWEL O +AAB9 ; Logical_Order_Exception # Lo TAI VIET VOWEL UEA +AABB..AABC ; Logical_Order_Exception # Lo [2] TAI VIET VOWEL AUE..TAI VIET VOWEL AY + +# Total code points: 19 + +# ================================================ + +1885..1886 ; Other_ID_Start # Mn [2] MONGOLIAN LETTER ALI GALI BALUDA..MONGOLIAN LETTER ALI GALI THREE BALUDA +2118 ; Other_ID_Start # Sm SCRIPT CAPITAL P +212E ; Other_ID_Start # So ESTIMATED SYMBOL +309B..309C ; Other_ID_Start # Sk [2] KATAKANA-HIRAGANA VOICED SOUND MARK..KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK + +# Total code points: 6 + +# ================================================ + +00B7 ; Other_ID_Continue # Po MIDDLE DOT +0387 ; Other_ID_Continue # Po GREEK ANO TELEIA +1369..1371 ; Other_ID_Continue # No [9] ETHIOPIC DIGIT ONE..ETHIOPIC DIGIT NINE +19DA ; Other_ID_Continue # No NEW TAI LUE THAM DIGIT ONE + +# Total code points: 12 + +# ================================================ + +0021 ; Sentence_Terminal # Po EXCLAMATION MARK +002E ; Sentence_Terminal # Po FULL STOP +003F ; Sentence_Terminal # Po QUESTION MARK +0589 ; Sentence_Terminal # Po ARMENIAN FULL STOP +061F ; Sentence_Terminal # Po ARABIC QUESTION MARK +06D4 ; Sentence_Terminal # Po ARABIC FULL STOP +0700..0702 ; Sentence_Terminal # Po [3] SYRIAC END OF PARAGRAPH..SYRIAC SUBLINEAR FULL STOP +07F9 ; Sentence_Terminal # Po NKO EXCLAMATION MARK +0964..0965 ; Sentence_Terminal # Po [2] DEVANAGARI DANDA..DEVANAGARI DOUBLE DANDA +104A..104B ; Sentence_Terminal # Po [2] MYANMAR SIGN LITTLE SECTION..MYANMAR SIGN SECTION +1362 ; Sentence_Terminal # Po ETHIOPIC FULL STOP +1367..1368 ; Sentence_Terminal # Po [2] ETHIOPIC QUESTION MARK..ETHIOPIC PARAGRAPH SEPARATOR +166E ; Sentence_Terminal # Po CANADIAN SYLLABICS FULL STOP +1735..1736 ; Sentence_Terminal # Po [2] PHILIPPINE SINGLE PUNCTUATION..PHILIPPINE DOUBLE PUNCTUATION +1803 ; Sentence_Terminal # Po MONGOLIAN FULL STOP +1809 ; Sentence_Terminal # Po MONGOLIAN MANCHU FULL STOP +1944..1945 ; Sentence_Terminal # Po [2] LIMBU EXCLAMATION MARK..LIMBU QUESTION MARK +1AA8..1AAB ; Sentence_Terminal # Po [4] TAI THAM SIGN KAAN..TAI THAM SIGN SATKAANKUU +1B5A..1B5B ; Sentence_Terminal # Po [2] 
BALINESE PANTI..BALINESE PAMADA +1B5E..1B5F ; Sentence_Terminal # Po [2] BALINESE CARIK SIKI..BALINESE CARIK PAREREN +1C3B..1C3C ; Sentence_Terminal # Po [2] LEPCHA PUNCTUATION TA-ROL..LEPCHA PUNCTUATION NYET THYOOM TA-ROL +1C7E..1C7F ; Sentence_Terminal # Po [2] OL CHIKI PUNCTUATION MUCAAD..OL CHIKI PUNCTUATION DOUBLE MUCAAD +203C..203D ; Sentence_Terminal # Po [2] DOUBLE EXCLAMATION MARK..INTERROBANG +2047..2049 ; Sentence_Terminal # Po [3] DOUBLE QUESTION MARK..EXCLAMATION QUESTION MARK +2E2E ; Sentence_Terminal # Po REVERSED QUESTION MARK +2E3C ; Sentence_Terminal # Po STENOGRAPHIC FULL STOP +3002 ; Sentence_Terminal # Po IDEOGRAPHIC FULL STOP +A4FF ; Sentence_Terminal # Po LISU PUNCTUATION FULL STOP +A60E..A60F ; Sentence_Terminal # Po [2] VAI FULL STOP..VAI QUESTION MARK +A6F3 ; Sentence_Terminal # Po BAMUM FULL STOP +A6F7 ; Sentence_Terminal # Po BAMUM QUESTION MARK +A876..A877 ; Sentence_Terminal # Po [2] PHAGS-PA MARK SHAD..PHAGS-PA MARK DOUBLE SHAD +A8CE..A8CF ; Sentence_Terminal # Po [2] SAURASHTRA DANDA..SAURASHTRA DOUBLE DANDA +A92F ; Sentence_Terminal # Po KAYAH LI SIGN SHYA +A9C8..A9C9 ; Sentence_Terminal # Po [2] JAVANESE PADA LINGSA..JAVANESE PADA LUNGSI +AA5D..AA5F ; Sentence_Terminal # Po [3] CHAM PUNCTUATION DANDA..CHAM PUNCTUATION TRIPLE DANDA +AAF0..AAF1 ; Sentence_Terminal # Po [2] MEETEI MAYEK CHEIKHAN..MEETEI MAYEK AHANG KHUDAM +ABEB ; Sentence_Terminal # Po MEETEI MAYEK CHEIKHEI +FE52 ; Sentence_Terminal # Po SMALL FULL STOP +FE56..FE57 ; Sentence_Terminal # Po [2] SMALL QUESTION MARK..SMALL EXCLAMATION MARK +FF01 ; Sentence_Terminal # Po FULLWIDTH EXCLAMATION MARK +FF0E ; Sentence_Terminal # Po FULLWIDTH FULL STOP +FF1F ; Sentence_Terminal # Po FULLWIDTH QUESTION MARK +FF61 ; Sentence_Terminal # Po HALFWIDTH IDEOGRAPHIC FULL STOP +10A56..10A57 ; Sentence_Terminal # Po [2] KHAROSHTHI PUNCTUATION DANDA..KHAROSHTHI PUNCTUATION DOUBLE DANDA +11047..11048 ; Sentence_Terminal # Po [2] BRAHMI DANDA..BRAHMI DOUBLE DANDA +110BE..110C1 ; Sentence_Terminal # Po [4] KAITHI SECTION MARK..KAITHI DOUBLE DANDA +11141..11143 ; Sentence_Terminal # Po [3] CHAKMA DANDA..CHAKMA QUESTION MARK +111C5..111C6 ; Sentence_Terminal # Po [2] SHARADA DANDA..SHARADA DOUBLE DANDA +111CD ; Sentence_Terminal # Po SHARADA SUTRA MARK +111DE..111DF ; Sentence_Terminal # Po [2] SHARADA SECTION MARK-1..SHARADA SECTION MARK-2 +11238..11239 ; Sentence_Terminal # Po [2] KHOJKI DANDA..KHOJKI DOUBLE DANDA +1123B..1123C ; Sentence_Terminal # Po [2] KHOJKI SECTION MARK..KHOJKI DOUBLE SECTION MARK +112A9 ; Sentence_Terminal # Po MULTANI SECTION MARK +1144B..1144C ; Sentence_Terminal # Po [2] NEWA DANDA..NEWA DOUBLE DANDA +115C2..115C3 ; Sentence_Terminal # Po [2] SIDDHAM DANDA..SIDDHAM DOUBLE DANDA +115C9..115D7 ; Sentence_Terminal # Po [15] SIDDHAM END OF TEXT MARK..SIDDHAM SECTION MARK WITH CIRCLES AND FOUR ENCLOSURES +11641..11642 ; Sentence_Terminal # Po [2] MODI DANDA..MODI DOUBLE DANDA +1173C..1173E ; Sentence_Terminal # Po [3] AHOM SIGN SMALL SECTION..AHOM SIGN RULAI +11C41..11C42 ; Sentence_Terminal # Po [2] BHAIKSUKI DANDA..BHAIKSUKI DOUBLE DANDA +16A6E..16A6F ; Sentence_Terminal # Po [2] MRO DANDA..MRO DOUBLE DANDA +16AF5 ; Sentence_Terminal # Po BASSA VAH FULL STOP +16B37..16B38 ; Sentence_Terminal # Po [2] PAHAWH HMONG SIGN VOS THOM..PAHAWH HMONG SIGN VOS TSHAB CEEB +16B44 ; Sentence_Terminal # Po PAHAWH HMONG SIGN XAUS +1BC9F ; Sentence_Terminal # Po DUPLOYAN PUNCTUATION CHINOOK FULL STOP +1DA88 ; Sentence_Terminal # Po SIGNWRITING FULL STOP + +# Total code points: 124 + +# 
================================================ + +180B..180D ; Variation_Selector # Mn [3] MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE +FE00..FE0F ; Variation_Selector # Mn [16] VARIATION SELECTOR-1..VARIATION SELECTOR-16 +E0100..E01EF ; Variation_Selector # Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256 + +# Total code points: 259 + +# ================================================ + +0009..000D ; Pattern_White_Space # Cc [5] .. +0020 ; Pattern_White_Space # Zs SPACE +0085 ; Pattern_White_Space # Cc +200E..200F ; Pattern_White_Space # Cf [2] LEFT-TO-RIGHT MARK..RIGHT-TO-LEFT MARK +2028 ; Pattern_White_Space # Zl LINE SEPARATOR +2029 ; Pattern_White_Space # Zp PARAGRAPH SEPARATOR + +# Total code points: 11 + +# ================================================ + +0021..0023 ; Pattern_Syntax # Po [3] EXCLAMATION MARK..NUMBER SIGN +0024 ; Pattern_Syntax # Sc DOLLAR SIGN +0025..0027 ; Pattern_Syntax # Po [3] PERCENT SIGN..APOSTROPHE +0028 ; Pattern_Syntax # Ps LEFT PARENTHESIS +0029 ; Pattern_Syntax # Pe RIGHT PARENTHESIS +002A ; Pattern_Syntax # Po ASTERISK +002B ; Pattern_Syntax # Sm PLUS SIGN +002C ; Pattern_Syntax # Po COMMA +002D ; Pattern_Syntax # Pd HYPHEN-MINUS +002E..002F ; Pattern_Syntax # Po [2] FULL STOP..SOLIDUS +003A..003B ; Pattern_Syntax # Po [2] COLON..SEMICOLON +003C..003E ; Pattern_Syntax # Sm [3] LESS-THAN SIGN..GREATER-THAN SIGN +003F..0040 ; Pattern_Syntax # Po [2] QUESTION MARK..COMMERCIAL AT +005B ; Pattern_Syntax # Ps LEFT SQUARE BRACKET +005C ; Pattern_Syntax # Po REVERSE SOLIDUS +005D ; Pattern_Syntax # Pe RIGHT SQUARE BRACKET +005E ; Pattern_Syntax # Sk CIRCUMFLEX ACCENT +0060 ; Pattern_Syntax # Sk GRAVE ACCENT +007B ; Pattern_Syntax # Ps LEFT CURLY BRACKET +007C ; Pattern_Syntax # Sm VERTICAL LINE +007D ; Pattern_Syntax # Pe RIGHT CURLY BRACKET +007E ; Pattern_Syntax # Sm TILDE +00A1 ; Pattern_Syntax # Po INVERTED EXCLAMATION MARK +00A2..00A5 ; Pattern_Syntax # Sc [4] CENT SIGN..YEN SIGN +00A6 ; Pattern_Syntax # So BROKEN BAR +00A7 ; Pattern_Syntax # Po SECTION SIGN +00A9 ; Pattern_Syntax # So COPYRIGHT SIGN +00AB ; Pattern_Syntax # Pi LEFT-POINTING DOUBLE ANGLE QUOTATION MARK +00AC ; Pattern_Syntax # Sm NOT SIGN +00AE ; Pattern_Syntax # So REGISTERED SIGN +00B0 ; Pattern_Syntax # So DEGREE SIGN +00B1 ; Pattern_Syntax # Sm PLUS-MINUS SIGN +00B6 ; Pattern_Syntax # Po PILCROW SIGN +00BB ; Pattern_Syntax # Pf RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK +00BF ; Pattern_Syntax # Po INVERTED QUESTION MARK +00D7 ; Pattern_Syntax # Sm MULTIPLICATION SIGN +00F7 ; Pattern_Syntax # Sm DIVISION SIGN +2010..2015 ; Pattern_Syntax # Pd [6] HYPHEN..HORIZONTAL BAR +2016..2017 ; Pattern_Syntax # Po [2] DOUBLE VERTICAL LINE..DOUBLE LOW LINE +2018 ; Pattern_Syntax # Pi LEFT SINGLE QUOTATION MARK +2019 ; Pattern_Syntax # Pf RIGHT SINGLE QUOTATION MARK +201A ; Pattern_Syntax # Ps SINGLE LOW-9 QUOTATION MARK +201B..201C ; Pattern_Syntax # Pi [2] SINGLE HIGH-REVERSED-9 QUOTATION MARK..LEFT DOUBLE QUOTATION MARK +201D ; Pattern_Syntax # Pf RIGHT DOUBLE QUOTATION MARK +201E ; Pattern_Syntax # Ps DOUBLE LOW-9 QUOTATION MARK +201F ; Pattern_Syntax # Pi DOUBLE HIGH-REVERSED-9 QUOTATION MARK +2020..2027 ; Pattern_Syntax # Po [8] DAGGER..HYPHENATION POINT +2030..2038 ; Pattern_Syntax # Po [9] PER MILLE SIGN..CARET +2039 ; Pattern_Syntax # Pi SINGLE LEFT-POINTING ANGLE QUOTATION MARK +203A ; Pattern_Syntax # Pf SINGLE RIGHT-POINTING ANGLE QUOTATION MARK +203B..203E ; Pattern_Syntax # Po [4] REFERENCE MARK..OVERLINE +2041..2043 ; Pattern_Syntax # Po 
[3] CARET INSERTION POINT..HYPHEN BULLET +2044 ; Pattern_Syntax # Sm FRACTION SLASH +2045 ; Pattern_Syntax # Ps LEFT SQUARE BRACKET WITH QUILL +2046 ; Pattern_Syntax # Pe RIGHT SQUARE BRACKET WITH QUILL +2047..2051 ; Pattern_Syntax # Po [11] DOUBLE QUESTION MARK..TWO ASTERISKS ALIGNED VERTICALLY +2052 ; Pattern_Syntax # Sm COMMERCIAL MINUS SIGN +2053 ; Pattern_Syntax # Po SWUNG DASH +2055..205E ; Pattern_Syntax # Po [10] FLOWER PUNCTUATION MARK..VERTICAL FOUR DOTS +2190..2194 ; Pattern_Syntax # Sm [5] LEFTWARDS ARROW..LEFT RIGHT ARROW +2195..2199 ; Pattern_Syntax # So [5] UP DOWN ARROW..SOUTH WEST ARROW +219A..219B ; Pattern_Syntax # Sm [2] LEFTWARDS ARROW WITH STROKE..RIGHTWARDS ARROW WITH STROKE +219C..219F ; Pattern_Syntax # So [4] LEFTWARDS WAVE ARROW..UPWARDS TWO HEADED ARROW +21A0 ; Pattern_Syntax # Sm RIGHTWARDS TWO HEADED ARROW +21A1..21A2 ; Pattern_Syntax # So [2] DOWNWARDS TWO HEADED ARROW..LEFTWARDS ARROW WITH TAIL +21A3 ; Pattern_Syntax # Sm RIGHTWARDS ARROW WITH TAIL +21A4..21A5 ; Pattern_Syntax # So [2] LEFTWARDS ARROW FROM BAR..UPWARDS ARROW FROM BAR +21A6 ; Pattern_Syntax # Sm RIGHTWARDS ARROW FROM BAR +21A7..21AD ; Pattern_Syntax # So [7] DOWNWARDS ARROW FROM BAR..LEFT RIGHT WAVE ARROW +21AE ; Pattern_Syntax # Sm LEFT RIGHT ARROW WITH STROKE +21AF..21CD ; Pattern_Syntax # So [31] DOWNWARDS ZIGZAG ARROW..LEFTWARDS DOUBLE ARROW WITH STROKE +21CE..21CF ; Pattern_Syntax # Sm [2] LEFT RIGHT DOUBLE ARROW WITH STROKE..RIGHTWARDS DOUBLE ARROW WITH STROKE +21D0..21D1 ; Pattern_Syntax # So [2] LEFTWARDS DOUBLE ARROW..UPWARDS DOUBLE ARROW +21D2 ; Pattern_Syntax # Sm RIGHTWARDS DOUBLE ARROW +21D3 ; Pattern_Syntax # So DOWNWARDS DOUBLE ARROW +21D4 ; Pattern_Syntax # Sm LEFT RIGHT DOUBLE ARROW +21D5..21F3 ; Pattern_Syntax # So [31] UP DOWN DOUBLE ARROW..UP DOWN WHITE ARROW +21F4..22FF ; Pattern_Syntax # Sm [268] RIGHT ARROW WITH SMALL CIRCLE..Z NOTATION BAG MEMBERSHIP +2300..2307 ; Pattern_Syntax # So [8] DIAMETER SIGN..WAVY LINE +2308 ; Pattern_Syntax # Ps LEFT CEILING +2309 ; Pattern_Syntax # Pe RIGHT CEILING +230A ; Pattern_Syntax # Ps LEFT FLOOR +230B ; Pattern_Syntax # Pe RIGHT FLOOR +230C..231F ; Pattern_Syntax # So [20] BOTTOM RIGHT CROP..BOTTOM RIGHT CORNER +2320..2321 ; Pattern_Syntax # Sm [2] TOP HALF INTEGRAL..BOTTOM HALF INTEGRAL +2322..2328 ; Pattern_Syntax # So [7] FROWN..KEYBOARD +2329 ; Pattern_Syntax # Ps LEFT-POINTING ANGLE BRACKET +232A ; Pattern_Syntax # Pe RIGHT-POINTING ANGLE BRACKET +232B..237B ; Pattern_Syntax # So [81] ERASE TO THE LEFT..NOT CHECK MARK +237C ; Pattern_Syntax # Sm RIGHT ANGLE WITH DOWNWARDS ZIGZAG ARROW +237D..239A ; Pattern_Syntax # So [30] SHOULDERED OPEN BOX..CLEAR SCREEN SYMBOL +239B..23B3 ; Pattern_Syntax # Sm [25] LEFT PARENTHESIS UPPER HOOK..SUMMATION BOTTOM +23B4..23DB ; Pattern_Syntax # So [40] TOP SQUARE BRACKET..FUSE +23DC..23E1 ; Pattern_Syntax # Sm [6] TOP PARENTHESIS..BOTTOM TORTOISE SHELL BRACKET +23E2..23FE ; Pattern_Syntax # So [29] WHITE TRAPEZIUM..POWER SLEEP SYMBOL +23FF ; Pattern_Syntax # Cn +2400..2426 ; Pattern_Syntax # So [39] SYMBOL FOR NULL..SYMBOL FOR SUBSTITUTE FORM TWO +2427..243F ; Pattern_Syntax # Cn [25] .. +2440..244A ; Pattern_Syntax # So [11] OCR HOOK..OCR DOUBLE BACKSLASH +244B..245F ; Pattern_Syntax # Cn [21] .. 
+2500..25B6 ; Pattern_Syntax # So [183] BOX DRAWINGS LIGHT HORIZONTAL..BLACK RIGHT-POINTING TRIANGLE +25B7 ; Pattern_Syntax # Sm WHITE RIGHT-POINTING TRIANGLE +25B8..25C0 ; Pattern_Syntax # So [9] BLACK RIGHT-POINTING SMALL TRIANGLE..BLACK LEFT-POINTING TRIANGLE +25C1 ; Pattern_Syntax # Sm WHITE LEFT-POINTING TRIANGLE +25C2..25F7 ; Pattern_Syntax # So [54] BLACK LEFT-POINTING SMALL TRIANGLE..WHITE CIRCLE WITH UPPER RIGHT QUADRANT +25F8..25FF ; Pattern_Syntax # Sm [8] UPPER LEFT TRIANGLE..LOWER RIGHT TRIANGLE +2600..266E ; Pattern_Syntax # So [111] BLACK SUN WITH RAYS..MUSIC NATURAL SIGN +266F ; Pattern_Syntax # Sm MUSIC SHARP SIGN +2670..2767 ; Pattern_Syntax # So [248] WEST SYRIAC CROSS..ROTATED FLORAL HEART BULLET +2768 ; Pattern_Syntax # Ps MEDIUM LEFT PARENTHESIS ORNAMENT +2769 ; Pattern_Syntax # Pe MEDIUM RIGHT PARENTHESIS ORNAMENT +276A ; Pattern_Syntax # Ps MEDIUM FLATTENED LEFT PARENTHESIS ORNAMENT +276B ; Pattern_Syntax # Pe MEDIUM FLATTENED RIGHT PARENTHESIS ORNAMENT +276C ; Pattern_Syntax # Ps MEDIUM LEFT-POINTING ANGLE BRACKET ORNAMENT +276D ; Pattern_Syntax # Pe MEDIUM RIGHT-POINTING ANGLE BRACKET ORNAMENT +276E ; Pattern_Syntax # Ps HEAVY LEFT-POINTING ANGLE QUOTATION MARK ORNAMENT +276F ; Pattern_Syntax # Pe HEAVY RIGHT-POINTING ANGLE QUOTATION MARK ORNAMENT +2770 ; Pattern_Syntax # Ps HEAVY LEFT-POINTING ANGLE BRACKET ORNAMENT +2771 ; Pattern_Syntax # Pe HEAVY RIGHT-POINTING ANGLE BRACKET ORNAMENT +2772 ; Pattern_Syntax # Ps LIGHT LEFT TORTOISE SHELL BRACKET ORNAMENT +2773 ; Pattern_Syntax # Pe LIGHT RIGHT TORTOISE SHELL BRACKET ORNAMENT +2774 ; Pattern_Syntax # Ps MEDIUM LEFT CURLY BRACKET ORNAMENT +2775 ; Pattern_Syntax # Pe MEDIUM RIGHT CURLY BRACKET ORNAMENT +2794..27BF ; Pattern_Syntax # So [44] HEAVY WIDE-HEADED RIGHTWARDS ARROW..DOUBLE CURLY LOOP +27C0..27C4 ; Pattern_Syntax # Sm [5] THREE DIMENSIONAL ANGLE..OPEN SUPERSET +27C5 ; Pattern_Syntax # Ps LEFT S-SHAPED BAG DELIMITER +27C6 ; Pattern_Syntax # Pe RIGHT S-SHAPED BAG DELIMITER +27C7..27E5 ; Pattern_Syntax # Sm [31] OR WITH DOT INSIDE..WHITE SQUARE WITH RIGHTWARDS TICK +27E6 ; Pattern_Syntax # Ps MATHEMATICAL LEFT WHITE SQUARE BRACKET +27E7 ; Pattern_Syntax # Pe MATHEMATICAL RIGHT WHITE SQUARE BRACKET +27E8 ; Pattern_Syntax # Ps MATHEMATICAL LEFT ANGLE BRACKET +27E9 ; Pattern_Syntax # Pe MATHEMATICAL RIGHT ANGLE BRACKET +27EA ; Pattern_Syntax # Ps MATHEMATICAL LEFT DOUBLE ANGLE BRACKET +27EB ; Pattern_Syntax # Pe MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET +27EC ; Pattern_Syntax # Ps MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET +27ED ; Pattern_Syntax # Pe MATHEMATICAL RIGHT WHITE TORTOISE SHELL BRACKET +27EE ; Pattern_Syntax # Ps MATHEMATICAL LEFT FLATTENED PARENTHESIS +27EF ; Pattern_Syntax # Pe MATHEMATICAL RIGHT FLATTENED PARENTHESIS +27F0..27FF ; Pattern_Syntax # Sm [16] UPWARDS QUADRUPLE ARROW..LONG RIGHTWARDS SQUIGGLE ARROW +2800..28FF ; Pattern_Syntax # So [256] BRAILLE PATTERN BLANK..BRAILLE PATTERN DOTS-12345678 +2900..2982 ; Pattern_Syntax # Sm [131] RIGHTWARDS TWO-HEADED ARROW WITH VERTICAL STROKE..Z NOTATION TYPE COLON +2983 ; Pattern_Syntax # Ps LEFT WHITE CURLY BRACKET +2984 ; Pattern_Syntax # Pe RIGHT WHITE CURLY BRACKET +2985 ; Pattern_Syntax # Ps LEFT WHITE PARENTHESIS +2986 ; Pattern_Syntax # Pe RIGHT WHITE PARENTHESIS +2987 ; Pattern_Syntax # Ps Z NOTATION LEFT IMAGE BRACKET +2988 ; Pattern_Syntax # Pe Z NOTATION RIGHT IMAGE BRACKET +2989 ; Pattern_Syntax # Ps Z NOTATION LEFT BINDING BRACKET +298A ; Pattern_Syntax # Pe Z NOTATION RIGHT BINDING BRACKET +298B ; Pattern_Syntax # Ps LEFT SQUARE 
BRACKET WITH UNDERBAR +298C ; Pattern_Syntax # Pe RIGHT SQUARE BRACKET WITH UNDERBAR +298D ; Pattern_Syntax # Ps LEFT SQUARE BRACKET WITH TICK IN TOP CORNER +298E ; Pattern_Syntax # Pe RIGHT SQUARE BRACKET WITH TICK IN BOTTOM CORNER +298F ; Pattern_Syntax # Ps LEFT SQUARE BRACKET WITH TICK IN BOTTOM CORNER +2990 ; Pattern_Syntax # Pe RIGHT SQUARE BRACKET WITH TICK IN TOP CORNER +2991 ; Pattern_Syntax # Ps LEFT ANGLE BRACKET WITH DOT +2992 ; Pattern_Syntax # Pe RIGHT ANGLE BRACKET WITH DOT +2993 ; Pattern_Syntax # Ps LEFT ARC LESS-THAN BRACKET +2994 ; Pattern_Syntax # Pe RIGHT ARC GREATER-THAN BRACKET +2995 ; Pattern_Syntax # Ps DOUBLE LEFT ARC GREATER-THAN BRACKET +2996 ; Pattern_Syntax # Pe DOUBLE RIGHT ARC LESS-THAN BRACKET +2997 ; Pattern_Syntax # Ps LEFT BLACK TORTOISE SHELL BRACKET +2998 ; Pattern_Syntax # Pe RIGHT BLACK TORTOISE SHELL BRACKET +2999..29D7 ; Pattern_Syntax # Sm [63] DOTTED FENCE..BLACK HOURGLASS +29D8 ; Pattern_Syntax # Ps LEFT WIGGLY FENCE +29D9 ; Pattern_Syntax # Pe RIGHT WIGGLY FENCE +29DA ; Pattern_Syntax # Ps LEFT DOUBLE WIGGLY FENCE +29DB ; Pattern_Syntax # Pe RIGHT DOUBLE WIGGLY FENCE +29DC..29FB ; Pattern_Syntax # Sm [32] INCOMPLETE INFINITY..TRIPLE PLUS +29FC ; Pattern_Syntax # Ps LEFT-POINTING CURVED ANGLE BRACKET +29FD ; Pattern_Syntax # Pe RIGHT-POINTING CURVED ANGLE BRACKET +29FE..2AFF ; Pattern_Syntax # Sm [258] TINY..N-ARY WHITE VERTICAL BAR +2B00..2B2F ; Pattern_Syntax # So [48] NORTH EAST WHITE ARROW..WHITE VERTICAL ELLIPSE +2B30..2B44 ; Pattern_Syntax # Sm [21] LEFT ARROW WITH SMALL CIRCLE..RIGHTWARDS ARROW THROUGH SUPERSET +2B45..2B46 ; Pattern_Syntax # So [2] LEFTWARDS QUADRUPLE ARROW..RIGHTWARDS QUADRUPLE ARROW +2B47..2B4C ; Pattern_Syntax # Sm [6] REVERSE TILDE OPERATOR ABOVE RIGHTWARDS ARROW..RIGHTWARDS ARROW ABOVE REVERSE TILDE OPERATOR +2B4D..2B73 ; Pattern_Syntax # So [39] DOWNWARDS TRIANGLE-HEADED ZIGZAG ARROW..DOWNWARDS TRIANGLE-HEADED ARROW TO BAR +2B74..2B75 ; Pattern_Syntax # Cn [2] .. +2B76..2B95 ; Pattern_Syntax # So [32] NORTH WEST TRIANGLE-HEADED ARROW TO BAR..RIGHTWARDS BLACK ARROW +2B96..2B97 ; Pattern_Syntax # Cn [2] .. +2B98..2BB9 ; Pattern_Syntax # So [34] THREE-D TOP-LIGHTED LEFTWARDS EQUILATERAL ARROWHEAD..UP ARROWHEAD IN A RECTANGLE BOX +2BBA..2BBC ; Pattern_Syntax # Cn [3] .. +2BBD..2BC8 ; Pattern_Syntax # So [12] BALLOT BOX WITH LIGHT X..BLACK MEDIUM RIGHT-POINTING TRIANGLE CENTRED +2BC9 ; Pattern_Syntax # Cn +2BCA..2BD1 ; Pattern_Syntax # So [8] TOP HALF BLACK CIRCLE..UNCERTAINTY SIGN +2BD2..2BEB ; Pattern_Syntax # Cn [26] .. +2BEC..2BEF ; Pattern_Syntax # So [4] LEFTWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS..DOWNWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS +2BF0..2BFF ; Pattern_Syntax # Cn [16] .. 
+2E00..2E01 ; Pattern_Syntax # Po [2] RIGHT ANGLE SUBSTITUTION MARKER..RIGHT ANGLE DOTTED SUBSTITUTION MARKER +2E02 ; Pattern_Syntax # Pi LEFT SUBSTITUTION BRACKET +2E03 ; Pattern_Syntax # Pf RIGHT SUBSTITUTION BRACKET +2E04 ; Pattern_Syntax # Pi LEFT DOTTED SUBSTITUTION BRACKET +2E05 ; Pattern_Syntax # Pf RIGHT DOTTED SUBSTITUTION BRACKET +2E06..2E08 ; Pattern_Syntax # Po [3] RAISED INTERPOLATION MARKER..DOTTED TRANSPOSITION MARKER +2E09 ; Pattern_Syntax # Pi LEFT TRANSPOSITION BRACKET +2E0A ; Pattern_Syntax # Pf RIGHT TRANSPOSITION BRACKET +2E0B ; Pattern_Syntax # Po RAISED SQUARE +2E0C ; Pattern_Syntax # Pi LEFT RAISED OMISSION BRACKET +2E0D ; Pattern_Syntax # Pf RIGHT RAISED OMISSION BRACKET +2E0E..2E16 ; Pattern_Syntax # Po [9] EDITORIAL CORONIS..DOTTED RIGHT-POINTING ANGLE +2E17 ; Pattern_Syntax # Pd DOUBLE OBLIQUE HYPHEN +2E18..2E19 ; Pattern_Syntax # Po [2] INVERTED INTERROBANG..PALM BRANCH +2E1A ; Pattern_Syntax # Pd HYPHEN WITH DIAERESIS +2E1B ; Pattern_Syntax # Po TILDE WITH RING ABOVE +2E1C ; Pattern_Syntax # Pi LEFT LOW PARAPHRASE BRACKET +2E1D ; Pattern_Syntax # Pf RIGHT LOW PARAPHRASE BRACKET +2E1E..2E1F ; Pattern_Syntax # Po [2] TILDE WITH DOT ABOVE..TILDE WITH DOT BELOW +2E20 ; Pattern_Syntax # Pi LEFT VERTICAL BAR WITH QUILL +2E21 ; Pattern_Syntax # Pf RIGHT VERTICAL BAR WITH QUILL +2E22 ; Pattern_Syntax # Ps TOP LEFT HALF BRACKET +2E23 ; Pattern_Syntax # Pe TOP RIGHT HALF BRACKET +2E24 ; Pattern_Syntax # Ps BOTTOM LEFT HALF BRACKET +2E25 ; Pattern_Syntax # Pe BOTTOM RIGHT HALF BRACKET +2E26 ; Pattern_Syntax # Ps LEFT SIDEWAYS U BRACKET +2E27 ; Pattern_Syntax # Pe RIGHT SIDEWAYS U BRACKET +2E28 ; Pattern_Syntax # Ps LEFT DOUBLE PARENTHESIS +2E29 ; Pattern_Syntax # Pe RIGHT DOUBLE PARENTHESIS +2E2A..2E2E ; Pattern_Syntax # Po [5] TWO DOTS OVER ONE DOT PUNCTUATION..REVERSED QUESTION MARK +2E2F ; Pattern_Syntax # Lm VERTICAL TILDE +2E30..2E39 ; Pattern_Syntax # Po [10] RING POINT..TOP HALF SECTION SIGN +2E3A..2E3B ; Pattern_Syntax # Pd [2] TWO-EM DASH..THREE-EM DASH +2E3C..2E3F ; Pattern_Syntax # Po [4] STENOGRAPHIC FULL STOP..CAPITULUM +2E40 ; Pattern_Syntax # Pd DOUBLE HYPHEN +2E41 ; Pattern_Syntax # Po REVERSED COMMA +2E42 ; Pattern_Syntax # Ps DOUBLE LOW-REVERSED-9 QUOTATION MARK +2E43..2E44 ; Pattern_Syntax # Po [2] DASH WITH LEFT UPTURN..DOUBLE SUSPENSION MARK +2E45..2E7F ; Pattern_Syntax # Cn [59] .. 
+3001..3003 ; Pattern_Syntax # Po [3] IDEOGRAPHIC COMMA..DITTO MARK +3008 ; Pattern_Syntax # Ps LEFT ANGLE BRACKET +3009 ; Pattern_Syntax # Pe RIGHT ANGLE BRACKET +300A ; Pattern_Syntax # Ps LEFT DOUBLE ANGLE BRACKET +300B ; Pattern_Syntax # Pe RIGHT DOUBLE ANGLE BRACKET +300C ; Pattern_Syntax # Ps LEFT CORNER BRACKET +300D ; Pattern_Syntax # Pe RIGHT CORNER BRACKET +300E ; Pattern_Syntax # Ps LEFT WHITE CORNER BRACKET +300F ; Pattern_Syntax # Pe RIGHT WHITE CORNER BRACKET +3010 ; Pattern_Syntax # Ps LEFT BLACK LENTICULAR BRACKET +3011 ; Pattern_Syntax # Pe RIGHT BLACK LENTICULAR BRACKET +3012..3013 ; Pattern_Syntax # So [2] POSTAL MARK..GETA MARK +3014 ; Pattern_Syntax # Ps LEFT TORTOISE SHELL BRACKET +3015 ; Pattern_Syntax # Pe RIGHT TORTOISE SHELL BRACKET +3016 ; Pattern_Syntax # Ps LEFT WHITE LENTICULAR BRACKET +3017 ; Pattern_Syntax # Pe RIGHT WHITE LENTICULAR BRACKET +3018 ; Pattern_Syntax # Ps LEFT WHITE TORTOISE SHELL BRACKET +3019 ; Pattern_Syntax # Pe RIGHT WHITE TORTOISE SHELL BRACKET +301A ; Pattern_Syntax # Ps LEFT WHITE SQUARE BRACKET +301B ; Pattern_Syntax # Pe RIGHT WHITE SQUARE BRACKET +301C ; Pattern_Syntax # Pd WAVE DASH +301D ; Pattern_Syntax # Ps REVERSED DOUBLE PRIME QUOTATION MARK +301E..301F ; Pattern_Syntax # Pe [2] DOUBLE PRIME QUOTATION MARK..LOW DOUBLE PRIME QUOTATION MARK +3020 ; Pattern_Syntax # So POSTAL MARK FACE +3030 ; Pattern_Syntax # Pd WAVY DASH +FD3E ; Pattern_Syntax # Pe ORNATE LEFT PARENTHESIS +FD3F ; Pattern_Syntax # Ps ORNATE RIGHT PARENTHESIS +FE45..FE46 ; Pattern_Syntax # Po [2] SESAME DOT..WHITE SESAME DOT + +# Total code points: 2760 + +# ================================================ + +0600..0605 ; Prepended_Concatenation_Mark # Cf [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE +06DD ; Prepended_Concatenation_Mark # Cf ARABIC END OF AYAH +070F ; Prepended_Concatenation_Mark # Cf SYRIAC ABBREVIATION MARK +08E2 ; Prepended_Concatenation_Mark # Cf ARABIC DISPUTED END OF AYAH +110BD ; Prepended_Concatenation_Mark # Cf KAITHI NUMBER SIGN + +# Total code points: 10 + +# EOF \ No newline at end of file diff --git a/lib/elixir/unicode/UnicodeData.txt b/lib/elixir/unicode/UnicodeData.txt index 31c8a7eaa04..a756976461b 100644 --- a/lib/elixir/unicode/UnicodeData.txt +++ b/lib/elixir/unicode/UnicodeData.txt @@ -616,7 +616,7 @@ 0267;LATIN SMALL LETTER HENG WITH HOOK;Ll;0;L;;;;;N;LATIN SMALL LETTER HENG HOOK;;;; 0268;LATIN SMALL LETTER I WITH STROKE;Ll;0;L;;;;;N;LATIN SMALL LETTER BARRED I;;0197;;0197 0269;LATIN SMALL LETTER IOTA;Ll;0;L;;;;;N;;;0196;;0196 -026A;LATIN LETTER SMALL CAPITAL I;Ll;0;L;;;;;N;;;;; +026A;LATIN LETTER SMALL CAPITAL I;Ll;0;L;;;;;N;;;A7AE;;A7AE 026B;LATIN SMALL LETTER L WITH MIDDLE TILDE;Ll;0;L;;;;;N;;;2C62;;2C62 026C;LATIN SMALL LETTER L WITH BELT;Ll;0;L;;;;;N;LATIN SMALL LETTER L BELT;;A7AD;;A7AD 026D;LATIN SMALL LETTER L WITH RETROFLEX HOOK;Ll;0;L;;;;;N;LATIN SMALL LETTER L RETROFLEX HOOK;;;; @@ -667,7 +667,7 @@ 029A;LATIN SMALL LETTER CLOSED OPEN E;Ll;0;L;;;;;N;LATIN SMALL LETTER CLOSED EPSILON;;;; 029B;LATIN LETTER SMALL CAPITAL G WITH HOOK;Ll;0;L;;;;;N;LATIN LETTER SMALL CAPITAL G HOOK;;;; 029C;LATIN LETTER SMALL CAPITAL H;Ll;0;L;;;;;N;;;;; -029D;LATIN SMALL LETTER J WITH CROSSED-TAIL;Ll;0;L;;;;;N;LATIN SMALL LETTER CROSSED-TAIL J;;;; +029D;LATIN SMALL LETTER J WITH CROSSED-TAIL;Ll;0;L;;;;;N;LATIN SMALL LETTER CROSSED-TAIL J;;A7B2;;A7B2 029E;LATIN SMALL LETTER TURNED K;Ll;0;L;;;;;N;;;A7B0;;A7B0 029F;LATIN LETTER SMALL CAPITAL L;Ll;0;L;;;;;N;;;;; 02A0;LATIN SMALL LETTER Q WITH HOOK;Ll;0;L;;;;;N;LATIN SMALL LETTER Q HOOK;;;; 
@@ -2091,6 +2091,32 @@ 08B0;ARABIC LETTER GAF WITH INVERTED STROKE;Lo;0;AL;;;;;N;;;;; 08B1;ARABIC LETTER STRAIGHT WAW;Lo;0;AL;;;;;N;;;;; 08B2;ARABIC LETTER ZAIN WITH INVERTED V ABOVE;Lo;0;AL;;;;;N;;;;; +08B3;ARABIC LETTER AIN WITH THREE DOTS BELOW;Lo;0;AL;;;;;N;;;;; +08B4;ARABIC LETTER KAF WITH DOT BELOW;Lo;0;AL;;;;;N;;;;; +08B6;ARABIC LETTER BEH WITH SMALL MEEM ABOVE;Lo;0;AL;;;;;N;;;;; +08B7;ARABIC LETTER PEH WITH SMALL MEEM ABOVE;Lo;0;AL;;;;;N;;;;; +08B8;ARABIC LETTER TEH WITH SMALL TEH ABOVE;Lo;0;AL;;;;;N;;;;; +08B9;ARABIC LETTER REH WITH SMALL NOON ABOVE;Lo;0;AL;;;;;N;;;;; +08BA;ARABIC LETTER YEH WITH TWO DOTS BELOW AND SMALL NOON ABOVE;Lo;0;AL;;;;;N;;;;; +08BB;ARABIC LETTER AFRICAN FEH;Lo;0;AL;;;;;N;;;;; +08BC;ARABIC LETTER AFRICAN QAF;Lo;0;AL;;;;;N;;;;; +08BD;ARABIC LETTER AFRICAN NOON;Lo;0;AL;;;;;N;;;;; +08D4;ARABIC SMALL HIGH WORD AR-RUB;Mn;230;NSM;;;;;N;;;;; +08D5;ARABIC SMALL HIGH SAD;Mn;230;NSM;;;;;N;;;;; +08D6;ARABIC SMALL HIGH AIN;Mn;230;NSM;;;;;N;;;;; +08D7;ARABIC SMALL HIGH QAF;Mn;230;NSM;;;;;N;;;;; +08D8;ARABIC SMALL HIGH NOON WITH KASRA;Mn;230;NSM;;;;;N;;;;; +08D9;ARABIC SMALL LOW NOON WITH KASRA;Mn;230;NSM;;;;;N;;;;; +08DA;ARABIC SMALL HIGH WORD ATH-THALATHA;Mn;230;NSM;;;;;N;;;;; +08DB;ARABIC SMALL HIGH WORD AS-SAJDA;Mn;230;NSM;;;;;N;;;;; +08DC;ARABIC SMALL HIGH WORD AN-NISF;Mn;230;NSM;;;;;N;;;;; +08DD;ARABIC SMALL HIGH WORD SAKTA;Mn;230;NSM;;;;;N;;;;; +08DE;ARABIC SMALL HIGH WORD QIF;Mn;230;NSM;;;;;N;;;;; +08DF;ARABIC SMALL HIGH WORD WAQFA;Mn;230;NSM;;;;;N;;;;; +08E0;ARABIC SMALL HIGH FOOTNOTE MARKER;Mn;230;NSM;;;;;N;;;;; +08E1;ARABIC SMALL HIGH SIGN SAFHA;Mn;230;NSM;;;;;N;;;;; +08E2;ARABIC DISPUTED END OF AYAH;Cf;0;AN;;;;;N;;;;; +08E3;ARABIC TURNED DAMMA BELOW;Mn;220;NSM;;;;;N;;;;; 08E4;ARABIC CURLY FATHA;Mn;230;NSM;;;;;N;;;;; 08E5;ARABIC CURLY DAMMA;Mn;230;NSM;;;;;N;;;;; 08E6;ARABIC CURLY KASRA;Mn;220;NSM;;;;;N;;;;; @@ -2503,6 +2529,7 @@ 0AEF;GUJARATI DIGIT NINE;Nd;0;L;;9;9;9;N;;;;; 0AF0;GUJARATI ABBREVIATION SIGN;Po;0;L;;;;;N;;;;; 0AF1;GUJARATI RUPEE SIGN;Sc;0;ET;;;;;N;;;;; +0AF9;GUJARATI LETTER ZHA;Lo;0;L;;;;;N;;;;; 0B01;ORIYA SIGN CANDRABINDU;Mn;0;NSM;;;;;N;;;;; 0B02;ORIYA SIGN ANUSVARA;Mc;0;L;;;;;N;;;;; 0B03;ORIYA SIGN VISARGA;Mc;0;L;;;;;N;;;;; @@ -2738,6 +2765,7 @@ 0C56;TELUGU AI LENGTH MARK;Mn;91;NSM;;;;;N;;;;; 0C58;TELUGU LETTER TSA;Lo;0;L;;;;;N;;;;; 0C59;TELUGU LETTER DZA;Lo;0;L;;;;;N;;;;; +0C5A;TELUGU LETTER RRRA;Lo;0;L;;;;;N;;;;; 0C60;TELUGU LETTER VOCALIC RR;Lo;0;L;;;;;N;;;;; 0C61;TELUGU LETTER VOCALIC LL;Lo;0;L;;;;;N;;;;; 0C62;TELUGU VOWEL SIGN VOCALIC L;Mn;0;NSM;;;;;N;;;;; @@ -2760,6 +2788,7 @@ 0C7D;TELUGU FRACTION DIGIT TWO FOR EVEN POWERS OF FOUR;No;0;ON;;;;2;N;;;;; 0C7E;TELUGU FRACTION DIGIT THREE FOR EVEN POWERS OF FOUR;No;0;ON;;;;3;N;;;;; 0C7F;TELUGU SIGN TUUMU;So;0;L;;;;;N;;;;; +0C80;KANNADA SIGN SPACING CANDRABINDU;Lo;0;L;;;;;N;;;;; 0C81;KANNADA SIGN CANDRABINDU;Mn;0;NSM;;;;;N;;;;; 0C82;KANNADA SIGN ANUSVARA;Mc;0;L;;;;;N;;;;; 0C83;KANNADA SIGN VISARGA;Mc;0;L;;;;;N;;;;; @@ -2918,7 +2947,19 @@ 0D4C;MALAYALAM VOWEL SIGN AU;Mc;0;L;0D46 0D57;;;;N;;;;; 0D4D;MALAYALAM SIGN VIRAMA;Mn;9;NSM;;;;;N;;;;; 0D4E;MALAYALAM LETTER DOT REPH;Lo;0;L;;;;;N;;;;; +0D4F;MALAYALAM SIGN PARA;So;0;L;;;;;N;;;;; +0D54;MALAYALAM LETTER CHILLU M;Lo;0;L;;;;;N;;;;; +0D55;MALAYALAM LETTER CHILLU Y;Lo;0;L;;;;;N;;;;; +0D56;MALAYALAM LETTER CHILLU LLL;Lo;0;L;;;;;N;;;;; 0D57;MALAYALAM AU LENGTH MARK;Mc;0;L;;;;;N;;;;; +0D58;MALAYALAM FRACTION ONE ONE-HUNDRED-AND-SIXTIETH;No;0;L;;;;1/160;N;;;;; +0D59;MALAYALAM FRACTION ONE FORTIETH;No;0;L;;;;1/40;N;;;;; +0D5A;MALAYALAM FRACTION THREE 
EIGHTIETHS;No;0;L;;;;3/80;N;;;;; +0D5B;MALAYALAM FRACTION ONE TWENTIETH;No;0;L;;;;1/20;N;;;;; +0D5C;MALAYALAM FRACTION ONE TENTH;No;0;L;;;;1/10;N;;;;; +0D5D;MALAYALAM FRACTION THREE TWENTIETHS;No;0;L;;;;3/20;N;;;;; +0D5E;MALAYALAM FRACTION ONE FIFTH;No;0;L;;;;1/5;N;;;;; +0D5F;MALAYALAM LETTER ARCHAIC II;Lo;0;L;;;;;N;;;;; 0D60;MALAYALAM LETTER VOCALIC RR;Lo;0;L;;;;;N;;;;; 0D61;MALAYALAM LETTER VOCALIC LL;Lo;0;L;;;;;N;;;;; 0D62;MALAYALAM VOWEL SIGN VOCALIC L;Mn;0;NSM;;;;;N;;;;; @@ -2939,6 +2980,9 @@ 0D73;MALAYALAM FRACTION ONE QUARTER;No;0;L;;;;1/4;N;;;;; 0D74;MALAYALAM FRACTION ONE HALF;No;0;L;;;;1/2;N;;;;; 0D75;MALAYALAM FRACTION THREE QUARTERS;No;0;L;;;;3/4;N;;;;; +0D76;MALAYALAM FRACTION ONE SIXTEENTH;No;0;L;;;;1/16;N;;;;; +0D77;MALAYALAM FRACTION ONE EIGHTH;No;0;L;;;;1/8;N;;;;; +0D78;MALAYALAM FRACTION THREE SIXTEENTHS;No;0;L;;;;3/16;N;;;;; 0D79;MALAYALAM DATE MARK;So;0;L;;;;;N;;;;; 0D7A;MALAYALAM LETTER CHILLU NN;Lo;0;L;;;;;N;;;;; 0D7B;MALAYALAM LETTER CHILLU N;Lo;0;L;;;;;N;;;;; @@ -4289,91 +4333,98 @@ 1397;ETHIOPIC TONAL MARK HIDET;So;0;ON;;;;;N;;;;; 1398;ETHIOPIC TONAL MARK DERET-HIDET;So;0;ON;;;;;N;;;;; 1399;ETHIOPIC TONAL MARK KURT;So;0;ON;;;;;N;;;;; -13A0;CHEROKEE LETTER A;Lo;0;L;;;;;N;;;;; -13A1;CHEROKEE LETTER E;Lo;0;L;;;;;N;;;;; -13A2;CHEROKEE LETTER I;Lo;0;L;;;;;N;;;;; -13A3;CHEROKEE LETTER O;Lo;0;L;;;;;N;;;;; -13A4;CHEROKEE LETTER U;Lo;0;L;;;;;N;;;;; -13A5;CHEROKEE LETTER V;Lo;0;L;;;;;N;;;;; -13A6;CHEROKEE LETTER GA;Lo;0;L;;;;;N;;;;; -13A7;CHEROKEE LETTER KA;Lo;0;L;;;;;N;;;;; -13A8;CHEROKEE LETTER GE;Lo;0;L;;;;;N;;;;; -13A9;CHEROKEE LETTER GI;Lo;0;L;;;;;N;;;;; -13AA;CHEROKEE LETTER GO;Lo;0;L;;;;;N;;;;; -13AB;CHEROKEE LETTER GU;Lo;0;L;;;;;N;;;;; -13AC;CHEROKEE LETTER GV;Lo;0;L;;;;;N;;;;; -13AD;CHEROKEE LETTER HA;Lo;0;L;;;;;N;;;;; -13AE;CHEROKEE LETTER HE;Lo;0;L;;;;;N;;;;; -13AF;CHEROKEE LETTER HI;Lo;0;L;;;;;N;;;;; -13B0;CHEROKEE LETTER HO;Lo;0;L;;;;;N;;;;; -13B1;CHEROKEE LETTER HU;Lo;0;L;;;;;N;;;;; -13B2;CHEROKEE LETTER HV;Lo;0;L;;;;;N;;;;; -13B3;CHEROKEE LETTER LA;Lo;0;L;;;;;N;;;;; -13B4;CHEROKEE LETTER LE;Lo;0;L;;;;;N;;;;; -13B5;CHEROKEE LETTER LI;Lo;0;L;;;;;N;;;;; -13B6;CHEROKEE LETTER LO;Lo;0;L;;;;;N;;;;; -13B7;CHEROKEE LETTER LU;Lo;0;L;;;;;N;;;;; -13B8;CHEROKEE LETTER LV;Lo;0;L;;;;;N;;;;; -13B9;CHEROKEE LETTER MA;Lo;0;L;;;;;N;;;;; -13BA;CHEROKEE LETTER ME;Lo;0;L;;;;;N;;;;; -13BB;CHEROKEE LETTER MI;Lo;0;L;;;;;N;;;;; -13BC;CHEROKEE LETTER MO;Lo;0;L;;;;;N;;;;; -13BD;CHEROKEE LETTER MU;Lo;0;L;;;;;N;;;;; -13BE;CHEROKEE LETTER NA;Lo;0;L;;;;;N;;;;; -13BF;CHEROKEE LETTER HNA;Lo;0;L;;;;;N;;;;; -13C0;CHEROKEE LETTER NAH;Lo;0;L;;;;;N;;;;; -13C1;CHEROKEE LETTER NE;Lo;0;L;;;;;N;;;;; -13C2;CHEROKEE LETTER NI;Lo;0;L;;;;;N;;;;; -13C3;CHEROKEE LETTER NO;Lo;0;L;;;;;N;;;;; -13C4;CHEROKEE LETTER NU;Lo;0;L;;;;;N;;;;; -13C5;CHEROKEE LETTER NV;Lo;0;L;;;;;N;;;;; -13C6;CHEROKEE LETTER QUA;Lo;0;L;;;;;N;;;;; -13C7;CHEROKEE LETTER QUE;Lo;0;L;;;;;N;;;;; -13C8;CHEROKEE LETTER QUI;Lo;0;L;;;;;N;;;;; -13C9;CHEROKEE LETTER QUO;Lo;0;L;;;;;N;;;;; -13CA;CHEROKEE LETTER QUU;Lo;0;L;;;;;N;;;;; -13CB;CHEROKEE LETTER QUV;Lo;0;L;;;;;N;;;;; -13CC;CHEROKEE LETTER SA;Lo;0;L;;;;;N;;;;; -13CD;CHEROKEE LETTER S;Lo;0;L;;;;;N;;;;; -13CE;CHEROKEE LETTER SE;Lo;0;L;;;;;N;;;;; -13CF;CHEROKEE LETTER SI;Lo;0;L;;;;;N;;;;; -13D0;CHEROKEE LETTER SO;Lo;0;L;;;;;N;;;;; -13D1;CHEROKEE LETTER SU;Lo;0;L;;;;;N;;;;; -13D2;CHEROKEE LETTER SV;Lo;0;L;;;;;N;;;;; -13D3;CHEROKEE LETTER DA;Lo;0;L;;;;;N;;;;; -13D4;CHEROKEE LETTER TA;Lo;0;L;;;;;N;;;;; -13D5;CHEROKEE LETTER DE;Lo;0;L;;;;;N;;;;; -13D6;CHEROKEE LETTER TE;Lo;0;L;;;;;N;;;;; 
-13D7;CHEROKEE LETTER DI;Lo;0;L;;;;;N;;;;; -13D8;CHEROKEE LETTER TI;Lo;0;L;;;;;N;;;;; -13D9;CHEROKEE LETTER DO;Lo;0;L;;;;;N;;;;; -13DA;CHEROKEE LETTER DU;Lo;0;L;;;;;N;;;;; -13DB;CHEROKEE LETTER DV;Lo;0;L;;;;;N;;;;; -13DC;CHEROKEE LETTER DLA;Lo;0;L;;;;;N;;;;; -13DD;CHEROKEE LETTER TLA;Lo;0;L;;;;;N;;;;; -13DE;CHEROKEE LETTER TLE;Lo;0;L;;;;;N;;;;; -13DF;CHEROKEE LETTER TLI;Lo;0;L;;;;;N;;;;; -13E0;CHEROKEE LETTER TLO;Lo;0;L;;;;;N;;;;; -13E1;CHEROKEE LETTER TLU;Lo;0;L;;;;;N;;;;; -13E2;CHEROKEE LETTER TLV;Lo;0;L;;;;;N;;;;; -13E3;CHEROKEE LETTER TSA;Lo;0;L;;;;;N;;;;; -13E4;CHEROKEE LETTER TSE;Lo;0;L;;;;;N;;;;; -13E5;CHEROKEE LETTER TSI;Lo;0;L;;;;;N;;;;; -13E6;CHEROKEE LETTER TSO;Lo;0;L;;;;;N;;;;; -13E7;CHEROKEE LETTER TSU;Lo;0;L;;;;;N;;;;; -13E8;CHEROKEE LETTER TSV;Lo;0;L;;;;;N;;;;; -13E9;CHEROKEE LETTER WA;Lo;0;L;;;;;N;;;;; -13EA;CHEROKEE LETTER WE;Lo;0;L;;;;;N;;;;; -13EB;CHEROKEE LETTER WI;Lo;0;L;;;;;N;;;;; -13EC;CHEROKEE LETTER WO;Lo;0;L;;;;;N;;;;; -13ED;CHEROKEE LETTER WU;Lo;0;L;;;;;N;;;;; -13EE;CHEROKEE LETTER WV;Lo;0;L;;;;;N;;;;; -13EF;CHEROKEE LETTER YA;Lo;0;L;;;;;N;;;;; -13F0;CHEROKEE LETTER YE;Lo;0;L;;;;;N;;;;; -13F1;CHEROKEE LETTER YI;Lo;0;L;;;;;N;;;;; -13F2;CHEROKEE LETTER YO;Lo;0;L;;;;;N;;;;; -13F3;CHEROKEE LETTER YU;Lo;0;L;;;;;N;;;;; -13F4;CHEROKEE LETTER YV;Lo;0;L;;;;;N;;;;; +13A0;CHEROKEE LETTER A;Lu;0;L;;;;;N;;;;AB70; +13A1;CHEROKEE LETTER E;Lu;0;L;;;;;N;;;;AB71; +13A2;CHEROKEE LETTER I;Lu;0;L;;;;;N;;;;AB72; +13A3;CHEROKEE LETTER O;Lu;0;L;;;;;N;;;;AB73; +13A4;CHEROKEE LETTER U;Lu;0;L;;;;;N;;;;AB74; +13A5;CHEROKEE LETTER V;Lu;0;L;;;;;N;;;;AB75; +13A6;CHEROKEE LETTER GA;Lu;0;L;;;;;N;;;;AB76; +13A7;CHEROKEE LETTER KA;Lu;0;L;;;;;N;;;;AB77; +13A8;CHEROKEE LETTER GE;Lu;0;L;;;;;N;;;;AB78; +13A9;CHEROKEE LETTER GI;Lu;0;L;;;;;N;;;;AB79; +13AA;CHEROKEE LETTER GO;Lu;0;L;;;;;N;;;;AB7A; +13AB;CHEROKEE LETTER GU;Lu;0;L;;;;;N;;;;AB7B; +13AC;CHEROKEE LETTER GV;Lu;0;L;;;;;N;;;;AB7C; +13AD;CHEROKEE LETTER HA;Lu;0;L;;;;;N;;;;AB7D; +13AE;CHEROKEE LETTER HE;Lu;0;L;;;;;N;;;;AB7E; +13AF;CHEROKEE LETTER HI;Lu;0;L;;;;;N;;;;AB7F; +13B0;CHEROKEE LETTER HO;Lu;0;L;;;;;N;;;;AB80; +13B1;CHEROKEE LETTER HU;Lu;0;L;;;;;N;;;;AB81; +13B2;CHEROKEE LETTER HV;Lu;0;L;;;;;N;;;;AB82; +13B3;CHEROKEE LETTER LA;Lu;0;L;;;;;N;;;;AB83; +13B4;CHEROKEE LETTER LE;Lu;0;L;;;;;N;;;;AB84; +13B5;CHEROKEE LETTER LI;Lu;0;L;;;;;N;;;;AB85; +13B6;CHEROKEE LETTER LO;Lu;0;L;;;;;N;;;;AB86; +13B7;CHEROKEE LETTER LU;Lu;0;L;;;;;N;;;;AB87; +13B8;CHEROKEE LETTER LV;Lu;0;L;;;;;N;;;;AB88; +13B9;CHEROKEE LETTER MA;Lu;0;L;;;;;N;;;;AB89; +13BA;CHEROKEE LETTER ME;Lu;0;L;;;;;N;;;;AB8A; +13BB;CHEROKEE LETTER MI;Lu;0;L;;;;;N;;;;AB8B; +13BC;CHEROKEE LETTER MO;Lu;0;L;;;;;N;;;;AB8C; +13BD;CHEROKEE LETTER MU;Lu;0;L;;;;;N;;;;AB8D; +13BE;CHEROKEE LETTER NA;Lu;0;L;;;;;N;;;;AB8E; +13BF;CHEROKEE LETTER HNA;Lu;0;L;;;;;N;;;;AB8F; +13C0;CHEROKEE LETTER NAH;Lu;0;L;;;;;N;;;;AB90; +13C1;CHEROKEE LETTER NE;Lu;0;L;;;;;N;;;;AB91; +13C2;CHEROKEE LETTER NI;Lu;0;L;;;;;N;;;;AB92; +13C3;CHEROKEE LETTER NO;Lu;0;L;;;;;N;;;;AB93; +13C4;CHEROKEE LETTER NU;Lu;0;L;;;;;N;;;;AB94; +13C5;CHEROKEE LETTER NV;Lu;0;L;;;;;N;;;;AB95; +13C6;CHEROKEE LETTER QUA;Lu;0;L;;;;;N;;;;AB96; +13C7;CHEROKEE LETTER QUE;Lu;0;L;;;;;N;;;;AB97; +13C8;CHEROKEE LETTER QUI;Lu;0;L;;;;;N;;;;AB98; +13C9;CHEROKEE LETTER QUO;Lu;0;L;;;;;N;;;;AB99; +13CA;CHEROKEE LETTER QUU;Lu;0;L;;;;;N;;;;AB9A; +13CB;CHEROKEE LETTER QUV;Lu;0;L;;;;;N;;;;AB9B; +13CC;CHEROKEE LETTER SA;Lu;0;L;;;;;N;;;;AB9C; +13CD;CHEROKEE LETTER S;Lu;0;L;;;;;N;;;;AB9D; +13CE;CHEROKEE LETTER SE;Lu;0;L;;;;;N;;;;AB9E; +13CF;CHEROKEE LETTER 
SI;Lu;0;L;;;;;N;;;;AB9F; +13D0;CHEROKEE LETTER SO;Lu;0;L;;;;;N;;;;ABA0; +13D1;CHEROKEE LETTER SU;Lu;0;L;;;;;N;;;;ABA1; +13D2;CHEROKEE LETTER SV;Lu;0;L;;;;;N;;;;ABA2; +13D3;CHEROKEE LETTER DA;Lu;0;L;;;;;N;;;;ABA3; +13D4;CHEROKEE LETTER TA;Lu;0;L;;;;;N;;;;ABA4; +13D5;CHEROKEE LETTER DE;Lu;0;L;;;;;N;;;;ABA5; +13D6;CHEROKEE LETTER TE;Lu;0;L;;;;;N;;;;ABA6; +13D7;CHEROKEE LETTER DI;Lu;0;L;;;;;N;;;;ABA7; +13D8;CHEROKEE LETTER TI;Lu;0;L;;;;;N;;;;ABA8; +13D9;CHEROKEE LETTER DO;Lu;0;L;;;;;N;;;;ABA9; +13DA;CHEROKEE LETTER DU;Lu;0;L;;;;;N;;;;ABAA; +13DB;CHEROKEE LETTER DV;Lu;0;L;;;;;N;;;;ABAB; +13DC;CHEROKEE LETTER DLA;Lu;0;L;;;;;N;;;;ABAC; +13DD;CHEROKEE LETTER TLA;Lu;0;L;;;;;N;;;;ABAD; +13DE;CHEROKEE LETTER TLE;Lu;0;L;;;;;N;;;;ABAE; +13DF;CHEROKEE LETTER TLI;Lu;0;L;;;;;N;;;;ABAF; +13E0;CHEROKEE LETTER TLO;Lu;0;L;;;;;N;;;;ABB0; +13E1;CHEROKEE LETTER TLU;Lu;0;L;;;;;N;;;;ABB1; +13E2;CHEROKEE LETTER TLV;Lu;0;L;;;;;N;;;;ABB2; +13E3;CHEROKEE LETTER TSA;Lu;0;L;;;;;N;;;;ABB3; +13E4;CHEROKEE LETTER TSE;Lu;0;L;;;;;N;;;;ABB4; +13E5;CHEROKEE LETTER TSI;Lu;0;L;;;;;N;;;;ABB5; +13E6;CHEROKEE LETTER TSO;Lu;0;L;;;;;N;;;;ABB6; +13E7;CHEROKEE LETTER TSU;Lu;0;L;;;;;N;;;;ABB7; +13E8;CHEROKEE LETTER TSV;Lu;0;L;;;;;N;;;;ABB8; +13E9;CHEROKEE LETTER WA;Lu;0;L;;;;;N;;;;ABB9; +13EA;CHEROKEE LETTER WE;Lu;0;L;;;;;N;;;;ABBA; +13EB;CHEROKEE LETTER WI;Lu;0;L;;;;;N;;;;ABBB; +13EC;CHEROKEE LETTER WO;Lu;0;L;;;;;N;;;;ABBC; +13ED;CHEROKEE LETTER WU;Lu;0;L;;;;;N;;;;ABBD; +13EE;CHEROKEE LETTER WV;Lu;0;L;;;;;N;;;;ABBE; +13EF;CHEROKEE LETTER YA;Lu;0;L;;;;;N;;;;ABBF; +13F0;CHEROKEE LETTER YE;Lu;0;L;;;;;N;;;;13F8; +13F1;CHEROKEE LETTER YI;Lu;0;L;;;;;N;;;;13F9; +13F2;CHEROKEE LETTER YO;Lu;0;L;;;;;N;;;;13FA; +13F3;CHEROKEE LETTER YU;Lu;0;L;;;;;N;;;;13FB; +13F4;CHEROKEE LETTER YV;Lu;0;L;;;;;N;;;;13FC; +13F5;CHEROKEE LETTER MV;Lu;0;L;;;;;N;;;;13FD; +13F8;CHEROKEE SMALL LETTER YE;Ll;0;L;;;;;N;;;13F0;;13F0 +13F9;CHEROKEE SMALL LETTER YI;Ll;0;L;;;;;N;;;13F1;;13F1 +13FA;CHEROKEE SMALL LETTER YO;Ll;0;L;;;;;N;;;13F2;;13F2 +13FB;CHEROKEE SMALL LETTER YU;Ll;0;L;;;;;N;;;13F3;;13F3 +13FC;CHEROKEE SMALL LETTER YV;Ll;0;L;;;;;N;;;13F4;;13F4 +13FD;CHEROKEE SMALL LETTER MV;Ll;0;L;;;;;N;;;13F5;;13F5 1400;CANADIAN SYLLABICS HYPHEN;Pd;0;ON;;;;;N;;;;; 1401;CANADIAN SYLLABICS E;Lo;0;L;;;;;N;;;;; 1402;CANADIAN SYLLABICS AAI;Lo;0;L;;;;;N;;;;; @@ -5445,8 +5496,8 @@ 1882;MONGOLIAN LETTER ALI GALI DAMARU;Lo;0;L;;;;;N;;;;; 1883;MONGOLIAN LETTER ALI GALI UBADAMA;Lo;0;L;;;;;N;;;;; 1884;MONGOLIAN LETTER ALI GALI INVERTED UBADAMA;Lo;0;L;;;;;N;;;;; -1885;MONGOLIAN LETTER ALI GALI BALUDA;Lo;0;L;;;;;N;;;;; -1886;MONGOLIAN LETTER ALI GALI THREE BALUDA;Lo;0;L;;;;;N;;;;; +1885;MONGOLIAN LETTER ALI GALI BALUDA;Mn;0;NSM;;;;;N;;;;; +1886;MONGOLIAN LETTER ALI GALI THREE BALUDA;Mn;0;NSM;;;;;N;;;;; 1887;MONGOLIAN LETTER ALI GALI A;Lo;0;L;;;;;N;;;;; 1888;MONGOLIAN LETTER ALI GALI I;Lo;0;L;;;;;N;;;;; 1889;MONGOLIAN LETTER ALI GALI KA;Lo;0;L;;;;;N;;;;; @@ -5700,23 +5751,23 @@ 19A9;NEW TAI LUE LETTER LOW XVA;Lo;0;L;;;;;N;;;;; 19AA;NEW TAI LUE LETTER HIGH SUA;Lo;0;L;;;;;N;;;;; 19AB;NEW TAI LUE LETTER LOW SUA;Lo;0;L;;;;;N;;;;; -19B0;NEW TAI LUE VOWEL SIGN VOWEL SHORTENER;Mc;0;L;;;;;N;;;;; -19B1;NEW TAI LUE VOWEL SIGN AA;Mc;0;L;;;;;N;;;;; -19B2;NEW TAI LUE VOWEL SIGN II;Mc;0;L;;;;;N;;;;; -19B3;NEW TAI LUE VOWEL SIGN U;Mc;0;L;;;;;N;;;;; -19B4;NEW TAI LUE VOWEL SIGN UU;Mc;0;L;;;;;N;;;;; -19B5;NEW TAI LUE VOWEL SIGN E;Mc;0;L;;;;;N;;;;; -19B6;NEW TAI LUE VOWEL SIGN AE;Mc;0;L;;;;;N;;;;; -19B7;NEW TAI LUE VOWEL SIGN O;Mc;0;L;;;;;N;;;;; -19B8;NEW TAI LUE VOWEL SIGN OA;Mc;0;L;;;;;N;;;;; -19B9;NEW TAI 
LUE VOWEL SIGN UE;Mc;0;L;;;;;N;;;;; -19BA;NEW TAI LUE VOWEL SIGN AY;Mc;0;L;;;;;N;;;;; -19BB;NEW TAI LUE VOWEL SIGN AAY;Mc;0;L;;;;;N;;;;; -19BC;NEW TAI LUE VOWEL SIGN UY;Mc;0;L;;;;;N;;;;; -19BD;NEW TAI LUE VOWEL SIGN OY;Mc;0;L;;;;;N;;;;; -19BE;NEW TAI LUE VOWEL SIGN OAY;Mc;0;L;;;;;N;;;;; -19BF;NEW TAI LUE VOWEL SIGN UEY;Mc;0;L;;;;;N;;;;; -19C0;NEW TAI LUE VOWEL SIGN IY;Mc;0;L;;;;;N;;;;; +19B0;NEW TAI LUE VOWEL SIGN VOWEL SHORTENER;Lo;0;L;;;;;N;;;;; +19B1;NEW TAI LUE VOWEL SIGN AA;Lo;0;L;;;;;N;;;;; +19B2;NEW TAI LUE VOWEL SIGN II;Lo;0;L;;;;;N;;;;; +19B3;NEW TAI LUE VOWEL SIGN U;Lo;0;L;;;;;N;;;;; +19B4;NEW TAI LUE VOWEL SIGN UU;Lo;0;L;;;;;N;;;;; +19B5;NEW TAI LUE VOWEL SIGN E;Lo;0;L;;;;;N;;;;; +19B6;NEW TAI LUE VOWEL SIGN AE;Lo;0;L;;;;;N;;;;; +19B7;NEW TAI LUE VOWEL SIGN O;Lo;0;L;;;;;N;;;;; +19B8;NEW TAI LUE VOWEL SIGN OA;Lo;0;L;;;;;N;;;;; +19B9;NEW TAI LUE VOWEL SIGN UE;Lo;0;L;;;;;N;;;;; +19BA;NEW TAI LUE VOWEL SIGN AY;Lo;0;L;;;;;N;;;;; +19BB;NEW TAI LUE VOWEL SIGN AAY;Lo;0;L;;;;;N;;;;; +19BC;NEW TAI LUE VOWEL SIGN UY;Lo;0;L;;;;;N;;;;; +19BD;NEW TAI LUE VOWEL SIGN OY;Lo;0;L;;;;;N;;;;; +19BE;NEW TAI LUE VOWEL SIGN OAY;Lo;0;L;;;;;N;;;;; +19BF;NEW TAI LUE VOWEL SIGN UEY;Lo;0;L;;;;;N;;;;; +19C0;NEW TAI LUE VOWEL SIGN IY;Lo;0;L;;;;;N;;;;; 19C1;NEW TAI LUE LETTER FINAL V;Lo;0;L;;;;;N;;;;; 19C2;NEW TAI LUE LETTER FINAL NG;Lo;0;L;;;;;N;;;;; 19C3;NEW TAI LUE LETTER FINAL N;Lo;0;L;;;;;N;;;;; @@ -5724,8 +5775,8 @@ 19C5;NEW TAI LUE LETTER FINAL K;Lo;0;L;;;;;N;;;;; 19C6;NEW TAI LUE LETTER FINAL D;Lo;0;L;;;;;N;;;;; 19C7;NEW TAI LUE LETTER FINAL B;Lo;0;L;;;;;N;;;;; -19C8;NEW TAI LUE TONE MARK-1;Mc;0;L;;;;;N;;;;; -19C9;NEW TAI LUE TONE MARK-2;Mc;0;L;;;;;N;;;;; +19C8;NEW TAI LUE TONE MARK-1;Lo;0;L;;;;;N;;;;; +19C9;NEW TAI LUE TONE MARK-2;Lo;0;L;;;;;N;;;;; 19D0;NEW TAI LUE DIGIT ZERO;Nd;0;L;;0;0;0;N;;;;; 19D1;NEW TAI LUE DIGIT ONE;Nd;0;L;;1;1;1;N;;;;; 19D2;NEW TAI LUE DIGIT TWO;Nd;0;L;;2;2;2;N;;;;; @@ -6306,6 +6357,15 @@ 1C7D;OL CHIKI AHAD;Lm;0;L;;;;;N;;;;; 1C7E;OL CHIKI PUNCTUATION MUCAAD;Po;0;L;;;;;N;;;;; 1C7F;OL CHIKI PUNCTUATION DOUBLE MUCAAD;Po;0;L;;;;;N;;;;; +1C80;CYRILLIC SMALL LETTER ROUNDED VE;Ll;0;L;;;;;N;;;0412;;0412 +1C81;CYRILLIC SMALL LETTER LONG-LEGGED DE;Ll;0;L;;;;;N;;;0414;;0414 +1C82;CYRILLIC SMALL LETTER NARROW O;Ll;0;L;;;;;N;;;041E;;041E +1C83;CYRILLIC SMALL LETTER WIDE ES;Ll;0;L;;;;;N;;;0421;;0421 +1C84;CYRILLIC SMALL LETTER TALL TE;Ll;0;L;;;;;N;;;0422;;0422 +1C85;CYRILLIC SMALL LETTER THREE-LEGGED TE;Ll;0;L;;;;;N;;;0422;;0422 +1C86;CYRILLIC SMALL LETTER TALL HARD SIGN;Ll;0;L;;;;;N;;;042A;;042A +1C87;CYRILLIC SMALL LETTER TALL YAT;Ll;0;L;;;;;N;;;0462;;0462 +1C88;CYRILLIC SMALL LETTER UNBLENDED UK;Ll;0;L;;;;;N;;;A64A;;A64A 1CC0;SUNDANESE PUNCTUATION BINDU SURYA;Po;0;L;;;;;N;;;;; 1CC1;SUNDANESE PUNCTUATION BINDU PANGLONG;Po;0;L;;;;;N;;;;; 1CC2;SUNDANESE PUNCTUATION BINDU PURNAMA;Po;0;L;;;;;N;;;;; @@ -6601,6 +6661,7 @@ 1DF3;COMBINING LATIN SMALL LETTER O WITH DIAERESIS;Mn;230;NSM;;;;;N;;;;; 1DF4;COMBINING LATIN SMALL LETTER U WITH DIAERESIS;Mn;230;NSM;;;;;N;;;;; 1DF5;COMBINING UP TACK ABOVE;Mn;230;NSM;;;;;N;;;;; +1DFB;COMBINING DELETION MARK;Mn;230;NSM;;;;;N;;;;; 1DFC;COMBINING DOUBLE INVERTED BREVE BELOW;Mn;233;NSM;;;;;N;;;;; 1DFD;COMBINING ALMOST EQUAL TO BELOW;Mn;220;NSM;;;;;N;;;;; 1DFE;COMBINING LEFT ARROWHEAD ABOVE;Mn;230;NSM;;;;;N;;;;; @@ -7277,6 +7338,7 @@ 20BB;NORDIC MARK SIGN;Sc;0;ET;;;;;N;;;;; 20BC;MANAT SIGN;Sc;0;ET;;;;;N;;;;; 20BD;RUBLE SIGN;Sc;0;ET;;;;;N;;;;; +20BE;LARI SIGN;Sc;0;ET;;;;;N;;;;; 20D0;COMBINING LEFT HARPOON ABOVE;Mn;230;NSM;;;;;N;NON-SPACING LEFT HARPOON ABOVE;;;; 
20D1;COMBINING RIGHT HARPOON ABOVE;Mn;230;NSM;;;;;N;NON-SPACING RIGHT HARPOON ABOVE;;;; 20D2;COMBINING LONG VERTICAL LINE OVERLAY;Mn;1;NSM;;;;;N;NON-SPACING LONG VERTICAL BAR OVERLAY;;;; @@ -7448,6 +7510,8 @@ 2187;ROMAN NUMERAL FIFTY THOUSAND;Nl;0;L;;;;50000;N;;;;; 2188;ROMAN NUMERAL ONE HUNDRED THOUSAND;Nl;0;L;;;;100000;N;;;;; 2189;VULGAR FRACTION ZERO THIRDS;No;0;ON; 0030 2044 0033;;;0;N;;;;; +218A;TURNED DIGIT TWO;So;0;ON;;;;;N;;;;; +218B;TURNED DIGIT THREE;So;0;ON;;;;;N;;;;; 2190;LEFTWARDS ARROW;Sm;0;ON;;;;;N;LEFT ARROW;;;; 2191;UPWARDS ARROW;Sm;0;ON;;;;;N;UP ARROW;;;; 2192;RIGHTWARDS ARROW;Sm;0;ON;;;;;N;RIGHT ARROW;;;; @@ -8067,6 +8131,10 @@ 23F8;DOUBLE VERTICAL BAR;So;0;ON;;;;;N;;;;; 23F9;BLACK SQUARE FOR STOP;So;0;ON;;;;;N;;;;; 23FA;BLACK CIRCLE FOR RECORD;So;0;ON;;;;;N;;;;; +23FB;POWER SYMBOL;So;0;ON;;;;;N;;;;; +23FC;POWER ON-OFF SYMBOL;So;0;ON;;;;;N;;;;; +23FD;POWER ON SYMBOL;So;0;ON;;;;;N;;;;; +23FE;POWER SLEEP SYMBOL;So;0;ON;;;;;N;;;;; 2400;SYMBOL FOR NULL;So;0;ON;;;;;N;GRAPHIC FOR NULL;;;; 2401;SYMBOL FOR START OF HEADING;So;0;ON;;;;;N;GRAPHIC FOR START OF HEADING;;;; 2402;SYMBOL FOR START OF TEXT;So;0;ON;;;;;N;GRAPHIC FOR START OF TEXT;;;; @@ -10015,6 +10083,10 @@ 2BCF;ROTATED WHITE FOUR POINTED CUSP;So;0;ON;;;;;N;;;;; 2BD0;SQUARE POSITION INDICATOR;So;0;ON;;;;;N;;;;; 2BD1;UNCERTAINTY SIGN;So;0;ON;;;;;N;;;;; +2BEC;LEFTWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS;So;0;ON;;;;;N;;;;; +2BED;UPWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS;So;0;ON;;;;;N;;;;; +2BEE;RIGHTWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS;So;0;ON;;;;;N;;;;; +2BEF;DOWNWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS;So;0;ON;;;;;N;;;;; 2C00;GLAGOLITIC CAPITAL LETTER AZU;Lu;0;L;;;;;N;;;;2C30; 2C01;GLAGOLITIC CAPITAL LETTER BUKY;Lu;0;L;;;;;N;;;;2C31; 2C02;GLAGOLITIC CAPITAL LETTER VEDE;Lu;0;L;;;;;N;;;;2C32; @@ -10541,6 +10613,8 @@ 2E40;DOUBLE HYPHEN;Pd;0;ON;;;;;N;;;;; 2E41;REVERSED COMMA;Po;0;ON;;;;;N;;;;; 2E42;DOUBLE LOW-REVERSED-9 QUOTATION MARK;Ps;0;ON;;;;;N;;;;; +2E43;DASH WITH LEFT UPTURN;Po;0;ON;;;;;N;;;;; +2E44;DOUBLE SUSPENSION MARK;Po;0;ON;;;;;N;;;;; 2E80;CJK RADICAL REPEAT;So;0;ON;;;;;N;;;;; 2E81;CJK RADICAL CLIFF;So;0;ON;;;;;N;;;;; 2E82;CJK RADICAL SECOND ONE;So;0;ON;;;;;N;;;;; @@ -11942,7 +12016,7 @@ 4DFE;HEXAGRAM FOR AFTER COMPLETION;So;0;ON;;;;;N;;;;; 4DFF;HEXAGRAM FOR BEFORE COMPLETION;So;0;ON;;;;;N;;;;; 4E00;;Lo;0;L;;;;;N;;;;; -9FCC;;Lo;0;L;;;;;N;;;;; +9FD5;;Lo;0;L;;;;;N;;;;; A000;YI SYLLABLE IT;Lo;0;L;;;;;N;;;;; A001;YI SYLLABLE IX;Lo;0;L;;;;;N;;;;; A002;YI SYLLABLE I;Lo;0;L;;;;;N;;;;; @@ -13605,6 +13679,7 @@ A69A;CYRILLIC CAPITAL LETTER CROSSED O;Lu;0;L;;;;;N;;;;A69B; A69B;CYRILLIC SMALL LETTER CROSSED O;Ll;0;L;;;;;N;;;A69A;;A69A A69C;MODIFIER LETTER CYRILLIC HARD SIGN;Lm;0;L; 044A;;;;N;;;;; A69D;MODIFIER LETTER CYRILLIC SOFT SIGN;Lm;0;L; 044C;;;;N;;;;; +A69E;COMBINING CYRILLIC LETTER EF;Mn;230;NSM;;;;;N;;;;; A69F;COMBINING CYRILLIC LETTER IOTIFIED E;Mn;230;NSM;;;;;N;;;;; A6A0;BAMUM LETTER A;Lo;0;L;;;;;N;;;;; A6A1;BAMUM LETTER KA;Lo;0;L;;;;;N;;;;; @@ -13837,6 +13912,7 @@ A78B;LATIN CAPITAL LETTER SALTILLO;Lu;0;L;;;;;N;;;;A78C; A78C;LATIN SMALL LETTER SALTILLO;Ll;0;L;;;;;N;;;A78B;;A78B A78D;LATIN CAPITAL LETTER TURNED H;Lu;0;L;;;;;N;;;;0265; A78E;LATIN SMALL LETTER L WITH RETROFLEX HOOK AND BELT;Ll;0;L;;;;;N;;;;; +A78F;LATIN LETTER SINOLOGICAL DOT;Lo;0;L;;;;;N;;;;; A790;LATIN CAPITAL LETTER N WITH DESCENDER;Lu;0;L;;;;;N;;;;A791; A791;LATIN SMALL LETTER N WITH DESCENDER;Ll;0;L;;;;;N;;;A790;;A790 A792;LATIN CAPITAL LETTER C WITH BAR;Lu;0;L;;;;;N;;;;A793; @@ -13867,8 +13943,15 @@ A7AA;LATIN 
CAPITAL LETTER H WITH HOOK;Lu;0;L;;;;;N;;;;0266; A7AB;LATIN CAPITAL LETTER REVERSED OPEN E;Lu;0;L;;;;;N;;;;025C; A7AC;LATIN CAPITAL LETTER SCRIPT G;Lu;0;L;;;;;N;;;;0261; A7AD;LATIN CAPITAL LETTER L WITH BELT;Lu;0;L;;;;;N;;;;026C; +A7AE;LATIN CAPITAL LETTER SMALL CAPITAL I;Lu;0;L;;;;;N;;;;026A; A7B0;LATIN CAPITAL LETTER TURNED K;Lu;0;L;;;;;N;;;;029E; A7B1;LATIN CAPITAL LETTER TURNED T;Lu;0;L;;;;;N;;;;0287; +A7B2;LATIN CAPITAL LETTER J WITH CROSSED-TAIL;Lu;0;L;;;;;N;;;;029D; +A7B3;LATIN CAPITAL LETTER CHI;Lu;0;L;;;;;N;;;;AB53; +A7B4;LATIN CAPITAL LETTER BETA;Lu;0;L;;;;;N;;;;A7B5; +A7B5;LATIN SMALL LETTER BETA;Ll;0;L;;;;;N;;;A7B4;;A7B4 +A7B6;LATIN CAPITAL LETTER OMEGA;Lu;0;L;;;;;N;;;;A7B7; +A7B7;LATIN SMALL LETTER OMEGA;Ll;0;L;;;;;N;;;A7B6;;A7B6 A7F7;LATIN EPIGRAPHIC LETTER SIDEWAYS I;Lo;0;L;;;;;N;;;;; A7F8;MODIFIER LETTER CAPITAL H WITH STROKE;Lm;0;L; 0126;;;;N;;;;; A7F9;MODIFIER LETTER SMALL LIGATURE OE;Lm;0;L; 0153;;;;N;;;;; @@ -14057,6 +14140,7 @@ A8C1;SAURASHTRA VOWEL SIGN O;Mc;0;L;;;;;N;;;;; A8C2;SAURASHTRA VOWEL SIGN OO;Mc;0;L;;;;;N;;;;; A8C3;SAURASHTRA VOWEL SIGN AU;Mc;0;L;;;;;N;;;;; A8C4;SAURASHTRA SIGN VIRAMA;Mn;9;NSM;;;;;N;;;;; +A8C5;SAURASHTRA SIGN CANDRABINDU;Mn;0;NSM;;;;;N;;;;; A8CE;SAURASHTRA DANDA;Po;0;L;;;;;N;;;;; A8CF;SAURASHTRA DOUBLE DANDA;Po;0;L;;;;;N;;;;; A8D0;SAURASHTRA DIGIT ZERO;Nd;0;L;;0;0;0;N;;;;; @@ -14097,6 +14181,8 @@ A8F8;DEVANAGARI SIGN PUSHPIKA;Po;0;L;;;;;N;;;;; A8F9;DEVANAGARI GAP FILLER;Po;0;L;;;;;N;;;;; A8FA;DEVANAGARI CARET;Po;0;L;;;;;N;;;;; A8FB;DEVANAGARI HEADSTROKE;Lo;0;L;;;;;N;;;;; +A8FC;DEVANAGARI SIGN SIDDHAM;Po;0;L;;;;;N;;;;; +A8FD;DEVANAGARI JAIN OM;Lo;0;L;;;;;N;;;;; A900;KAYAH LI DIGIT ZERO;Nd;0;L;;0;0;0;N;;;;; A901;KAYAH LI DIGIT ONE;Nd;0;L;;1;1;1;N;;;;; A902;KAYAH LI DIGIT TWO;Nd;0;L;;2;2;2;N;;;;; @@ -14610,7 +14696,7 @@ AB4F;LATIN SMALL LETTER U BAR WITH SHORT RIGHT LEG;Ll;0;L;;;;;N;;;;; AB50;LATIN SMALL LETTER UI;Ll;0;L;;;;;N;;;;; AB51;LATIN SMALL LETTER TURNED UI;Ll;0;L;;;;;N;;;;; AB52;LATIN SMALL LETTER U WITH LEFT HOOK;Ll;0;L;;;;;N;;;;; -AB53;LATIN SMALL LETTER CHI;Ll;0;L;;;;;N;;;;; +AB53;LATIN SMALL LETTER CHI;Ll;0;L;;;;;N;;;A7B3;;A7B3 AB54;LATIN SMALL LETTER CHI WITH LOW RIGHT RING;Ll;0;L;;;;;N;;;;; AB55;LATIN SMALL LETTER CHI WITH LOW LEFT SERIF;Ll;0;L;;;;;N;;;;; AB56;LATIN SMALL LETTER X WITH LOW RIGHT RING;Ll;0;L;;;;;N;;;;; @@ -14623,8 +14709,92 @@ AB5C;MODIFIER LETTER SMALL HENG;Lm;0;L; A727;;;;N;;;;; AB5D;MODIFIER LETTER SMALL L WITH INVERTED LAZY S;Lm;0;L; AB37;;;;N;;;;; AB5E;MODIFIER LETTER SMALL L WITH MIDDLE TILDE;Lm;0;L; 026B;;;;N;;;;; AB5F;MODIFIER LETTER SMALL U WITH LEFT HOOK;Lm;0;L; AB52;;;;N;;;;; +AB60;LATIN SMALL LETTER SAKHA YAT;Ll;0;L;;;;;N;;;;; +AB61;LATIN SMALL LETTER IOTIFIED E;Ll;0;L;;;;;N;;;;; +AB62;LATIN SMALL LETTER OPEN OE;Ll;0;L;;;;;N;;;;; +AB63;LATIN SMALL LETTER UO;Ll;0;L;;;;;N;;;;; AB64;LATIN SMALL LETTER INVERTED ALPHA;Ll;0;L;;;;;N;;;;; AB65;GREEK LETTER SMALL CAPITAL OMEGA;Ll;0;L;;;;;N;;;;; +AB70;CHEROKEE SMALL LETTER A;Ll;0;L;;;;;N;;;13A0;;13A0 +AB71;CHEROKEE SMALL LETTER E;Ll;0;L;;;;;N;;;13A1;;13A1 +AB72;CHEROKEE SMALL LETTER I;Ll;0;L;;;;;N;;;13A2;;13A2 +AB73;CHEROKEE SMALL LETTER O;Ll;0;L;;;;;N;;;13A3;;13A3 +AB74;CHEROKEE SMALL LETTER U;Ll;0;L;;;;;N;;;13A4;;13A4 +AB75;CHEROKEE SMALL LETTER V;Ll;0;L;;;;;N;;;13A5;;13A5 +AB76;CHEROKEE SMALL LETTER GA;Ll;0;L;;;;;N;;;13A6;;13A6 +AB77;CHEROKEE SMALL LETTER KA;Ll;0;L;;;;;N;;;13A7;;13A7 +AB78;CHEROKEE SMALL LETTER GE;Ll;0;L;;;;;N;;;13A8;;13A8 +AB79;CHEROKEE SMALL LETTER GI;Ll;0;L;;;;;N;;;13A9;;13A9 +AB7A;CHEROKEE SMALL LETTER GO;Ll;0;L;;;;;N;;;13AA;;13AA 
+AB7B;CHEROKEE SMALL LETTER GU;Ll;0;L;;;;;N;;;13AB;;13AB +AB7C;CHEROKEE SMALL LETTER GV;Ll;0;L;;;;;N;;;13AC;;13AC +AB7D;CHEROKEE SMALL LETTER HA;Ll;0;L;;;;;N;;;13AD;;13AD +AB7E;CHEROKEE SMALL LETTER HE;Ll;0;L;;;;;N;;;13AE;;13AE +AB7F;CHEROKEE SMALL LETTER HI;Ll;0;L;;;;;N;;;13AF;;13AF +AB80;CHEROKEE SMALL LETTER HO;Ll;0;L;;;;;N;;;13B0;;13B0 +AB81;CHEROKEE SMALL LETTER HU;Ll;0;L;;;;;N;;;13B1;;13B1 +AB82;CHEROKEE SMALL LETTER HV;Ll;0;L;;;;;N;;;13B2;;13B2 +AB83;CHEROKEE SMALL LETTER LA;Ll;0;L;;;;;N;;;13B3;;13B3 +AB84;CHEROKEE SMALL LETTER LE;Ll;0;L;;;;;N;;;13B4;;13B4 +AB85;CHEROKEE SMALL LETTER LI;Ll;0;L;;;;;N;;;13B5;;13B5 +AB86;CHEROKEE SMALL LETTER LO;Ll;0;L;;;;;N;;;13B6;;13B6 +AB87;CHEROKEE SMALL LETTER LU;Ll;0;L;;;;;N;;;13B7;;13B7 +AB88;CHEROKEE SMALL LETTER LV;Ll;0;L;;;;;N;;;13B8;;13B8 +AB89;CHEROKEE SMALL LETTER MA;Ll;0;L;;;;;N;;;13B9;;13B9 +AB8A;CHEROKEE SMALL LETTER ME;Ll;0;L;;;;;N;;;13BA;;13BA +AB8B;CHEROKEE SMALL LETTER MI;Ll;0;L;;;;;N;;;13BB;;13BB +AB8C;CHEROKEE SMALL LETTER MO;Ll;0;L;;;;;N;;;13BC;;13BC +AB8D;CHEROKEE SMALL LETTER MU;Ll;0;L;;;;;N;;;13BD;;13BD +AB8E;CHEROKEE SMALL LETTER NA;Ll;0;L;;;;;N;;;13BE;;13BE +AB8F;CHEROKEE SMALL LETTER HNA;Ll;0;L;;;;;N;;;13BF;;13BF +AB90;CHEROKEE SMALL LETTER NAH;Ll;0;L;;;;;N;;;13C0;;13C0 +AB91;CHEROKEE SMALL LETTER NE;Ll;0;L;;;;;N;;;13C1;;13C1 +AB92;CHEROKEE SMALL LETTER NI;Ll;0;L;;;;;N;;;13C2;;13C2 +AB93;CHEROKEE SMALL LETTER NO;Ll;0;L;;;;;N;;;13C3;;13C3 +AB94;CHEROKEE SMALL LETTER NU;Ll;0;L;;;;;N;;;13C4;;13C4 +AB95;CHEROKEE SMALL LETTER NV;Ll;0;L;;;;;N;;;13C5;;13C5 +AB96;CHEROKEE SMALL LETTER QUA;Ll;0;L;;;;;N;;;13C6;;13C6 +AB97;CHEROKEE SMALL LETTER QUE;Ll;0;L;;;;;N;;;13C7;;13C7 +AB98;CHEROKEE SMALL LETTER QUI;Ll;0;L;;;;;N;;;13C8;;13C8 +AB99;CHEROKEE SMALL LETTER QUO;Ll;0;L;;;;;N;;;13C9;;13C9 +AB9A;CHEROKEE SMALL LETTER QUU;Ll;0;L;;;;;N;;;13CA;;13CA +AB9B;CHEROKEE SMALL LETTER QUV;Ll;0;L;;;;;N;;;13CB;;13CB +AB9C;CHEROKEE SMALL LETTER SA;Ll;0;L;;;;;N;;;13CC;;13CC +AB9D;CHEROKEE SMALL LETTER S;Ll;0;L;;;;;N;;;13CD;;13CD +AB9E;CHEROKEE SMALL LETTER SE;Ll;0;L;;;;;N;;;13CE;;13CE +AB9F;CHEROKEE SMALL LETTER SI;Ll;0;L;;;;;N;;;13CF;;13CF +ABA0;CHEROKEE SMALL LETTER SO;Ll;0;L;;;;;N;;;13D0;;13D0 +ABA1;CHEROKEE SMALL LETTER SU;Ll;0;L;;;;;N;;;13D1;;13D1 +ABA2;CHEROKEE SMALL LETTER SV;Ll;0;L;;;;;N;;;13D2;;13D2 +ABA3;CHEROKEE SMALL LETTER DA;Ll;0;L;;;;;N;;;13D3;;13D3 +ABA4;CHEROKEE SMALL LETTER TA;Ll;0;L;;;;;N;;;13D4;;13D4 +ABA5;CHEROKEE SMALL LETTER DE;Ll;0;L;;;;;N;;;13D5;;13D5 +ABA6;CHEROKEE SMALL LETTER TE;Ll;0;L;;;;;N;;;13D6;;13D6 +ABA7;CHEROKEE SMALL LETTER DI;Ll;0;L;;;;;N;;;13D7;;13D7 +ABA8;CHEROKEE SMALL LETTER TI;Ll;0;L;;;;;N;;;13D8;;13D8 +ABA9;CHEROKEE SMALL LETTER DO;Ll;0;L;;;;;N;;;13D9;;13D9 +ABAA;CHEROKEE SMALL LETTER DU;Ll;0;L;;;;;N;;;13DA;;13DA +ABAB;CHEROKEE SMALL LETTER DV;Ll;0;L;;;;;N;;;13DB;;13DB +ABAC;CHEROKEE SMALL LETTER DLA;Ll;0;L;;;;;N;;;13DC;;13DC +ABAD;CHEROKEE SMALL LETTER TLA;Ll;0;L;;;;;N;;;13DD;;13DD +ABAE;CHEROKEE SMALL LETTER TLE;Ll;0;L;;;;;N;;;13DE;;13DE +ABAF;CHEROKEE SMALL LETTER TLI;Ll;0;L;;;;;N;;;13DF;;13DF +ABB0;CHEROKEE SMALL LETTER TLO;Ll;0;L;;;;;N;;;13E0;;13E0 +ABB1;CHEROKEE SMALL LETTER TLU;Ll;0;L;;;;;N;;;13E1;;13E1 +ABB2;CHEROKEE SMALL LETTER TLV;Ll;0;L;;;;;N;;;13E2;;13E2 +ABB3;CHEROKEE SMALL LETTER TSA;Ll;0;L;;;;;N;;;13E3;;13E3 +ABB4;CHEROKEE SMALL LETTER TSE;Ll;0;L;;;;;N;;;13E4;;13E4 +ABB5;CHEROKEE SMALL LETTER TSI;Ll;0;L;;;;;N;;;13E5;;13E5 +ABB6;CHEROKEE SMALL LETTER TSO;Ll;0;L;;;;;N;;;13E6;;13E6 +ABB7;CHEROKEE SMALL LETTER TSU;Ll;0;L;;;;;N;;;13E7;;13E7 +ABB8;CHEROKEE SMALL LETTER TSV;Ll;0;L;;;;;N;;;13E8;;13E8 
+ABB9;CHEROKEE SMALL LETTER WA;Ll;0;L;;;;;N;;;13E9;;13E9 +ABBA;CHEROKEE SMALL LETTER WE;Ll;0;L;;;;;N;;;13EA;;13EA +ABBB;CHEROKEE SMALL LETTER WI;Ll;0;L;;;;;N;;;13EB;;13EB +ABBC;CHEROKEE SMALL LETTER WO;Ll;0;L;;;;;N;;;13EC;;13EC +ABBD;CHEROKEE SMALL LETTER WU;Ll;0;L;;;;;N;;;13ED;;13ED +ABBE;CHEROKEE SMALL LETTER WV;Ll;0;L;;;;;N;;;13EE;;13EE +ABBF;CHEROKEE SMALL LETTER YA;Ll;0;L;;;;;N;;;13EF;;13EF ABC0;MEETEI MAYEK LETTER KOK;Lo;0;L;;;;;N;;;;; ABC1;MEETEI MAYEK LETTER SAM;Lo;0;L;;;;;N;;;;; ABC2;MEETEI MAYEK LETTER LAI;Lo;0;L;;;;;N;;;;; @@ -15944,6 +16114,8 @@ FE2A;COMBINING TILDE RIGHT HALF BELOW;Mn;220;NSM;;;;;N;;;;; FE2B;COMBINING MACRON LEFT HALF BELOW;Mn;220;NSM;;;;;N;;;;; FE2C;COMBINING MACRON RIGHT HALF BELOW;Mn;220;NSM;;;;;N;;;;; FE2D;COMBINING CONJOINING MACRON BELOW;Mn;220;NSM;;;;;N;;;;; +FE2E;COMBINING CYRILLIC TITLO LEFT HALF;Mn;230;NSM;;;;;N;;;;; +FE2F;COMBINING CYRILLIC TITLO RIGHT HALF;Mn;230;NSM;;;;;N;;;;; FE30;PRESENTATION FORM FOR VERTICAL TWO DOT LEADER;Po;0;ON; 2025;;;;N;GLYPH FOR VERTICAL TWO DOT LEADER;;;; FE31;PRESENTATION FORM FOR VERTICAL EM DASH;Pd;0;ON; 2014;;;;N;GLYPH FOR VERTICAL EM DASH;;;; FE32;PRESENTATION FORM FOR VERTICAL EN DASH;Pd;0;ON; 2013;;;;N;GLYPH FOR VERTICAL EN DASH;;;; @@ -16718,6 +16890,8 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1018A;GREEK ZERO SIGN;No;0;ON;;;;0;N;;;;; 1018B;GREEK ONE QUARTER SIGN;No;0;ON;;;;1/4;N;;;;; 1018C;GREEK SINUSOID SIGN;So;0;ON;;;;;N;;;;; +1018D;GREEK INDICTION SIGN;So;0;L;;;;;N;;;;; +1018E;NOMISMA SIGN;So;0;L;;;;;N;;;;; 10190;ROMAN SEXTANS SIGN;So;0;ON;;;;;N;;;;; 10191;ROMAN UNCIA SIGN;So;0;ON;;;;;N;;;;; 10192;ROMAN SEMUNCIA SIGN;So;0;ON;;;;;N;;;;; @@ -17238,6 +17412,78 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 104A7;OSMANYA DIGIT SEVEN;Nd;0;L;;7;7;7;N;;;;; 104A8;OSMANYA DIGIT EIGHT;Nd;0;L;;8;8;8;N;;;;; 104A9;OSMANYA DIGIT NINE;Nd;0;L;;9;9;9;N;;;;; +104B0;OSAGE CAPITAL LETTER A;Lu;0;L;;;;;N;;;;104D8; +104B1;OSAGE CAPITAL LETTER AI;Lu;0;L;;;;;N;;;;104D9; +104B2;OSAGE CAPITAL LETTER AIN;Lu;0;L;;;;;N;;;;104DA; +104B3;OSAGE CAPITAL LETTER AH;Lu;0;L;;;;;N;;;;104DB; +104B4;OSAGE CAPITAL LETTER BRA;Lu;0;L;;;;;N;;;;104DC; +104B5;OSAGE CAPITAL LETTER CHA;Lu;0;L;;;;;N;;;;104DD; +104B6;OSAGE CAPITAL LETTER EHCHA;Lu;0;L;;;;;N;;;;104DE; +104B7;OSAGE CAPITAL LETTER E;Lu;0;L;;;;;N;;;;104DF; +104B8;OSAGE CAPITAL LETTER EIN;Lu;0;L;;;;;N;;;;104E0; +104B9;OSAGE CAPITAL LETTER HA;Lu;0;L;;;;;N;;;;104E1; +104BA;OSAGE CAPITAL LETTER HYA;Lu;0;L;;;;;N;;;;104E2; +104BB;OSAGE CAPITAL LETTER I;Lu;0;L;;;;;N;;;;104E3; +104BC;OSAGE CAPITAL LETTER KA;Lu;0;L;;;;;N;;;;104E4; +104BD;OSAGE CAPITAL LETTER EHKA;Lu;0;L;;;;;N;;;;104E5; +104BE;OSAGE CAPITAL LETTER KYA;Lu;0;L;;;;;N;;;;104E6; +104BF;OSAGE CAPITAL LETTER LA;Lu;0;L;;;;;N;;;;104E7; +104C0;OSAGE CAPITAL LETTER MA;Lu;0;L;;;;;N;;;;104E8; +104C1;OSAGE CAPITAL LETTER NA;Lu;0;L;;;;;N;;;;104E9; +104C2;OSAGE CAPITAL LETTER O;Lu;0;L;;;;;N;;;;104EA; +104C3;OSAGE CAPITAL LETTER OIN;Lu;0;L;;;;;N;;;;104EB; +104C4;OSAGE CAPITAL LETTER PA;Lu;0;L;;;;;N;;;;104EC; +104C5;OSAGE CAPITAL LETTER EHPA;Lu;0;L;;;;;N;;;;104ED; +104C6;OSAGE CAPITAL LETTER SA;Lu;0;L;;;;;N;;;;104EE; +104C7;OSAGE CAPITAL LETTER SHA;Lu;0;L;;;;;N;;;;104EF; +104C8;OSAGE CAPITAL LETTER TA;Lu;0;L;;;;;N;;;;104F0; +104C9;OSAGE CAPITAL LETTER EHTA;Lu;0;L;;;;;N;;;;104F1; +104CA;OSAGE CAPITAL LETTER TSA;Lu;0;L;;;;;N;;;;104F2; +104CB;OSAGE CAPITAL LETTER EHTSA;Lu;0;L;;;;;N;;;;104F3; +104CC;OSAGE CAPITAL LETTER TSHA;Lu;0;L;;;;;N;;;;104F4; +104CD;OSAGE CAPITAL LETTER DHA;Lu;0;L;;;;;N;;;;104F5; +104CE;OSAGE CAPITAL LETTER 
U;Lu;0;L;;;;;N;;;;104F6; +104CF;OSAGE CAPITAL LETTER WA;Lu;0;L;;;;;N;;;;104F7; +104D0;OSAGE CAPITAL LETTER KHA;Lu;0;L;;;;;N;;;;104F8; +104D1;OSAGE CAPITAL LETTER GHA;Lu;0;L;;;;;N;;;;104F9; +104D2;OSAGE CAPITAL LETTER ZA;Lu;0;L;;;;;N;;;;104FA; +104D3;OSAGE CAPITAL LETTER ZHA;Lu;0;L;;;;;N;;;;104FB; +104D8;OSAGE SMALL LETTER A;Ll;0;L;;;;;N;;;104B0;;104B0 +104D9;OSAGE SMALL LETTER AI;Ll;0;L;;;;;N;;;104B1;;104B1 +104DA;OSAGE SMALL LETTER AIN;Ll;0;L;;;;;N;;;104B2;;104B2 +104DB;OSAGE SMALL LETTER AH;Ll;0;L;;;;;N;;;104B3;;104B3 +104DC;OSAGE SMALL LETTER BRA;Ll;0;L;;;;;N;;;104B4;;104B4 +104DD;OSAGE SMALL LETTER CHA;Ll;0;L;;;;;N;;;104B5;;104B5 +104DE;OSAGE SMALL LETTER EHCHA;Ll;0;L;;;;;N;;;104B6;;104B6 +104DF;OSAGE SMALL LETTER E;Ll;0;L;;;;;N;;;104B7;;104B7 +104E0;OSAGE SMALL LETTER EIN;Ll;0;L;;;;;N;;;104B8;;104B8 +104E1;OSAGE SMALL LETTER HA;Ll;0;L;;;;;N;;;104B9;;104B9 +104E2;OSAGE SMALL LETTER HYA;Ll;0;L;;;;;N;;;104BA;;104BA +104E3;OSAGE SMALL LETTER I;Ll;0;L;;;;;N;;;104BB;;104BB +104E4;OSAGE SMALL LETTER KA;Ll;0;L;;;;;N;;;104BC;;104BC +104E5;OSAGE SMALL LETTER EHKA;Ll;0;L;;;;;N;;;104BD;;104BD +104E6;OSAGE SMALL LETTER KYA;Ll;0;L;;;;;N;;;104BE;;104BE +104E7;OSAGE SMALL LETTER LA;Ll;0;L;;;;;N;;;104BF;;104BF +104E8;OSAGE SMALL LETTER MA;Ll;0;L;;;;;N;;;104C0;;104C0 +104E9;OSAGE SMALL LETTER NA;Ll;0;L;;;;;N;;;104C1;;104C1 +104EA;OSAGE SMALL LETTER O;Ll;0;L;;;;;N;;;104C2;;104C2 +104EB;OSAGE SMALL LETTER OIN;Ll;0;L;;;;;N;;;104C3;;104C3 +104EC;OSAGE SMALL LETTER PA;Ll;0;L;;;;;N;;;104C4;;104C4 +104ED;OSAGE SMALL LETTER EHPA;Ll;0;L;;;;;N;;;104C5;;104C5 +104EE;OSAGE SMALL LETTER SA;Ll;0;L;;;;;N;;;104C6;;104C6 +104EF;OSAGE SMALL LETTER SHA;Ll;0;L;;;;;N;;;104C7;;104C7 +104F0;OSAGE SMALL LETTER TA;Ll;0;L;;;;;N;;;104C8;;104C8 +104F1;OSAGE SMALL LETTER EHTA;Ll;0;L;;;;;N;;;104C9;;104C9 +104F2;OSAGE SMALL LETTER TSA;Ll;0;L;;;;;N;;;104CA;;104CA +104F3;OSAGE SMALL LETTER EHTSA;Ll;0;L;;;;;N;;;104CB;;104CB +104F4;OSAGE SMALL LETTER TSHA;Ll;0;L;;;;;N;;;104CC;;104CC +104F5;OSAGE SMALL LETTER DHA;Ll;0;L;;;;;N;;;104CD;;104CD +104F6;OSAGE SMALL LETTER U;Ll;0;L;;;;;N;;;104CE;;104CE +104F7;OSAGE SMALL LETTER WA;Ll;0;L;;;;;N;;;104CF;;104CF +104F8;OSAGE SMALL LETTER KHA;Ll;0;L;;;;;N;;;104D0;;104D0 +104F9;OSAGE SMALL LETTER GHA;Ll;0;L;;;;;N;;;104D1;;104D1 +104FA;OSAGE SMALL LETTER ZA;Ll;0;L;;;;;N;;;104D2;;104D2 +104FB;OSAGE SMALL LETTER ZHA;Ll;0;L;;;;;N;;;104D3;;104D3 10500;ELBASAN LETTER A;Lo;0;L;;;;;N;;;;; 10501;ELBASAN LETTER BE;Lo;0;L;;;;;N;;;;; 10502;ELBASAN LETTER CE;Lo;0;L;;;;;N;;;;; @@ -17830,6 +18076,32 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 108AD;NABATAEAN NUMBER TEN;No;0;R;;;;10;N;;;;; 108AE;NABATAEAN NUMBER TWENTY;No;0;R;;;;20;N;;;;; 108AF;NABATAEAN NUMBER ONE HUNDRED;No;0;R;;;;100;N;;;;; +108E0;HATRAN LETTER ALEPH;Lo;0;R;;;;;N;;;;; +108E1;HATRAN LETTER BETH;Lo;0;R;;;;;N;;;;; +108E2;HATRAN LETTER GIMEL;Lo;0;R;;;;;N;;;;; +108E3;HATRAN LETTER DALETH-RESH;Lo;0;R;;;;;N;;;;; +108E4;HATRAN LETTER HE;Lo;0;R;;;;;N;;;;; +108E5;HATRAN LETTER WAW;Lo;0;R;;;;;N;;;;; +108E6;HATRAN LETTER ZAYN;Lo;0;R;;;;;N;;;;; +108E7;HATRAN LETTER HETH;Lo;0;R;;;;;N;;;;; +108E8;HATRAN LETTER TETH;Lo;0;R;;;;;N;;;;; +108E9;HATRAN LETTER YODH;Lo;0;R;;;;;N;;;;; +108EA;HATRAN LETTER KAPH;Lo;0;R;;;;;N;;;;; +108EB;HATRAN LETTER LAMEDH;Lo;0;R;;;;;N;;;;; +108EC;HATRAN LETTER MEM;Lo;0;R;;;;;N;;;;; +108ED;HATRAN LETTER NUN;Lo;0;R;;;;;N;;;;; +108EE;HATRAN LETTER SAMEKH;Lo;0;R;;;;;N;;;;; +108EF;HATRAN LETTER AYN;Lo;0;R;;;;;N;;;;; +108F0;HATRAN LETTER PE;Lo;0;R;;;;;N;;;;; +108F1;HATRAN LETTER SADHE;Lo;0;R;;;;;N;;;;; +108F2;HATRAN LETTER 
QOPH;Lo;0;R;;;;;N;;;;; +108F4;HATRAN LETTER SHIN;Lo;0;R;;;;;N;;;;; +108F5;HATRAN LETTER TAW;Lo;0;R;;;;;N;;;;; +108FB;HATRAN NUMBER ONE;No;0;R;;;;1;N;;;;; +108FC;HATRAN NUMBER FIVE;No;0;R;;;;5;N;;;;; +108FD;HATRAN NUMBER TEN;No;0;R;;;;10;N;;;;; +108FE;HATRAN NUMBER TWENTY;No;0;R;;;;20;N;;;;; +108FF;HATRAN NUMBER ONE HUNDRED;No;0;R;;;;100;N;;;;; 10900;PHOENICIAN LETTER ALF;Lo;0;R;;;;;N;;;;; 10901;PHOENICIAN LETTER BET;Lo;0;R;;;;;N;;;;; 10902;PHOENICIAN LETTER GAML;Lo;0;R;;;;;N;;;;; @@ -17942,8 +18214,72 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 109B5;MEROITIC CURSIVE LETTER TE;Lo;0;R;;;;;N;;;;; 109B6;MEROITIC CURSIVE LETTER TO;Lo;0;R;;;;;N;;;;; 109B7;MEROITIC CURSIVE LETTER DA;Lo;0;R;;;;;N;;;;; +109BC;MEROITIC CURSIVE FRACTION ELEVEN TWELFTHS;No;0;R;;;;11/12;N;;;;; +109BD;MEROITIC CURSIVE FRACTION ONE HALF;No;0;R;;;;1/2;N;;;;; 109BE;MEROITIC CURSIVE LOGOGRAM RMT;Lo;0;R;;;;;N;;;;; 109BF;MEROITIC CURSIVE LOGOGRAM IMN;Lo;0;R;;;;;N;;;;; +109C0;MEROITIC CURSIVE NUMBER ONE;No;0;R;;;;1;N;;;;; +109C1;MEROITIC CURSIVE NUMBER TWO;No;0;R;;;;2;N;;;;; +109C2;MEROITIC CURSIVE NUMBER THREE;No;0;R;;;;3;N;;;;; +109C3;MEROITIC CURSIVE NUMBER FOUR;No;0;R;;;;4;N;;;;; +109C4;MEROITIC CURSIVE NUMBER FIVE;No;0;R;;;;5;N;;;;; +109C5;MEROITIC CURSIVE NUMBER SIX;No;0;R;;;;6;N;;;;; +109C6;MEROITIC CURSIVE NUMBER SEVEN;No;0;R;;;;7;N;;;;; +109C7;MEROITIC CURSIVE NUMBER EIGHT;No;0;R;;;;8;N;;;;; +109C8;MEROITIC CURSIVE NUMBER NINE;No;0;R;;;;9;N;;;;; +109C9;MEROITIC CURSIVE NUMBER TEN;No;0;R;;;;10;N;;;;; +109CA;MEROITIC CURSIVE NUMBER TWENTY;No;0;R;;;;20;N;;;;; +109CB;MEROITIC CURSIVE NUMBER THIRTY;No;0;R;;;;30;N;;;;; +109CC;MEROITIC CURSIVE NUMBER FORTY;No;0;R;;;;40;N;;;;; +109CD;MEROITIC CURSIVE NUMBER FIFTY;No;0;R;;;;50;N;;;;; +109CE;MEROITIC CURSIVE NUMBER SIXTY;No;0;R;;;;60;N;;;;; +109CF;MEROITIC CURSIVE NUMBER SEVENTY;No;0;R;;;;70;N;;;;; +109D2;MEROITIC CURSIVE NUMBER ONE HUNDRED;No;0;R;;;;100;N;;;;; +109D3;MEROITIC CURSIVE NUMBER TWO HUNDRED;No;0;R;;;;200;N;;;;; +109D4;MEROITIC CURSIVE NUMBER THREE HUNDRED;No;0;R;;;;300;N;;;;; +109D5;MEROITIC CURSIVE NUMBER FOUR HUNDRED;No;0;R;;;;400;N;;;;; +109D6;MEROITIC CURSIVE NUMBER FIVE HUNDRED;No;0;R;;;;500;N;;;;; +109D7;MEROITIC CURSIVE NUMBER SIX HUNDRED;No;0;R;;;;600;N;;;;; +109D8;MEROITIC CURSIVE NUMBER SEVEN HUNDRED;No;0;R;;;;700;N;;;;; +109D9;MEROITIC CURSIVE NUMBER EIGHT HUNDRED;No;0;R;;;;800;N;;;;; +109DA;MEROITIC CURSIVE NUMBER NINE HUNDRED;No;0;R;;;;900;N;;;;; +109DB;MEROITIC CURSIVE NUMBER ONE THOUSAND;No;0;R;;;;1000;N;;;;; +109DC;MEROITIC CURSIVE NUMBER TWO THOUSAND;No;0;R;;;;2000;N;;;;; +109DD;MEROITIC CURSIVE NUMBER THREE THOUSAND;No;0;R;;;;3000;N;;;;; +109DE;MEROITIC CURSIVE NUMBER FOUR THOUSAND;No;0;R;;;;4000;N;;;;; +109DF;MEROITIC CURSIVE NUMBER FIVE THOUSAND;No;0;R;;;;5000;N;;;;; +109E0;MEROITIC CURSIVE NUMBER SIX THOUSAND;No;0;R;;;;6000;N;;;;; +109E1;MEROITIC CURSIVE NUMBER SEVEN THOUSAND;No;0;R;;;;7000;N;;;;; +109E2;MEROITIC CURSIVE NUMBER EIGHT THOUSAND;No;0;R;;;;8000;N;;;;; +109E3;MEROITIC CURSIVE NUMBER NINE THOUSAND;No;0;R;;;;9000;N;;;;; +109E4;MEROITIC CURSIVE NUMBER TEN THOUSAND;No;0;R;;;;10000;N;;;;; +109E5;MEROITIC CURSIVE NUMBER TWENTY THOUSAND;No;0;R;;;;20000;N;;;;; +109E6;MEROITIC CURSIVE NUMBER THIRTY THOUSAND;No;0;R;;;;30000;N;;;;; +109E7;MEROITIC CURSIVE NUMBER FORTY THOUSAND;No;0;R;;;;40000;N;;;;; +109E8;MEROITIC CURSIVE NUMBER FIFTY THOUSAND;No;0;R;;;;50000;N;;;;; +109E9;MEROITIC CURSIVE NUMBER SIXTY THOUSAND;No;0;R;;;;60000;N;;;;; +109EA;MEROITIC CURSIVE NUMBER SEVENTY THOUSAND;No;0;R;;;;70000;N;;;;; +109EB;MEROITIC CURSIVE NUMBER 
EIGHTY THOUSAND;No;0;R;;;;80000;N;;;;; +109EC;MEROITIC CURSIVE NUMBER NINETY THOUSAND;No;0;R;;;;90000;N;;;;; +109ED;MEROITIC CURSIVE NUMBER ONE HUNDRED THOUSAND;No;0;R;;;;100000;N;;;;; +109EE;MEROITIC CURSIVE NUMBER TWO HUNDRED THOUSAND;No;0;R;;;;200000;N;;;;; +109EF;MEROITIC CURSIVE NUMBER THREE HUNDRED THOUSAND;No;0;R;;;;300000;N;;;;; +109F0;MEROITIC CURSIVE NUMBER FOUR HUNDRED THOUSAND;No;0;R;;;;400000;N;;;;; +109F1;MEROITIC CURSIVE NUMBER FIVE HUNDRED THOUSAND;No;0;R;;;;500000;N;;;;; +109F2;MEROITIC CURSIVE NUMBER SIX HUNDRED THOUSAND;No;0;R;;;;600000;N;;;;; +109F3;MEROITIC CURSIVE NUMBER SEVEN HUNDRED THOUSAND;No;0;R;;;;700000;N;;;;; +109F4;MEROITIC CURSIVE NUMBER EIGHT HUNDRED THOUSAND;No;0;R;;;;800000;N;;;;; +109F5;MEROITIC CURSIVE NUMBER NINE HUNDRED THOUSAND;No;0;R;;;;900000;N;;;;; +109F6;MEROITIC CURSIVE FRACTION ONE TWELFTH;No;0;R;;;;1/12;N;;;;; +109F7;MEROITIC CURSIVE FRACTION TWO TWELFTHS;No;0;R;;;;2/12;N;;;;; +109F8;MEROITIC CURSIVE FRACTION THREE TWELFTHS;No;0;R;;;;3/12;N;;;;; +109F9;MEROITIC CURSIVE FRACTION FOUR TWELFTHS;No;0;R;;;;4/12;N;;;;; +109FA;MEROITIC CURSIVE FRACTION FIVE TWELFTHS;No;0;R;;;;5/12;N;;;;; +109FB;MEROITIC CURSIVE FRACTION SIX TWELFTHS;No;0;R;;;;6/12;N;;;;; +109FC;MEROITIC CURSIVE FRACTION SEVEN TWELFTHS;No;0;R;;;;7/12;N;;;;; +109FD;MEROITIC CURSIVE FRACTION EIGHT TWELFTHS;No;0;R;;;;8/12;N;;;;; +109FE;MEROITIC CURSIVE FRACTION NINE TWELFTHS;No;0;R;;;;9/12;N;;;;; +109FF;MEROITIC CURSIVE FRACTION TEN TWELFTHS;No;0;R;;;;10/12;N;;;;; 10A00;KHAROSHTHI LETTER A;Lo;0;R;;;;;N;;;;; 10A01;KHAROSHTHI VOWEL SIGN I;Mn;0;NSM;;;;;N;;;;; 10A02;KHAROSHTHI VOWEL SIGN U;Mn;0;NSM;;;;;N;;;;; @@ -18344,6 +18680,114 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 10C46;OLD TURKIC LETTER YENISEI AET;Lo;0;R;;;;;N;;;;; 10C47;OLD TURKIC LETTER ORKHON OT;Lo;0;R;;;;;N;;;;; 10C48;OLD TURKIC LETTER ORKHON BASH;Lo;0;R;;;;;N;;;;; +10C80;OLD HUNGARIAN CAPITAL LETTER A;Lu;0;R;;;;;N;;;;10CC0; +10C81;OLD HUNGARIAN CAPITAL LETTER AA;Lu;0;R;;;;;N;;;;10CC1; +10C82;OLD HUNGARIAN CAPITAL LETTER EB;Lu;0;R;;;;;N;;;;10CC2; +10C83;OLD HUNGARIAN CAPITAL LETTER AMB;Lu;0;R;;;;;N;;;;10CC3; +10C84;OLD HUNGARIAN CAPITAL LETTER EC;Lu;0;R;;;;;N;;;;10CC4; +10C85;OLD HUNGARIAN CAPITAL LETTER ENC;Lu;0;R;;;;;N;;;;10CC5; +10C86;OLD HUNGARIAN CAPITAL LETTER ECS;Lu;0;R;;;;;N;;;;10CC6; +10C87;OLD HUNGARIAN CAPITAL LETTER ED;Lu;0;R;;;;;N;;;;10CC7; +10C88;OLD HUNGARIAN CAPITAL LETTER AND;Lu;0;R;;;;;N;;;;10CC8; +10C89;OLD HUNGARIAN CAPITAL LETTER E;Lu;0;R;;;;;N;;;;10CC9; +10C8A;OLD HUNGARIAN CAPITAL LETTER CLOSE E;Lu;0;R;;;;;N;;;;10CCA; +10C8B;OLD HUNGARIAN CAPITAL LETTER EE;Lu;0;R;;;;;N;;;;10CCB; +10C8C;OLD HUNGARIAN CAPITAL LETTER EF;Lu;0;R;;;;;N;;;;10CCC; +10C8D;OLD HUNGARIAN CAPITAL LETTER EG;Lu;0;R;;;;;N;;;;10CCD; +10C8E;OLD HUNGARIAN CAPITAL LETTER EGY;Lu;0;R;;;;;N;;;;10CCE; +10C8F;OLD HUNGARIAN CAPITAL LETTER EH;Lu;0;R;;;;;N;;;;10CCF; +10C90;OLD HUNGARIAN CAPITAL LETTER I;Lu;0;R;;;;;N;;;;10CD0; +10C91;OLD HUNGARIAN CAPITAL LETTER II;Lu;0;R;;;;;N;;;;10CD1; +10C92;OLD HUNGARIAN CAPITAL LETTER EJ;Lu;0;R;;;;;N;;;;10CD2; +10C93;OLD HUNGARIAN CAPITAL LETTER EK;Lu;0;R;;;;;N;;;;10CD3; +10C94;OLD HUNGARIAN CAPITAL LETTER AK;Lu;0;R;;;;;N;;;;10CD4; +10C95;OLD HUNGARIAN CAPITAL LETTER UNK;Lu;0;R;;;;;N;;;;10CD5; +10C96;OLD HUNGARIAN CAPITAL LETTER EL;Lu;0;R;;;;;N;;;;10CD6; +10C97;OLD HUNGARIAN CAPITAL LETTER ELY;Lu;0;R;;;;;N;;;;10CD7; +10C98;OLD HUNGARIAN CAPITAL LETTER EM;Lu;0;R;;;;;N;;;;10CD8; +10C99;OLD HUNGARIAN CAPITAL LETTER EN;Lu;0;R;;;;;N;;;;10CD9; +10C9A;OLD HUNGARIAN CAPITAL LETTER ENY;Lu;0;R;;;;;N;;;;10CDA; 
+10C9B;OLD HUNGARIAN CAPITAL LETTER O;Lu;0;R;;;;;N;;;;10CDB; +10C9C;OLD HUNGARIAN CAPITAL LETTER OO;Lu;0;R;;;;;N;;;;10CDC; +10C9D;OLD HUNGARIAN CAPITAL LETTER NIKOLSBURG OE;Lu;0;R;;;;;N;;;;10CDD; +10C9E;OLD HUNGARIAN CAPITAL LETTER RUDIMENTA OE;Lu;0;R;;;;;N;;;;10CDE; +10C9F;OLD HUNGARIAN CAPITAL LETTER OEE;Lu;0;R;;;;;N;;;;10CDF; +10CA0;OLD HUNGARIAN CAPITAL LETTER EP;Lu;0;R;;;;;N;;;;10CE0; +10CA1;OLD HUNGARIAN CAPITAL LETTER EMP;Lu;0;R;;;;;N;;;;10CE1; +10CA2;OLD HUNGARIAN CAPITAL LETTER ER;Lu;0;R;;;;;N;;;;10CE2; +10CA3;OLD HUNGARIAN CAPITAL LETTER SHORT ER;Lu;0;R;;;;;N;;;;10CE3; +10CA4;OLD HUNGARIAN CAPITAL LETTER ES;Lu;0;R;;;;;N;;;;10CE4; +10CA5;OLD HUNGARIAN CAPITAL LETTER ESZ;Lu;0;R;;;;;N;;;;10CE5; +10CA6;OLD HUNGARIAN CAPITAL LETTER ET;Lu;0;R;;;;;N;;;;10CE6; +10CA7;OLD HUNGARIAN CAPITAL LETTER ENT;Lu;0;R;;;;;N;;;;10CE7; +10CA8;OLD HUNGARIAN CAPITAL LETTER ETY;Lu;0;R;;;;;N;;;;10CE8; +10CA9;OLD HUNGARIAN CAPITAL LETTER ECH;Lu;0;R;;;;;N;;;;10CE9; +10CAA;OLD HUNGARIAN CAPITAL LETTER U;Lu;0;R;;;;;N;;;;10CEA; +10CAB;OLD HUNGARIAN CAPITAL LETTER UU;Lu;0;R;;;;;N;;;;10CEB; +10CAC;OLD HUNGARIAN CAPITAL LETTER NIKOLSBURG UE;Lu;0;R;;;;;N;;;;10CEC; +10CAD;OLD HUNGARIAN CAPITAL LETTER RUDIMENTA UE;Lu;0;R;;;;;N;;;;10CED; +10CAE;OLD HUNGARIAN CAPITAL LETTER EV;Lu;0;R;;;;;N;;;;10CEE; +10CAF;OLD HUNGARIAN CAPITAL LETTER EZ;Lu;0;R;;;;;N;;;;10CEF; +10CB0;OLD HUNGARIAN CAPITAL LETTER EZS;Lu;0;R;;;;;N;;;;10CF0; +10CB1;OLD HUNGARIAN CAPITAL LETTER ENT-SHAPED SIGN;Lu;0;R;;;;;N;;;;10CF1; +10CB2;OLD HUNGARIAN CAPITAL LETTER US;Lu;0;R;;;;;N;;;;10CF2; +10CC0;OLD HUNGARIAN SMALL LETTER A;Ll;0;R;;;;;N;;;10C80;;10C80 +10CC1;OLD HUNGARIAN SMALL LETTER AA;Ll;0;R;;;;;N;;;10C81;;10C81 +10CC2;OLD HUNGARIAN SMALL LETTER EB;Ll;0;R;;;;;N;;;10C82;;10C82 +10CC3;OLD HUNGARIAN SMALL LETTER AMB;Ll;0;R;;;;;N;;;10C83;;10C83 +10CC4;OLD HUNGARIAN SMALL LETTER EC;Ll;0;R;;;;;N;;;10C84;;10C84 +10CC5;OLD HUNGARIAN SMALL LETTER ENC;Ll;0;R;;;;;N;;;10C85;;10C85 +10CC6;OLD HUNGARIAN SMALL LETTER ECS;Ll;0;R;;;;;N;;;10C86;;10C86 +10CC7;OLD HUNGARIAN SMALL LETTER ED;Ll;0;R;;;;;N;;;10C87;;10C87 +10CC8;OLD HUNGARIAN SMALL LETTER AND;Ll;0;R;;;;;N;;;10C88;;10C88 +10CC9;OLD HUNGARIAN SMALL LETTER E;Ll;0;R;;;;;N;;;10C89;;10C89 +10CCA;OLD HUNGARIAN SMALL LETTER CLOSE E;Ll;0;R;;;;;N;;;10C8A;;10C8A +10CCB;OLD HUNGARIAN SMALL LETTER EE;Ll;0;R;;;;;N;;;10C8B;;10C8B +10CCC;OLD HUNGARIAN SMALL LETTER EF;Ll;0;R;;;;;N;;;10C8C;;10C8C +10CCD;OLD HUNGARIAN SMALL LETTER EG;Ll;0;R;;;;;N;;;10C8D;;10C8D +10CCE;OLD HUNGARIAN SMALL LETTER EGY;Ll;0;R;;;;;N;;;10C8E;;10C8E +10CCF;OLD HUNGARIAN SMALL LETTER EH;Ll;0;R;;;;;N;;;10C8F;;10C8F +10CD0;OLD HUNGARIAN SMALL LETTER I;Ll;0;R;;;;;N;;;10C90;;10C90 +10CD1;OLD HUNGARIAN SMALL LETTER II;Ll;0;R;;;;;N;;;10C91;;10C91 +10CD2;OLD HUNGARIAN SMALL LETTER EJ;Ll;0;R;;;;;N;;;10C92;;10C92 +10CD3;OLD HUNGARIAN SMALL LETTER EK;Ll;0;R;;;;;N;;;10C93;;10C93 +10CD4;OLD HUNGARIAN SMALL LETTER AK;Ll;0;R;;;;;N;;;10C94;;10C94 +10CD5;OLD HUNGARIAN SMALL LETTER UNK;Ll;0;R;;;;;N;;;10C95;;10C95 +10CD6;OLD HUNGARIAN SMALL LETTER EL;Ll;0;R;;;;;N;;;10C96;;10C96 +10CD7;OLD HUNGARIAN SMALL LETTER ELY;Ll;0;R;;;;;N;;;10C97;;10C97 +10CD8;OLD HUNGARIAN SMALL LETTER EM;Ll;0;R;;;;;N;;;10C98;;10C98 +10CD9;OLD HUNGARIAN SMALL LETTER EN;Ll;0;R;;;;;N;;;10C99;;10C99 +10CDA;OLD HUNGARIAN SMALL LETTER ENY;Ll;0;R;;;;;N;;;10C9A;;10C9A +10CDB;OLD HUNGARIAN SMALL LETTER O;Ll;0;R;;;;;N;;;10C9B;;10C9B +10CDC;OLD HUNGARIAN SMALL LETTER OO;Ll;0;R;;;;;N;;;10C9C;;10C9C +10CDD;OLD HUNGARIAN SMALL LETTER NIKOLSBURG OE;Ll;0;R;;;;;N;;;10C9D;;10C9D +10CDE;OLD HUNGARIAN SMALL 
LETTER RUDIMENTA OE;Ll;0;R;;;;;N;;;10C9E;;10C9E +10CDF;OLD HUNGARIAN SMALL LETTER OEE;Ll;0;R;;;;;N;;;10C9F;;10C9F +10CE0;OLD HUNGARIAN SMALL LETTER EP;Ll;0;R;;;;;N;;;10CA0;;10CA0 +10CE1;OLD HUNGARIAN SMALL LETTER EMP;Ll;0;R;;;;;N;;;10CA1;;10CA1 +10CE2;OLD HUNGARIAN SMALL LETTER ER;Ll;0;R;;;;;N;;;10CA2;;10CA2 +10CE3;OLD HUNGARIAN SMALL LETTER SHORT ER;Ll;0;R;;;;;N;;;10CA3;;10CA3 +10CE4;OLD HUNGARIAN SMALL LETTER ES;Ll;0;R;;;;;N;;;10CA4;;10CA4 +10CE5;OLD HUNGARIAN SMALL LETTER ESZ;Ll;0;R;;;;;N;;;10CA5;;10CA5 +10CE6;OLD HUNGARIAN SMALL LETTER ET;Ll;0;R;;;;;N;;;10CA6;;10CA6 +10CE7;OLD HUNGARIAN SMALL LETTER ENT;Ll;0;R;;;;;N;;;10CA7;;10CA7 +10CE8;OLD HUNGARIAN SMALL LETTER ETY;Ll;0;R;;;;;N;;;10CA8;;10CA8 +10CE9;OLD HUNGARIAN SMALL LETTER ECH;Ll;0;R;;;;;N;;;10CA9;;10CA9 +10CEA;OLD HUNGARIAN SMALL LETTER U;Ll;0;R;;;;;N;;;10CAA;;10CAA +10CEB;OLD HUNGARIAN SMALL LETTER UU;Ll;0;R;;;;;N;;;10CAB;;10CAB +10CEC;OLD HUNGARIAN SMALL LETTER NIKOLSBURG UE;Ll;0;R;;;;;N;;;10CAC;;10CAC +10CED;OLD HUNGARIAN SMALL LETTER RUDIMENTA UE;Ll;0;R;;;;;N;;;10CAD;;10CAD +10CEE;OLD HUNGARIAN SMALL LETTER EV;Ll;0;R;;;;;N;;;10CAE;;10CAE +10CEF;OLD HUNGARIAN SMALL LETTER EZ;Ll;0;R;;;;;N;;;10CAF;;10CAF +10CF0;OLD HUNGARIAN SMALL LETTER EZS;Ll;0;R;;;;;N;;;10CB0;;10CB0 +10CF1;OLD HUNGARIAN SMALL LETTER ENT-SHAPED SIGN;Ll;0;R;;;;;N;;;10CB1;;10CB1 +10CF2;OLD HUNGARIAN SMALL LETTER US;Ll;0;R;;;;;N;;;10CB2;;10CB2 +10CFA;OLD HUNGARIAN NUMBER ONE;No;0;R;;;;1;N;;;;; +10CFB;OLD HUNGARIAN NUMBER FIVE;No;0;R;;;;5;N;;;;; +10CFC;OLD HUNGARIAN NUMBER TEN;No;0;R;;;;10;N;;;;; +10CFD;OLD HUNGARIAN NUMBER FIFTY;No;0;R;;;;50;N;;;;; +10CFE;OLD HUNGARIAN NUMBER ONE HUNDRED;No;0;R;;;;100;N;;;;; +10CFF;OLD HUNGARIAN NUMBER ONE THOUSAND;No;0;R;;;;1000;N;;;;; 10E60;RUMI DIGIT ONE;No;0;AN;;;1;1;N;;;;; 10E61;RUMI DIGIT TWO;No;0;AN;;;2;2;N;;;;; 10E62;RUMI DIGIT THREE;No;0;AN;;;3;3;N;;;;; @@ -18764,6 +19208,10 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 111C6;SHARADA DOUBLE DANDA;Po;0;L;;;;;N;;;;; 111C7;SHARADA ABBREVIATION SIGN;Po;0;L;;;;;N;;;;; 111C8;SHARADA SEPARATOR;Po;0;L;;;;;N;;;;; +111C9;SHARADA SANDHI MARK;Po;0;L;;;;;N;;;;; +111CA;SHARADA SIGN NUKTA;Mn;7;NSM;;;;;N;;;;; +111CB;SHARADA VOWEL MODIFIER MARK;Mn;0;NSM;;;;;N;;;;; +111CC;SHARADA EXTRA SHORT VOWEL MARK;Mn;0;NSM;;;;;N;;;;; 111CD;SHARADA SUTRA MARK;Po;0;L;;;;;N;;;;; 111D0;SHARADA DIGIT ZERO;Nd;0;L;;0;0;0;N;;;;; 111D1;SHARADA DIGIT ONE;Nd;0;L;;1;1;1;N;;;;; @@ -18776,6 +19224,11 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 111D8;SHARADA DIGIT EIGHT;Nd;0;L;;8;8;8;N;;;;; 111D9;SHARADA DIGIT NINE;Nd;0;L;;9;9;9;N;;;;; 111DA;SHARADA EKAM;Lo;0;L;;;;;N;;;;; +111DB;SHARADA SIGN SIDDHAM;Po;0;L;;;;;N;;;;; +111DC;SHARADA HEADSTROKE;Lo;0;L;;;;;N;;;;; +111DD;SHARADA CONTINUATION SIGN;Po;0;L;;;;;N;;;;; +111DE;SHARADA SECTION MARK-1;Po;0;L;;;;;N;;;;; +111DF;SHARADA SECTION MARK-2;Po;0;L;;;;;N;;;;; 111E1;SINHALA ARCHAIC DIGIT ONE;No;0;L;;;;1;N;;;;; 111E2;SINHALA ARCHAIC DIGIT TWO;No;0;L;;;;2;N;;;;; 111E3;SINHALA ARCHAIC DIGIT THREE;No;0;L;;;;3;N;;;;; @@ -18857,6 +19310,45 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1123B;KHOJKI SECTION MARK;Po;0;L;;;;;N;;;;; 1123C;KHOJKI DOUBLE SECTION MARK;Po;0;L;;;;;N;;;;; 1123D;KHOJKI ABBREVIATION SIGN;Po;0;L;;;;;N;;;;; +1123E;KHOJKI SIGN SUKUN;Mn;0;NSM;;;;;N;;;;; +11280;MULTANI LETTER A;Lo;0;L;;;;;N;;;;; +11281;MULTANI LETTER I;Lo;0;L;;;;;N;;;;; +11282;MULTANI LETTER U;Lo;0;L;;;;;N;;;;; +11283;MULTANI LETTER E;Lo;0;L;;;;;N;;;;; +11284;MULTANI LETTER KA;Lo;0;L;;;;;N;;;;; +11285;MULTANI LETTER KHA;Lo;0;L;;;;;N;;;;; +11286;MULTANI LETTER GA;Lo;0;L;;;;;N;;;;; 
+11288;MULTANI LETTER GHA;Lo;0;L;;;;;N;;;;; +1128A;MULTANI LETTER CA;Lo;0;L;;;;;N;;;;; +1128B;MULTANI LETTER CHA;Lo;0;L;;;;;N;;;;; +1128C;MULTANI LETTER JA;Lo;0;L;;;;;N;;;;; +1128D;MULTANI LETTER JJA;Lo;0;L;;;;;N;;;;; +1128F;MULTANI LETTER NYA;Lo;0;L;;;;;N;;;;; +11290;MULTANI LETTER TTA;Lo;0;L;;;;;N;;;;; +11291;MULTANI LETTER TTHA;Lo;0;L;;;;;N;;;;; +11292;MULTANI LETTER DDA;Lo;0;L;;;;;N;;;;; +11293;MULTANI LETTER DDDA;Lo;0;L;;;;;N;;;;; +11294;MULTANI LETTER DDHA;Lo;0;L;;;;;N;;;;; +11295;MULTANI LETTER NNA;Lo;0;L;;;;;N;;;;; +11296;MULTANI LETTER TA;Lo;0;L;;;;;N;;;;; +11297;MULTANI LETTER THA;Lo;0;L;;;;;N;;;;; +11298;MULTANI LETTER DA;Lo;0;L;;;;;N;;;;; +11299;MULTANI LETTER DHA;Lo;0;L;;;;;N;;;;; +1129A;MULTANI LETTER NA;Lo;0;L;;;;;N;;;;; +1129B;MULTANI LETTER PA;Lo;0;L;;;;;N;;;;; +1129C;MULTANI LETTER PHA;Lo;0;L;;;;;N;;;;; +1129D;MULTANI LETTER BA;Lo;0;L;;;;;N;;;;; +1129F;MULTANI LETTER BHA;Lo;0;L;;;;;N;;;;; +112A0;MULTANI LETTER MA;Lo;0;L;;;;;N;;;;; +112A1;MULTANI LETTER YA;Lo;0;L;;;;;N;;;;; +112A2;MULTANI LETTER RA;Lo;0;L;;;;;N;;;;; +112A3;MULTANI LETTER LA;Lo;0;L;;;;;N;;;;; +112A4;MULTANI LETTER VA;Lo;0;L;;;;;N;;;;; +112A5;MULTANI LETTER SA;Lo;0;L;;;;;N;;;;; +112A6;MULTANI LETTER HA;Lo;0;L;;;;;N;;;;; +112A7;MULTANI LETTER RRA;Lo;0;L;;;;;N;;;;; +112A8;MULTANI LETTER RHA;Lo;0;L;;;;;N;;;;; +112A9;MULTANI SECTION MARK;Po;0;L;;;;;N;;;;; 112B0;KHUDAWADI LETTER A;Lo;0;L;;;;;N;;;;; 112B1;KHUDAWADI LETTER AA;Lo;0;L;;;;;N;;;;; 112B2;KHUDAWADI LETTER I;Lo;0;L;;;;;N;;;;; @@ -18926,6 +19418,7 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 112F7;KHUDAWADI DIGIT SEVEN;Nd;0;L;;7;7;7;N;;;;; 112F8;KHUDAWADI DIGIT EIGHT;Nd;0;L;;8;8;8;N;;;;; 112F9;KHUDAWADI DIGIT NINE;Nd;0;L;;9;9;9;N;;;;; +11300;GRANTHA SIGN COMBINING ANUSVARA ABOVE;Mn;0;NSM;;;;;N;;;;; 11301;GRANTHA SIGN CANDRABINDU;Mn;0;NSM;;;;;N;;;;; 11302;GRANTHA SIGN ANUSVARA;Mc;0;L;;;;;N;;;;; 11303;GRANTHA SIGN VISARGA;Mc;0;L;;;;;N;;;;; @@ -18989,6 +19482,7 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1134B;GRANTHA VOWEL SIGN OO;Mc;0;L;11347 1133E;;;;N;;;;; 1134C;GRANTHA VOWEL SIGN AU;Mc;0;L;11347 11357;;;;N;;;;; 1134D;GRANTHA SIGN VIRAMA;Mc;9;L;;;;;N;;;;; +11350;GRANTHA OM;Lo;0;L;;;;;N;;;;; 11357;GRANTHA AU LENGTH MARK;Mc;0;L;;;;;N;;;;; 1135D;GRANTHA SIGN PLUTA;Lo;0;L;;;;;N;;;;; 1135E;GRANTHA LETTER VEDIC ANUSVARA;Lo;0;L;;;;;N;;;;; @@ -19009,6 +19503,98 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 11372;COMBINING GRANTHA LETTER NA;Mn;230;NSM;;;;;N;;;;; 11373;COMBINING GRANTHA LETTER VI;Mn;230;NSM;;;;;N;;;;; 11374;COMBINING GRANTHA LETTER PA;Mn;230;NSM;;;;;N;;;;; +11400;NEWA LETTER A;Lo;0;L;;;;;N;;;;; +11401;NEWA LETTER AA;Lo;0;L;;;;;N;;;;; +11402;NEWA LETTER I;Lo;0;L;;;;;N;;;;; +11403;NEWA LETTER II;Lo;0;L;;;;;N;;;;; +11404;NEWA LETTER U;Lo;0;L;;;;;N;;;;; +11405;NEWA LETTER UU;Lo;0;L;;;;;N;;;;; +11406;NEWA LETTER VOCALIC R;Lo;0;L;;;;;N;;;;; +11407;NEWA LETTER VOCALIC RR;Lo;0;L;;;;;N;;;;; +11408;NEWA LETTER VOCALIC L;Lo;0;L;;;;;N;;;;; +11409;NEWA LETTER VOCALIC LL;Lo;0;L;;;;;N;;;;; +1140A;NEWA LETTER E;Lo;0;L;;;;;N;;;;; +1140B;NEWA LETTER AI;Lo;0;L;;;;;N;;;;; +1140C;NEWA LETTER O;Lo;0;L;;;;;N;;;;; +1140D;NEWA LETTER AU;Lo;0;L;;;;;N;;;;; +1140E;NEWA LETTER KA;Lo;0;L;;;;;N;;;;; +1140F;NEWA LETTER KHA;Lo;0;L;;;;;N;;;;; +11410;NEWA LETTER GA;Lo;0;L;;;;;N;;;;; +11411;NEWA LETTER GHA;Lo;0;L;;;;;N;;;;; +11412;NEWA LETTER NGA;Lo;0;L;;;;;N;;;;; +11413;NEWA LETTER NGHA;Lo;0;L;;;;;N;;;;; +11414;NEWA LETTER CA;Lo;0;L;;;;;N;;;;; +11415;NEWA LETTER CHA;Lo;0;L;;;;;N;;;;; +11416;NEWA LETTER JA;Lo;0;L;;;;;N;;;;; +11417;NEWA LETTER JHA;Lo;0;L;;;;;N;;;;; 
+11418;NEWA LETTER NYA;Lo;0;L;;;;;N;;;;; +11419;NEWA LETTER NYHA;Lo;0;L;;;;;N;;;;; +1141A;NEWA LETTER TTA;Lo;0;L;;;;;N;;;;; +1141B;NEWA LETTER TTHA;Lo;0;L;;;;;N;;;;; +1141C;NEWA LETTER DDA;Lo;0;L;;;;;N;;;;; +1141D;NEWA LETTER DDHA;Lo;0;L;;;;;N;;;;; +1141E;NEWA LETTER NNA;Lo;0;L;;;;;N;;;;; +1141F;NEWA LETTER TA;Lo;0;L;;;;;N;;;;; +11420;NEWA LETTER THA;Lo;0;L;;;;;N;;;;; +11421;NEWA LETTER DA;Lo;0;L;;;;;N;;;;; +11422;NEWA LETTER DHA;Lo;0;L;;;;;N;;;;; +11423;NEWA LETTER NA;Lo;0;L;;;;;N;;;;; +11424;NEWA LETTER NHA;Lo;0;L;;;;;N;;;;; +11425;NEWA LETTER PA;Lo;0;L;;;;;N;;;;; +11426;NEWA LETTER PHA;Lo;0;L;;;;;N;;;;; +11427;NEWA LETTER BA;Lo;0;L;;;;;N;;;;; +11428;NEWA LETTER BHA;Lo;0;L;;;;;N;;;;; +11429;NEWA LETTER MA;Lo;0;L;;;;;N;;;;; +1142A;NEWA LETTER MHA;Lo;0;L;;;;;N;;;;; +1142B;NEWA LETTER YA;Lo;0;L;;;;;N;;;;; +1142C;NEWA LETTER RA;Lo;0;L;;;;;N;;;;; +1142D;NEWA LETTER RHA;Lo;0;L;;;;;N;;;;; +1142E;NEWA LETTER LA;Lo;0;L;;;;;N;;;;; +1142F;NEWA LETTER LHA;Lo;0;L;;;;;N;;;;; +11430;NEWA LETTER WA;Lo;0;L;;;;;N;;;;; +11431;NEWA LETTER SHA;Lo;0;L;;;;;N;;;;; +11432;NEWA LETTER SSA;Lo;0;L;;;;;N;;;;; +11433;NEWA LETTER SA;Lo;0;L;;;;;N;;;;; +11434;NEWA LETTER HA;Lo;0;L;;;;;N;;;;; +11435;NEWA VOWEL SIGN AA;Mc;0;L;;;;;N;;;;; +11436;NEWA VOWEL SIGN I;Mc;0;L;;;;;N;;;;; +11437;NEWA VOWEL SIGN II;Mc;0;L;;;;;N;;;;; +11438;NEWA VOWEL SIGN U;Mn;0;NSM;;;;;N;;;;; +11439;NEWA VOWEL SIGN UU;Mn;0;NSM;;;;;N;;;;; +1143A;NEWA VOWEL SIGN VOCALIC R;Mn;0;NSM;;;;;N;;;;; +1143B;NEWA VOWEL SIGN VOCALIC RR;Mn;0;NSM;;;;;N;;;;; +1143C;NEWA VOWEL SIGN VOCALIC L;Mn;0;NSM;;;;;N;;;;; +1143D;NEWA VOWEL SIGN VOCALIC LL;Mn;0;NSM;;;;;N;;;;; +1143E;NEWA VOWEL SIGN E;Mn;0;NSM;;;;;N;;;;; +1143F;NEWA VOWEL SIGN AI;Mn;0;NSM;;;;;N;;;;; +11440;NEWA VOWEL SIGN O;Mc;0;L;;;;;N;;;;; +11441;NEWA VOWEL SIGN AU;Mc;0;L;;;;;N;;;;; +11442;NEWA SIGN VIRAMA;Mn;9;NSM;;;;;N;;;;; +11443;NEWA SIGN CANDRABINDU;Mn;0;NSM;;;;;N;;;;; +11444;NEWA SIGN ANUSVARA;Mn;0;NSM;;;;;N;;;;; +11445;NEWA SIGN VISARGA;Mc;0;L;;;;;N;;;;; +11446;NEWA SIGN NUKTA;Mn;7;NSM;;;;;N;;;;; +11447;NEWA SIGN AVAGRAHA;Lo;0;L;;;;;N;;;;; +11448;NEWA SIGN FINAL ANUSVARA;Lo;0;L;;;;;N;;;;; +11449;NEWA OM;Lo;0;L;;;;;N;;;;; +1144A;NEWA SIDDHI;Lo;0;L;;;;;N;;;;; +1144B;NEWA DANDA;Po;0;L;;;;;N;;;;; +1144C;NEWA DOUBLE DANDA;Po;0;L;;;;;N;;;;; +1144D;NEWA COMMA;Po;0;L;;;;;N;;;;; +1144E;NEWA GAP FILLER;Po;0;L;;;;;N;;;;; +1144F;NEWA ABBREVIATION SIGN;Po;0;L;;;;;N;;;;; +11450;NEWA DIGIT ZERO;Nd;0;L;;0;0;0;N;;;;; +11451;NEWA DIGIT ONE;Nd;0;L;;1;1;1;N;;;;; +11452;NEWA DIGIT TWO;Nd;0;L;;2;2;2;N;;;;; +11453;NEWA DIGIT THREE;Nd;0;L;;3;3;3;N;;;;; +11454;NEWA DIGIT FOUR;Nd;0;L;;4;4;4;N;;;;; +11455;NEWA DIGIT FIVE;Nd;0;L;;5;5;5;N;;;;; +11456;NEWA DIGIT SIX;Nd;0;L;;6;6;6;N;;;;; +11457;NEWA DIGIT SEVEN;Nd;0;L;;7;7;7;N;;;;; +11458;NEWA DIGIT EIGHT;Nd;0;L;;8;8;8;N;;;;; +11459;NEWA DIGIT NINE;Nd;0;L;;9;9;9;N;;;;; +1145B;NEWA PLACEHOLDER MARK;Po;0;L;;;;;N;;;;; +1145D;NEWA INSERTION SIGN;Po;0;L;;;;;N;;;;; 11480;TIRHUTA ANJI;Lo;0;L;;;;;N;;;;; 11481;TIRHUTA LETTER A;Lo;0;L;;;;;N;;;;; 11482;TIRHUTA LETTER AA;Lo;0;L;;;;;N;;;;; @@ -19163,6 +19749,26 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 115C7;SIDDHAM REPETITION MARK-2;Po;0;L;;;;;N;;;;; 115C8;SIDDHAM REPETITION MARK-3;Po;0;L;;;;;N;;;;; 115C9;SIDDHAM END OF TEXT MARK;Po;0;L;;;;;N;;;;; +115CA;SIDDHAM SECTION MARK WITH TRIDENT AND U-SHAPED ORNAMENTS;Po;0;L;;;;;N;;;;; +115CB;SIDDHAM SECTION MARK WITH TRIDENT AND DOTTED CRESCENTS;Po;0;L;;;;;N;;;;; +115CC;SIDDHAM SECTION MARK WITH RAYS AND DOTTED CRESCENTS;Po;0;L;;;;;N;;;;; +115CD;SIDDHAM SECTION MARK WITH RAYS AND DOTTED DOUBLE 
CRESCENTS;Po;0;L;;;;;N;;;;; +115CE;SIDDHAM SECTION MARK WITH RAYS AND DOTTED TRIPLE CRESCENTS;Po;0;L;;;;;N;;;;; +115CF;SIDDHAM SECTION MARK DOUBLE RING;Po;0;L;;;;;N;;;;; +115D0;SIDDHAM SECTION MARK DOUBLE RING WITH RAYS;Po;0;L;;;;;N;;;;; +115D1;SIDDHAM SECTION MARK WITH DOUBLE CRESCENTS;Po;0;L;;;;;N;;;;; +115D2;SIDDHAM SECTION MARK WITH TRIPLE CRESCENTS;Po;0;L;;;;;N;;;;; +115D3;SIDDHAM SECTION MARK WITH QUADRUPLE CRESCENTS;Po;0;L;;;;;N;;;;; +115D4;SIDDHAM SECTION MARK WITH SEPTUPLE CRESCENTS;Po;0;L;;;;;N;;;;; +115D5;SIDDHAM SECTION MARK WITH CIRCLES AND RAYS;Po;0;L;;;;;N;;;;; +115D6;SIDDHAM SECTION MARK WITH CIRCLES AND TWO ENCLOSURES;Po;0;L;;;;;N;;;;; +115D7;SIDDHAM SECTION MARK WITH CIRCLES AND FOUR ENCLOSURES;Po;0;L;;;;;N;;;;; +115D8;SIDDHAM LETTER THREE-CIRCLE ALTERNATE I;Lo;0;L;;;;;N;;;;; +115D9;SIDDHAM LETTER TWO-CIRCLE ALTERNATE I;Lo;0;L;;;;;N;;;;; +115DA;SIDDHAM LETTER TWO-CIRCLE ALTERNATE II;Lo;0;L;;;;;N;;;;; +115DB;SIDDHAM LETTER ALTERNATE U;Lo;0;L;;;;;N;;;;; +115DC;SIDDHAM VOWEL SIGN ALTERNATE U;Mn;0;NSM;;;;;N;;;;; +115DD;SIDDHAM VOWEL SIGN ALTERNATE UU;Mn;0;NSM;;;;;N;;;;; 11600;MODI LETTER A;Lo;0;L;;;;;N;;;;; 11601;MODI LETTER AA;Lo;0;L;;;;;N;;;;; 11602;MODI LETTER I;Lo;0;L;;;;;N;;;;; @@ -19242,6 +19848,19 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 11657;MODI DIGIT SEVEN;Nd;0;L;;7;7;7;N;;;;; 11658;MODI DIGIT EIGHT;Nd;0;L;;8;8;8;N;;;;; 11659;MODI DIGIT NINE;Nd;0;L;;9;9;9;N;;;;; +11660;MONGOLIAN BIRGA WITH ORNAMENT;Po;0;ON;;;;;N;;;;; +11661;MONGOLIAN ROTATED BIRGA;Po;0;ON;;;;;N;;;;; +11662;MONGOLIAN DOUBLE BIRGA WITH ORNAMENT;Po;0;ON;;;;;N;;;;; +11663;MONGOLIAN TRIPLE BIRGA WITH ORNAMENT;Po;0;ON;;;;;N;;;;; +11664;MONGOLIAN BIRGA WITH DOUBLE ORNAMENT;Po;0;ON;;;;;N;;;;; +11665;MONGOLIAN ROTATED BIRGA WITH ORNAMENT;Po;0;ON;;;;;N;;;;; +11666;MONGOLIAN ROTATED BIRGA WITH DOUBLE ORNAMENT;Po;0;ON;;;;;N;;;;; +11667;MONGOLIAN INVERTED BIRGA;Po;0;ON;;;;;N;;;;; +11668;MONGOLIAN INVERTED BIRGA WITH DOUBLE ORNAMENT;Po;0;ON;;;;;N;;;;; +11669;MONGOLIAN SWIRL BIRGA;Po;0;ON;;;;;N;;;;; +1166A;MONGOLIAN SWIRL BIRGA WITH ORNAMENT;Po;0;ON;;;;;N;;;;; +1166B;MONGOLIAN SWIRL BIRGA WITH DOUBLE ORNAMENT;Po;0;ON;;;;;N;;;;; +1166C;MONGOLIAN TURNED SWIRL BIRGA WITH DOUBLE ORNAMENT;Po;0;ON;;;;;N;;;;; 11680;TAKRI LETTER A;Lo;0;L;;;;;N;;;;; 11681;TAKRI LETTER AA;Lo;0;L;;;;;N;;;;; 11682;TAKRI LETTER I;Lo;0;L;;;;;N;;;;; @@ -19308,6 +19927,63 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 116C7;TAKRI DIGIT SEVEN;Nd;0;L;;7;7;7;N;;;;; 116C8;TAKRI DIGIT EIGHT;Nd;0;L;;8;8;8;N;;;;; 116C9;TAKRI DIGIT NINE;Nd;0;L;;9;9;9;N;;;;; +11700;AHOM LETTER KA;Lo;0;L;;;;;N;;;;; +11701;AHOM LETTER KHA;Lo;0;L;;;;;N;;;;; +11702;AHOM LETTER NGA;Lo;0;L;;;;;N;;;;; +11703;AHOM LETTER NA;Lo;0;L;;;;;N;;;;; +11704;AHOM LETTER TA;Lo;0;L;;;;;N;;;;; +11705;AHOM LETTER ALTERNATE TA;Lo;0;L;;;;;N;;;;; +11706;AHOM LETTER PA;Lo;0;L;;;;;N;;;;; +11707;AHOM LETTER PHA;Lo;0;L;;;;;N;;;;; +11708;AHOM LETTER BA;Lo;0;L;;;;;N;;;;; +11709;AHOM LETTER MA;Lo;0;L;;;;;N;;;;; +1170A;AHOM LETTER JA;Lo;0;L;;;;;N;;;;; +1170B;AHOM LETTER CHA;Lo;0;L;;;;;N;;;;; +1170C;AHOM LETTER THA;Lo;0;L;;;;;N;;;;; +1170D;AHOM LETTER RA;Lo;0;L;;;;;N;;;;; +1170E;AHOM LETTER LA;Lo;0;L;;;;;N;;;;; +1170F;AHOM LETTER SA;Lo;0;L;;;;;N;;;;; +11710;AHOM LETTER NYA;Lo;0;L;;;;;N;;;;; +11711;AHOM LETTER HA;Lo;0;L;;;;;N;;;;; +11712;AHOM LETTER A;Lo;0;L;;;;;N;;;;; +11713;AHOM LETTER DA;Lo;0;L;;;;;N;;;;; +11714;AHOM LETTER DHA;Lo;0;L;;;;;N;;;;; +11715;AHOM LETTER GA;Lo;0;L;;;;;N;;;;; +11716;AHOM LETTER ALTERNATE GA;Lo;0;L;;;;;N;;;;; +11717;AHOM LETTER GHA;Lo;0;L;;;;;N;;;;; +11718;AHOM LETTER 
BHA;Lo;0;L;;;;;N;;;;; +11719;AHOM LETTER JHA;Lo;0;L;;;;;N;;;;; +1171D;AHOM CONSONANT SIGN MEDIAL LA;Mn;0;NSM;;;;;N;;;;; +1171E;AHOM CONSONANT SIGN MEDIAL RA;Mn;0;NSM;;;;;N;;;;; +1171F;AHOM CONSONANT SIGN MEDIAL LIGATING RA;Mn;0;NSM;;;;;N;;;;; +11720;AHOM VOWEL SIGN A;Mc;0;L;;;;;N;;;;; +11721;AHOM VOWEL SIGN AA;Mc;0;L;;;;;N;;;;; +11722;AHOM VOWEL SIGN I;Mn;0;NSM;;;;;N;;;;; +11723;AHOM VOWEL SIGN II;Mn;0;NSM;;;;;N;;;;; +11724;AHOM VOWEL SIGN U;Mn;0;NSM;;;;;N;;;;; +11725;AHOM VOWEL SIGN UU;Mn;0;NSM;;;;;N;;;;; +11726;AHOM VOWEL SIGN E;Mc;0;L;;;;;N;;;;; +11727;AHOM VOWEL SIGN AW;Mn;0;NSM;;;;;N;;;;; +11728;AHOM VOWEL SIGN O;Mn;0;NSM;;;;;N;;;;; +11729;AHOM VOWEL SIGN AI;Mn;0;NSM;;;;;N;;;;; +1172A;AHOM VOWEL SIGN AM;Mn;0;NSM;;;;;N;;;;; +1172B;AHOM SIGN KILLER;Mn;9;NSM;;;;;N;;;;; +11730;AHOM DIGIT ZERO;Nd;0;L;;0;0;0;N;;;;; +11731;AHOM DIGIT ONE;Nd;0;L;;1;1;1;N;;;;; +11732;AHOM DIGIT TWO;Nd;0;L;;2;2;2;N;;;;; +11733;AHOM DIGIT THREE;Nd;0;L;;3;3;3;N;;;;; +11734;AHOM DIGIT FOUR;Nd;0;L;;4;4;4;N;;;;; +11735;AHOM DIGIT FIVE;Nd;0;L;;5;5;5;N;;;;; +11736;AHOM DIGIT SIX;Nd;0;L;;6;6;6;N;;;;; +11737;AHOM DIGIT SEVEN;Nd;0;L;;7;7;7;N;;;;; +11738;AHOM DIGIT EIGHT;Nd;0;L;;8;8;8;N;;;;; +11739;AHOM DIGIT NINE;Nd;0;L;;9;9;9;N;;;;; +1173A;AHOM NUMBER TEN;No;0;L;;;;10;N;;;;; +1173B;AHOM NUMBER TWENTY;No;0;L;;;;20;N;;;;; +1173C;AHOM SIGN SMALL SECTION;Po;0;L;;;;;N;;;;; +1173D;AHOM SIGN SECTION;Po;0;L;;;;;N;;;;; +1173E;AHOM SIGN RULAI;Po;0;L;;;;;N;;;;; +1173F;AHOM SYMBOL VI;So;0;L;;;;;N;;;;; 118A0;WARANG CITI CAPITAL LETTER NGAA;Lu;0;L;;;;;N;;;;118C0; 118A1;WARANG CITI CAPITAL LETTER A;Lu;0;L;;;;;N;;;;118C1; 118A2;WARANG CITI CAPITAL LETTER WI;Lu;0;L;;;;;N;;;;118C2; @@ -19449,6 +20125,171 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 11AF6;PAU CIN HAU LOW-FALLING TONE LONG FINAL;Lo;0;L;;;;;N;;;;; 11AF7;PAU CIN HAU LOW-FALLING TONE FINAL;Lo;0;L;;;;;N;;;;; 11AF8;PAU CIN HAU GLOTTAL STOP FINAL;Lo;0;L;;;;;N;;;;; +11C00;BHAIKSUKI LETTER A;Lo;0;L;;;;;N;;;;; +11C01;BHAIKSUKI LETTER AA;Lo;0;L;;;;;N;;;;; +11C02;BHAIKSUKI LETTER I;Lo;0;L;;;;;N;;;;; +11C03;BHAIKSUKI LETTER II;Lo;0;L;;;;;N;;;;; +11C04;BHAIKSUKI LETTER U;Lo;0;L;;;;;N;;;;; +11C05;BHAIKSUKI LETTER UU;Lo;0;L;;;;;N;;;;; +11C06;BHAIKSUKI LETTER VOCALIC R;Lo;0;L;;;;;N;;;;; +11C07;BHAIKSUKI LETTER VOCALIC RR;Lo;0;L;;;;;N;;;;; +11C08;BHAIKSUKI LETTER VOCALIC L;Lo;0;L;;;;;N;;;;; +11C0A;BHAIKSUKI LETTER E;Lo;0;L;;;;;N;;;;; +11C0B;BHAIKSUKI LETTER AI;Lo;0;L;;;;;N;;;;; +11C0C;BHAIKSUKI LETTER O;Lo;0;L;;;;;N;;;;; +11C0D;BHAIKSUKI LETTER AU;Lo;0;L;;;;;N;;;;; +11C0E;BHAIKSUKI LETTER KA;Lo;0;L;;;;;N;;;;; +11C0F;BHAIKSUKI LETTER KHA;Lo;0;L;;;;;N;;;;; +11C10;BHAIKSUKI LETTER GA;Lo;0;L;;;;;N;;;;; +11C11;BHAIKSUKI LETTER GHA;Lo;0;L;;;;;N;;;;; +11C12;BHAIKSUKI LETTER NGA;Lo;0;L;;;;;N;;;;; +11C13;BHAIKSUKI LETTER CA;Lo;0;L;;;;;N;;;;; +11C14;BHAIKSUKI LETTER CHA;Lo;0;L;;;;;N;;;;; +11C15;BHAIKSUKI LETTER JA;Lo;0;L;;;;;N;;;;; +11C16;BHAIKSUKI LETTER JHA;Lo;0;L;;;;;N;;;;; +11C17;BHAIKSUKI LETTER NYA;Lo;0;L;;;;;N;;;;; +11C18;BHAIKSUKI LETTER TTA;Lo;0;L;;;;;N;;;;; +11C19;BHAIKSUKI LETTER TTHA;Lo;0;L;;;;;N;;;;; +11C1A;BHAIKSUKI LETTER DDA;Lo;0;L;;;;;N;;;;; +11C1B;BHAIKSUKI LETTER DDHA;Lo;0;L;;;;;N;;;;; +11C1C;BHAIKSUKI LETTER NNA;Lo;0;L;;;;;N;;;;; +11C1D;BHAIKSUKI LETTER TA;Lo;0;L;;;;;N;;;;; +11C1E;BHAIKSUKI LETTER THA;Lo;0;L;;;;;N;;;;; +11C1F;BHAIKSUKI LETTER DA;Lo;0;L;;;;;N;;;;; +11C20;BHAIKSUKI LETTER DHA;Lo;0;L;;;;;N;;;;; +11C21;BHAIKSUKI LETTER NA;Lo;0;L;;;;;N;;;;; +11C22;BHAIKSUKI LETTER PA;Lo;0;L;;;;;N;;;;; +11C23;BHAIKSUKI LETTER PHA;Lo;0;L;;;;;N;;;;; +11C24;BHAIKSUKI LETTER 
BA;Lo;0;L;;;;;N;;;;; +11C25;BHAIKSUKI LETTER BHA;Lo;0;L;;;;;N;;;;; +11C26;BHAIKSUKI LETTER MA;Lo;0;L;;;;;N;;;;; +11C27;BHAIKSUKI LETTER YA;Lo;0;L;;;;;N;;;;; +11C28;BHAIKSUKI LETTER RA;Lo;0;L;;;;;N;;;;; +11C29;BHAIKSUKI LETTER LA;Lo;0;L;;;;;N;;;;; +11C2A;BHAIKSUKI LETTER VA;Lo;0;L;;;;;N;;;;; +11C2B;BHAIKSUKI LETTER SHA;Lo;0;L;;;;;N;;;;; +11C2C;BHAIKSUKI LETTER SSA;Lo;0;L;;;;;N;;;;; +11C2D;BHAIKSUKI LETTER SA;Lo;0;L;;;;;N;;;;; +11C2E;BHAIKSUKI LETTER HA;Lo;0;L;;;;;N;;;;; +11C2F;BHAIKSUKI VOWEL SIGN AA;Mc;0;L;;;;;N;;;;; +11C30;BHAIKSUKI VOWEL SIGN I;Mn;0;NSM;;;;;N;;;;; +11C31;BHAIKSUKI VOWEL SIGN II;Mn;0;NSM;;;;;N;;;;; +11C32;BHAIKSUKI VOWEL SIGN U;Mn;0;NSM;;;;;N;;;;; +11C33;BHAIKSUKI VOWEL SIGN UU;Mn;0;NSM;;;;;N;;;;; +11C34;BHAIKSUKI VOWEL SIGN VOCALIC R;Mn;0;NSM;;;;;N;;;;; +11C35;BHAIKSUKI VOWEL SIGN VOCALIC RR;Mn;0;NSM;;;;;N;;;;; +11C36;BHAIKSUKI VOWEL SIGN VOCALIC L;Mn;0;NSM;;;;;N;;;;; +11C38;BHAIKSUKI VOWEL SIGN E;Mn;0;NSM;;;;;N;;;;; +11C39;BHAIKSUKI VOWEL SIGN AI;Mn;0;NSM;;;;;N;;;;; +11C3A;BHAIKSUKI VOWEL SIGN O;Mn;0;NSM;;;;;N;;;;; +11C3B;BHAIKSUKI VOWEL SIGN AU;Mn;0;NSM;;;;;N;;;;; +11C3C;BHAIKSUKI SIGN CANDRABINDU;Mn;0;NSM;;;;;N;;;;; +11C3D;BHAIKSUKI SIGN ANUSVARA;Mn;0;NSM;;;;;N;;;;; +11C3E;BHAIKSUKI SIGN VISARGA;Mc;0;L;;;;;N;;;;; +11C3F;BHAIKSUKI SIGN VIRAMA;Mn;9;L;;;;;N;;;;; +11C40;BHAIKSUKI SIGN AVAGRAHA;Lo;0;L;;;;;N;;;;; +11C41;BHAIKSUKI DANDA;Po;0;L;;;;;N;;;;; +11C42;BHAIKSUKI DOUBLE DANDA;Po;0;L;;;;;N;;;;; +11C43;BHAIKSUKI WORD SEPARATOR;Po;0;L;;;;;N;;;;; +11C44;BHAIKSUKI GAP FILLER-1;Po;0;L;;;;;N;;;;; +11C45;BHAIKSUKI GAP FILLER-2;Po;0;L;;;;;N;;;;; +11C50;BHAIKSUKI DIGIT ZERO;Nd;0;L;;0;0;0;N;;;;; +11C51;BHAIKSUKI DIGIT ONE;Nd;0;L;;1;1;1;N;;;;; +11C52;BHAIKSUKI DIGIT TWO;Nd;0;L;;2;2;2;N;;;;; +11C53;BHAIKSUKI DIGIT THREE;Nd;0;L;;3;3;3;N;;;;; +11C54;BHAIKSUKI DIGIT FOUR;Nd;0;L;;4;4;4;N;;;;; +11C55;BHAIKSUKI DIGIT FIVE;Nd;0;L;;5;5;5;N;;;;; +11C56;BHAIKSUKI DIGIT SIX;Nd;0;L;;6;6;6;N;;;;; +11C57;BHAIKSUKI DIGIT SEVEN;Nd;0;L;;7;7;7;N;;;;; +11C58;BHAIKSUKI DIGIT EIGHT;Nd;0;L;;8;8;8;N;;;;; +11C59;BHAIKSUKI DIGIT NINE;Nd;0;L;;9;9;9;N;;;;; +11C5A;BHAIKSUKI NUMBER ONE;No;0;L;;;;1;N;;;;; +11C5B;BHAIKSUKI NUMBER TWO;No;0;L;;;;2;N;;;;; +11C5C;BHAIKSUKI NUMBER THREE;No;0;L;;;;3;N;;;;; +11C5D;BHAIKSUKI NUMBER FOUR;No;0;L;;;;4;N;;;;; +11C5E;BHAIKSUKI NUMBER FIVE;No;0;L;;;;5;N;;;;; +11C5F;BHAIKSUKI NUMBER SIX;No;0;L;;;;6;N;;;;; +11C60;BHAIKSUKI NUMBER SEVEN;No;0;L;;;;7;N;;;;; +11C61;BHAIKSUKI NUMBER EIGHT;No;0;L;;;;8;N;;;;; +11C62;BHAIKSUKI NUMBER NINE;No;0;L;;;;9;N;;;;; +11C63;BHAIKSUKI NUMBER TEN;No;0;L;;;;10;N;;;;; +11C64;BHAIKSUKI NUMBER TWENTY;No;0;L;;;;20;N;;;;; +11C65;BHAIKSUKI NUMBER THIRTY;No;0;L;;;;30;N;;;;; +11C66;BHAIKSUKI NUMBER FORTY;No;0;L;;;;40;N;;;;; +11C67;BHAIKSUKI NUMBER FIFTY;No;0;L;;;;50;N;;;;; +11C68;BHAIKSUKI NUMBER SIXTY;No;0;L;;;;60;N;;;;; +11C69;BHAIKSUKI NUMBER SEVENTY;No;0;L;;;;70;N;;;;; +11C6A;BHAIKSUKI NUMBER EIGHTY;No;0;L;;;;80;N;;;;; +11C6B;BHAIKSUKI NUMBER NINETY;No;0;L;;;;90;N;;;;; +11C6C;BHAIKSUKI HUNDREDS UNIT MARK;No;0;L;;;;100;N;;;;; +11C70;MARCHEN HEAD MARK;Po;0;L;;;;;N;;;;; +11C71;MARCHEN MARK SHAD;Po;0;L;;;;;N;;;;; +11C72;MARCHEN LETTER KA;Lo;0;L;;;;;N;;;;; +11C73;MARCHEN LETTER KHA;Lo;0;L;;;;;N;;;;; +11C74;MARCHEN LETTER GA;Lo;0;L;;;;;N;;;;; +11C75;MARCHEN LETTER NGA;Lo;0;L;;;;;N;;;;; +11C76;MARCHEN LETTER CA;Lo;0;L;;;;;N;;;;; +11C77;MARCHEN LETTER CHA;Lo;0;L;;;;;N;;;;; +11C78;MARCHEN LETTER JA;Lo;0;L;;;;;N;;;;; +11C79;MARCHEN LETTER NYA;Lo;0;L;;;;;N;;;;; +11C7A;MARCHEN LETTER TA;Lo;0;L;;;;;N;;;;; +11C7B;MARCHEN LETTER THA;Lo;0;L;;;;;N;;;;; +11C7C;MARCHEN 
LETTER DA;Lo;0;L;;;;;N;;;;; +11C7D;MARCHEN LETTER NA;Lo;0;L;;;;;N;;;;; +11C7E;MARCHEN LETTER PA;Lo;0;L;;;;;N;;;;; +11C7F;MARCHEN LETTER PHA;Lo;0;L;;;;;N;;;;; +11C80;MARCHEN LETTER BA;Lo;0;L;;;;;N;;;;; +11C81;MARCHEN LETTER MA;Lo;0;L;;;;;N;;;;; +11C82;MARCHEN LETTER TSA;Lo;0;L;;;;;N;;;;; +11C83;MARCHEN LETTER TSHA;Lo;0;L;;;;;N;;;;; +11C84;MARCHEN LETTER DZA;Lo;0;L;;;;;N;;;;; +11C85;MARCHEN LETTER WA;Lo;0;L;;;;;N;;;;; +11C86;MARCHEN LETTER ZHA;Lo;0;L;;;;;N;;;;; +11C87;MARCHEN LETTER ZA;Lo;0;L;;;;;N;;;;; +11C88;MARCHEN LETTER -A;Lo;0;L;;;;;N;;;;; +11C89;MARCHEN LETTER YA;Lo;0;L;;;;;N;;;;; +11C8A;MARCHEN LETTER RA;Lo;0;L;;;;;N;;;;; +11C8B;MARCHEN LETTER LA;Lo;0;L;;;;;N;;;;; +11C8C;MARCHEN LETTER SHA;Lo;0;L;;;;;N;;;;; +11C8D;MARCHEN LETTER SA;Lo;0;L;;;;;N;;;;; +11C8E;MARCHEN LETTER HA;Lo;0;L;;;;;N;;;;; +11C8F;MARCHEN LETTER A;Lo;0;L;;;;;N;;;;; +11C92;MARCHEN SUBJOINED LETTER KA;Mn;0;NSM;;;;;N;;;;; +11C93;MARCHEN SUBJOINED LETTER KHA;Mn;0;NSM;;;;;N;;;;; +11C94;MARCHEN SUBJOINED LETTER GA;Mn;0;NSM;;;;;N;;;;; +11C95;MARCHEN SUBJOINED LETTER NGA;Mn;0;NSM;;;;;N;;;;; +11C96;MARCHEN SUBJOINED LETTER CA;Mn;0;NSM;;;;;N;;;;; +11C97;MARCHEN SUBJOINED LETTER CHA;Mn;0;NSM;;;;;N;;;;; +11C98;MARCHEN SUBJOINED LETTER JA;Mn;0;NSM;;;;;N;;;;; +11C99;MARCHEN SUBJOINED LETTER NYA;Mn;0;NSM;;;;;N;;;;; +11C9A;MARCHEN SUBJOINED LETTER TA;Mn;0;NSM;;;;;N;;;;; +11C9B;MARCHEN SUBJOINED LETTER THA;Mn;0;NSM;;;;;N;;;;; +11C9C;MARCHEN SUBJOINED LETTER DA;Mn;0;NSM;;;;;N;;;;; +11C9D;MARCHEN SUBJOINED LETTER NA;Mn;0;NSM;;;;;N;;;;; +11C9E;MARCHEN SUBJOINED LETTER PA;Mn;0;NSM;;;;;N;;;;; +11C9F;MARCHEN SUBJOINED LETTER PHA;Mn;0;NSM;;;;;N;;;;; +11CA0;MARCHEN SUBJOINED LETTER BA;Mn;0;NSM;;;;;N;;;;; +11CA1;MARCHEN SUBJOINED LETTER MA;Mn;0;NSM;;;;;N;;;;; +11CA2;MARCHEN SUBJOINED LETTER TSA;Mn;0;NSM;;;;;N;;;;; +11CA3;MARCHEN SUBJOINED LETTER TSHA;Mn;0;NSM;;;;;N;;;;; +11CA4;MARCHEN SUBJOINED LETTER DZA;Mn;0;NSM;;;;;N;;;;; +11CA5;MARCHEN SUBJOINED LETTER WA;Mn;0;NSM;;;;;N;;;;; +11CA6;MARCHEN SUBJOINED LETTER ZHA;Mn;0;NSM;;;;;N;;;;; +11CA7;MARCHEN SUBJOINED LETTER ZA;Mn;0;NSM;;;;;N;;;;; +11CA9;MARCHEN SUBJOINED LETTER YA;Mc;0;L;;;;;N;;;;; +11CAA;MARCHEN SUBJOINED LETTER RA;Mn;0;NSM;;;;;N;;;;; +11CAB;MARCHEN SUBJOINED LETTER LA;Mn;0;NSM;;;;;N;;;;; +11CAC;MARCHEN SUBJOINED LETTER SHA;Mn;0;NSM;;;;;N;;;;; +11CAD;MARCHEN SUBJOINED LETTER SA;Mn;0;NSM;;;;;N;;;;; +11CAE;MARCHEN SUBJOINED LETTER HA;Mn;0;NSM;;;;;N;;;;; +11CAF;MARCHEN SUBJOINED LETTER A;Mn;0;NSM;;;;;N;;;;; +11CB0;MARCHEN VOWEL SIGN AA;Mn;0;NSM;;;;;N;;;;; +11CB1;MARCHEN VOWEL SIGN I;Mc;0;L;;;;;N;;;;; +11CB2;MARCHEN VOWEL SIGN U;Mn;0;NSM;;;;;N;;;;; +11CB3;MARCHEN VOWEL SIGN E;Mn;0;NSM;;;;;N;;;;; +11CB4;MARCHEN VOWEL SIGN O;Mc;0;L;;;;;N;;;;; +11CB5;MARCHEN SIGN ANUSVARA;Mn;0;NSM;;;;;N;;;;; +11CB6;MARCHEN SIGN CANDRABINDU;Mn;0;NSM;;;;;N;;;;; 12000;CUNEIFORM SIGN A;Lo;0;L;;;;;N;;;;; 12001;CUNEIFORM SIGN A TIMES A;Lo;0;L;;;;;N;;;;; 12002;CUNEIFORM SIGN A TIMES BAD;Lo;0;L;;;;;N;;;;; @@ -20370,6 +21211,7 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 12396;CUNEIFORM SIGN SAG TIMES IGI GUNU;Lo;0;L;;;;;N;;;;; 12397;CUNEIFORM SIGN TI2;Lo;0;L;;;;;N;;;;; 12398;CUNEIFORM SIGN UM TIMES ME;Lo;0;L;;;;;N;;;;; +12399;CUNEIFORM SIGN U U;Lo;0;L;;;;;N;;;;; 12400;CUNEIFORM NUMERIC SIGN TWO ASH;Nl;0;L;;;;2;N;;;;; 12401;CUNEIFORM NUMERIC SIGN THREE ASH;Nl;0;L;;;;3;N;;;;; 12402;CUNEIFORM NUMERIC SIGN FOUR ASH;Nl;0;L;;;;4;N;;;;; @@ -20486,6 +21328,202 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 12472;CUNEIFORM PUNCTUATION SIGN DIAGONAL COLON;Po;0;L;;;;;N;;;;; 12473;CUNEIFORM PUNCTUATION SIGN DIAGONAL 
TRICOLON;Po;0;L;;;;;N;;;;; 12474;CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON;Po;0;L;;;;;N;;;;; +12480;CUNEIFORM SIGN AB TIMES NUN TENU;Lo;0;L;;;;;N;;;;; +12481;CUNEIFORM SIGN AB TIMES SHU2;Lo;0;L;;;;;N;;;;; +12482;CUNEIFORM SIGN AD TIMES ESH2;Lo;0;L;;;;;N;;;;; +12483;CUNEIFORM SIGN BAD TIMES DISH TENU;Lo;0;L;;;;;N;;;;; +12484;CUNEIFORM SIGN BAHAR2 TIMES AB2;Lo;0;L;;;;;N;;;;; +12485;CUNEIFORM SIGN BAHAR2 TIMES NI;Lo;0;L;;;;;N;;;;; +12486;CUNEIFORM SIGN BAHAR2 TIMES ZA;Lo;0;L;;;;;N;;;;; +12487;CUNEIFORM SIGN BU OVER BU TIMES NA2;Lo;0;L;;;;;N;;;;; +12488;CUNEIFORM SIGN DA TIMES TAK4;Lo;0;L;;;;;N;;;;; +12489;CUNEIFORM SIGN DAG TIMES KUR;Lo;0;L;;;;;N;;;;; +1248A;CUNEIFORM SIGN DIM TIMES IGI;Lo;0;L;;;;;N;;;;; +1248B;CUNEIFORM SIGN DIM TIMES U U U;Lo;0;L;;;;;N;;;;; +1248C;CUNEIFORM SIGN DIM2 TIMES UD;Lo;0;L;;;;;N;;;;; +1248D;CUNEIFORM SIGN DUG TIMES ANSHE;Lo;0;L;;;;;N;;;;; +1248E;CUNEIFORM SIGN DUG TIMES ASH;Lo;0;L;;;;;N;;;;; +1248F;CUNEIFORM SIGN DUG TIMES ASH AT LEFT;Lo;0;L;;;;;N;;;;; +12490;CUNEIFORM SIGN DUG TIMES DIN;Lo;0;L;;;;;N;;;;; +12491;CUNEIFORM SIGN DUG TIMES DUN;Lo;0;L;;;;;N;;;;; +12492;CUNEIFORM SIGN DUG TIMES ERIN2;Lo;0;L;;;;;N;;;;; +12493;CUNEIFORM SIGN DUG TIMES GA;Lo;0;L;;;;;N;;;;; +12494;CUNEIFORM SIGN DUG TIMES GI;Lo;0;L;;;;;N;;;;; +12495;CUNEIFORM SIGN DUG TIMES GIR2 GUNU;Lo;0;L;;;;;N;;;;; +12496;CUNEIFORM SIGN DUG TIMES GISH;Lo;0;L;;;;;N;;;;; +12497;CUNEIFORM SIGN DUG TIMES HA;Lo;0;L;;;;;N;;;;; +12498;CUNEIFORM SIGN DUG TIMES HI;Lo;0;L;;;;;N;;;;; +12499;CUNEIFORM SIGN DUG TIMES IGI GUNU;Lo;0;L;;;;;N;;;;; +1249A;CUNEIFORM SIGN DUG TIMES KASKAL;Lo;0;L;;;;;N;;;;; +1249B;CUNEIFORM SIGN DUG TIMES KUR;Lo;0;L;;;;;N;;;;; +1249C;CUNEIFORM SIGN DUG TIMES KUSHU2;Lo;0;L;;;;;N;;;;; +1249D;CUNEIFORM SIGN DUG TIMES KUSHU2 PLUS KASKAL;Lo;0;L;;;;;N;;;;; +1249E;CUNEIFORM SIGN DUG TIMES LAK-020;Lo;0;L;;;;;N;;;;; +1249F;CUNEIFORM SIGN DUG TIMES LAM;Lo;0;L;;;;;N;;;;; +124A0;CUNEIFORM SIGN DUG TIMES LAM TIMES KUR;Lo;0;L;;;;;N;;;;; +124A1;CUNEIFORM SIGN DUG TIMES LUH PLUS GISH;Lo;0;L;;;;;N;;;;; +124A2;CUNEIFORM SIGN DUG TIMES MASH;Lo;0;L;;;;;N;;;;; +124A3;CUNEIFORM SIGN DUG TIMES MES;Lo;0;L;;;;;N;;;;; +124A4;CUNEIFORM SIGN DUG TIMES MI;Lo;0;L;;;;;N;;;;; +124A5;CUNEIFORM SIGN DUG TIMES NI;Lo;0;L;;;;;N;;;;; +124A6;CUNEIFORM SIGN DUG TIMES PI;Lo;0;L;;;;;N;;;;; +124A7;CUNEIFORM SIGN DUG TIMES SHE;Lo;0;L;;;;;N;;;;; +124A8;CUNEIFORM SIGN DUG TIMES SI GUNU;Lo;0;L;;;;;N;;;;; +124A9;CUNEIFORM SIGN E2 TIMES KUR;Lo;0;L;;;;;N;;;;; +124AA;CUNEIFORM SIGN E2 TIMES PAP;Lo;0;L;;;;;N;;;;; +124AB;CUNEIFORM SIGN ERIN2 X;Lo;0;L;;;;;N;;;;; +124AC;CUNEIFORM SIGN ESH2 CROSSING ESH2;Lo;0;L;;;;;N;;;;; +124AD;CUNEIFORM SIGN EZEN SHESHIG TIMES ASH;Lo;0;L;;;;;N;;;;; +124AE;CUNEIFORM SIGN EZEN SHESHIG TIMES HI;Lo;0;L;;;;;N;;;;; +124AF;CUNEIFORM SIGN EZEN SHESHIG TIMES IGI GUNU;Lo;0;L;;;;;N;;;;; +124B0;CUNEIFORM SIGN EZEN SHESHIG TIMES LA;Lo;0;L;;;;;N;;;;; +124B1;CUNEIFORM SIGN EZEN SHESHIG TIMES LAL;Lo;0;L;;;;;N;;;;; +124B2;CUNEIFORM SIGN EZEN SHESHIG TIMES ME;Lo;0;L;;;;;N;;;;; +124B3;CUNEIFORM SIGN EZEN SHESHIG TIMES MES;Lo;0;L;;;;;N;;;;; +124B4;CUNEIFORM SIGN EZEN SHESHIG TIMES SU;Lo;0;L;;;;;N;;;;; +124B5;CUNEIFORM SIGN EZEN TIMES SU;Lo;0;L;;;;;N;;;;; +124B6;CUNEIFORM SIGN GA2 TIMES BAHAR2;Lo;0;L;;;;;N;;;;; +124B7;CUNEIFORM SIGN GA2 TIMES DIM GUNU;Lo;0;L;;;;;N;;;;; +124B8;CUNEIFORM SIGN GA2 TIMES DUG TIMES IGI GUNU;Lo;0;L;;;;;N;;;;; +124B9;CUNEIFORM SIGN GA2 TIMES DUG TIMES KASKAL;Lo;0;L;;;;;N;;;;; +124BA;CUNEIFORM SIGN GA2 TIMES EREN;Lo;0;L;;;;;N;;;;; +124BB;CUNEIFORM SIGN GA2 TIMES GA;Lo;0;L;;;;;N;;;;; +124BC;CUNEIFORM 
SIGN GA2 TIMES GAR PLUS DI;Lo;0;L;;;;;N;;;;; +124BD;CUNEIFORM SIGN GA2 TIMES GAR PLUS NE;Lo;0;L;;;;;N;;;;; +124BE;CUNEIFORM SIGN GA2 TIMES HA PLUS A;Lo;0;L;;;;;N;;;;; +124BF;CUNEIFORM SIGN GA2 TIMES KUSHU2 PLUS KASKAL;Lo;0;L;;;;;N;;;;; +124C0;CUNEIFORM SIGN GA2 TIMES LAM;Lo;0;L;;;;;N;;;;; +124C1;CUNEIFORM SIGN GA2 TIMES LAM TIMES KUR;Lo;0;L;;;;;N;;;;; +124C2;CUNEIFORM SIGN GA2 TIMES LUH;Lo;0;L;;;;;N;;;;; +124C3;CUNEIFORM SIGN GA2 TIMES MUSH;Lo;0;L;;;;;N;;;;; +124C4;CUNEIFORM SIGN GA2 TIMES NE;Lo;0;L;;;;;N;;;;; +124C5;CUNEIFORM SIGN GA2 TIMES NE PLUS E2;Lo;0;L;;;;;N;;;;; +124C6;CUNEIFORM SIGN GA2 TIMES NE PLUS GI;Lo;0;L;;;;;N;;;;; +124C7;CUNEIFORM SIGN GA2 TIMES SHIM;Lo;0;L;;;;;N;;;;; +124C8;CUNEIFORM SIGN GA2 TIMES ZIZ2;Lo;0;L;;;;;N;;;;; +124C9;CUNEIFORM SIGN GABA ROTATED NINETY DEGREES;Lo;0;L;;;;;N;;;;; +124CA;CUNEIFORM SIGN GESHTIN TIMES U;Lo;0;L;;;;;N;;;;; +124CB;CUNEIFORM SIGN GISH TIMES GISH CROSSING GISH;Lo;0;L;;;;;N;;;;; +124CC;CUNEIFORM SIGN GU2 TIMES IGI GUNU;Lo;0;L;;;;;N;;;;; +124CD;CUNEIFORM SIGN GUD PLUS GISH TIMES TAK4;Lo;0;L;;;;;N;;;;; +124CE;CUNEIFORM SIGN HA TENU GUNU;Lo;0;L;;;;;N;;;;; +124CF;CUNEIFORM SIGN HI TIMES ASH OVER HI TIMES ASH;Lo;0;L;;;;;N;;;;; +124D0;CUNEIFORM SIGN KA TIMES BU;Lo;0;L;;;;;N;;;;; +124D1;CUNEIFORM SIGN KA TIMES KA;Lo;0;L;;;;;N;;;;; +124D2;CUNEIFORM SIGN KA TIMES U U U;Lo;0;L;;;;;N;;;;; +124D3;CUNEIFORM SIGN KA TIMES UR;Lo;0;L;;;;;N;;;;; +124D4;CUNEIFORM SIGN LAGAB TIMES ZU OVER ZU;Lo;0;L;;;;;N;;;;; +124D5;CUNEIFORM SIGN LAK-003;Lo;0;L;;;;;N;;;;; +124D6;CUNEIFORM SIGN LAK-021;Lo;0;L;;;;;N;;;;; +124D7;CUNEIFORM SIGN LAK-025;Lo;0;L;;;;;N;;;;; +124D8;CUNEIFORM SIGN LAK-030;Lo;0;L;;;;;N;;;;; +124D9;CUNEIFORM SIGN LAK-050;Lo;0;L;;;;;N;;;;; +124DA;CUNEIFORM SIGN LAK-051;Lo;0;L;;;;;N;;;;; +124DB;CUNEIFORM SIGN LAK-062;Lo;0;L;;;;;N;;;;; +124DC;CUNEIFORM SIGN LAK-079 OVER LAK-079 GUNU;Lo;0;L;;;;;N;;;;; +124DD;CUNEIFORM SIGN LAK-080;Lo;0;L;;;;;N;;;;; +124DE;CUNEIFORM SIGN LAK-081 OVER LAK-081;Lo;0;L;;;;;N;;;;; +124DF;CUNEIFORM SIGN LAK-092;Lo;0;L;;;;;N;;;;; +124E0;CUNEIFORM SIGN LAK-130;Lo;0;L;;;;;N;;;;; +124E1;CUNEIFORM SIGN LAK-142;Lo;0;L;;;;;N;;;;; +124E2;CUNEIFORM SIGN LAK-210;Lo;0;L;;;;;N;;;;; +124E3;CUNEIFORM SIGN LAK-219;Lo;0;L;;;;;N;;;;; +124E4;CUNEIFORM SIGN LAK-220;Lo;0;L;;;;;N;;;;; +124E5;CUNEIFORM SIGN LAK-225;Lo;0;L;;;;;N;;;;; +124E6;CUNEIFORM SIGN LAK-228;Lo;0;L;;;;;N;;;;; +124E7;CUNEIFORM SIGN LAK-238;Lo;0;L;;;;;N;;;;; +124E8;CUNEIFORM SIGN LAK-265;Lo;0;L;;;;;N;;;;; +124E9;CUNEIFORM SIGN LAK-266;Lo;0;L;;;;;N;;;;; +124EA;CUNEIFORM SIGN LAK-343;Lo;0;L;;;;;N;;;;; +124EB;CUNEIFORM SIGN LAK-347;Lo;0;L;;;;;N;;;;; +124EC;CUNEIFORM SIGN LAK-348;Lo;0;L;;;;;N;;;;; +124ED;CUNEIFORM SIGN LAK-383;Lo;0;L;;;;;N;;;;; +124EE;CUNEIFORM SIGN LAK-384;Lo;0;L;;;;;N;;;;; +124EF;CUNEIFORM SIGN LAK-390;Lo;0;L;;;;;N;;;;; +124F0;CUNEIFORM SIGN LAK-441;Lo;0;L;;;;;N;;;;; +124F1;CUNEIFORM SIGN LAK-449;Lo;0;L;;;;;N;;;;; +124F2;CUNEIFORM SIGN LAK-449 TIMES GU;Lo;0;L;;;;;N;;;;; +124F3;CUNEIFORM SIGN LAK-449 TIMES IGI;Lo;0;L;;;;;N;;;;; +124F4;CUNEIFORM SIGN LAK-449 TIMES PAP PLUS LU3;Lo;0;L;;;;;N;;;;; +124F5;CUNEIFORM SIGN LAK-449 TIMES PAP PLUS PAP PLUS LU3;Lo;0;L;;;;;N;;;;; +124F6;CUNEIFORM SIGN LAK-449 TIMES U2 PLUS BA;Lo;0;L;;;;;N;;;;; +124F7;CUNEIFORM SIGN LAK-450;Lo;0;L;;;;;N;;;;; +124F8;CUNEIFORM SIGN LAK-457;Lo;0;L;;;;;N;;;;; +124F9;CUNEIFORM SIGN LAK-470;Lo;0;L;;;;;N;;;;; +124FA;CUNEIFORM SIGN LAK-483;Lo;0;L;;;;;N;;;;; +124FB;CUNEIFORM SIGN LAK-490;Lo;0;L;;;;;N;;;;; +124FC;CUNEIFORM SIGN LAK-492;Lo;0;L;;;;;N;;;;; +124FD;CUNEIFORM SIGN LAK-493;Lo;0;L;;;;;N;;;;; 
+124FE;CUNEIFORM SIGN LAK-495;Lo;0;L;;;;;N;;;;; +124FF;CUNEIFORM SIGN LAK-550;Lo;0;L;;;;;N;;;;; +12500;CUNEIFORM SIGN LAK-608;Lo;0;L;;;;;N;;;;; +12501;CUNEIFORM SIGN LAK-617;Lo;0;L;;;;;N;;;;; +12502;CUNEIFORM SIGN LAK-617 TIMES ASH;Lo;0;L;;;;;N;;;;; +12503;CUNEIFORM SIGN LAK-617 TIMES BAD;Lo;0;L;;;;;N;;;;; +12504;CUNEIFORM SIGN LAK-617 TIMES DUN3 GUNU GUNU;Lo;0;L;;;;;N;;;;; +12505;CUNEIFORM SIGN LAK-617 TIMES KU3;Lo;0;L;;;;;N;;;;; +12506;CUNEIFORM SIGN LAK-617 TIMES LA;Lo;0;L;;;;;N;;;;; +12507;CUNEIFORM SIGN LAK-617 TIMES TAR;Lo;0;L;;;;;N;;;;; +12508;CUNEIFORM SIGN LAK-617 TIMES TE;Lo;0;L;;;;;N;;;;; +12509;CUNEIFORM SIGN LAK-617 TIMES U2;Lo;0;L;;;;;N;;;;; +1250A;CUNEIFORM SIGN LAK-617 TIMES UD;Lo;0;L;;;;;N;;;;; +1250B;CUNEIFORM SIGN LAK-617 TIMES URUDA;Lo;0;L;;;;;N;;;;; +1250C;CUNEIFORM SIGN LAK-636;Lo;0;L;;;;;N;;;;; +1250D;CUNEIFORM SIGN LAK-648;Lo;0;L;;;;;N;;;;; +1250E;CUNEIFORM SIGN LAK-648 TIMES DUB;Lo;0;L;;;;;N;;;;; +1250F;CUNEIFORM SIGN LAK-648 TIMES GA;Lo;0;L;;;;;N;;;;; +12510;CUNEIFORM SIGN LAK-648 TIMES IGI;Lo;0;L;;;;;N;;;;; +12511;CUNEIFORM SIGN LAK-648 TIMES IGI GUNU;Lo;0;L;;;;;N;;;;; +12512;CUNEIFORM SIGN LAK-648 TIMES NI;Lo;0;L;;;;;N;;;;; +12513;CUNEIFORM SIGN LAK-648 TIMES PAP PLUS PAP PLUS LU3;Lo;0;L;;;;;N;;;;; +12514;CUNEIFORM SIGN LAK-648 TIMES SHESH PLUS KI;Lo;0;L;;;;;N;;;;; +12515;CUNEIFORM SIGN LAK-648 TIMES UD;Lo;0;L;;;;;N;;;;; +12516;CUNEIFORM SIGN LAK-648 TIMES URUDA;Lo;0;L;;;;;N;;;;; +12517;CUNEIFORM SIGN LAK-724;Lo;0;L;;;;;N;;;;; +12518;CUNEIFORM SIGN LAK-749;Lo;0;L;;;;;N;;;;; +12519;CUNEIFORM SIGN LU2 GUNU TIMES ASH;Lo;0;L;;;;;N;;;;; +1251A;CUNEIFORM SIGN LU2 TIMES DISH;Lo;0;L;;;;;N;;;;; +1251B;CUNEIFORM SIGN LU2 TIMES HAL;Lo;0;L;;;;;N;;;;; +1251C;CUNEIFORM SIGN LU2 TIMES PAP;Lo;0;L;;;;;N;;;;; +1251D;CUNEIFORM SIGN LU2 TIMES PAP PLUS PAP PLUS LU3;Lo;0;L;;;;;N;;;;; +1251E;CUNEIFORM SIGN LU2 TIMES TAK4;Lo;0;L;;;;;N;;;;; +1251F;CUNEIFORM SIGN MI PLUS ZA7;Lo;0;L;;;;;N;;;;; +12520;CUNEIFORM SIGN MUSH OVER MUSH TIMES GA;Lo;0;L;;;;;N;;;;; +12521;CUNEIFORM SIGN MUSH OVER MUSH TIMES KAK;Lo;0;L;;;;;N;;;;; +12522;CUNEIFORM SIGN NINDA2 TIMES DIM GUNU;Lo;0;L;;;;;N;;;;; +12523;CUNEIFORM SIGN NINDA2 TIMES GISH;Lo;0;L;;;;;N;;;;; +12524;CUNEIFORM SIGN NINDA2 TIMES GUL;Lo;0;L;;;;;N;;;;; +12525;CUNEIFORM SIGN NINDA2 TIMES HI;Lo;0;L;;;;;N;;;;; +12526;CUNEIFORM SIGN NINDA2 TIMES KESH2;Lo;0;L;;;;;N;;;;; +12527;CUNEIFORM SIGN NINDA2 TIMES LAK-050;Lo;0;L;;;;;N;;;;; +12528;CUNEIFORM SIGN NINDA2 TIMES MASH;Lo;0;L;;;;;N;;;;; +12529;CUNEIFORM SIGN NINDA2 TIMES PAP PLUS PAP;Lo;0;L;;;;;N;;;;; +1252A;CUNEIFORM SIGN NINDA2 TIMES U;Lo;0;L;;;;;N;;;;; +1252B;CUNEIFORM SIGN NINDA2 TIMES U PLUS U;Lo;0;L;;;;;N;;;;; +1252C;CUNEIFORM SIGN NINDA2 TIMES URUDA;Lo;0;L;;;;;N;;;;; +1252D;CUNEIFORM SIGN SAG GUNU TIMES HA;Lo;0;L;;;;;N;;;;; +1252E;CUNEIFORM SIGN SAG TIMES EN;Lo;0;L;;;;;N;;;;; +1252F;CUNEIFORM SIGN SAG TIMES SHE AT LEFT;Lo;0;L;;;;;N;;;;; +12530;CUNEIFORM SIGN SAG TIMES TAK4;Lo;0;L;;;;;N;;;;; +12531;CUNEIFORM SIGN SHA6 TENU;Lo;0;L;;;;;N;;;;; +12532;CUNEIFORM SIGN SHE OVER SHE;Lo;0;L;;;;;N;;;;; +12533;CUNEIFORM SIGN SHE PLUS HUB2;Lo;0;L;;;;;N;;;;; +12534;CUNEIFORM SIGN SHE PLUS NAM2;Lo;0;L;;;;;N;;;;; +12535;CUNEIFORM SIGN SHE PLUS SAR;Lo;0;L;;;;;N;;;;; +12536;CUNEIFORM SIGN SHU2 PLUS DUG TIMES NI;Lo;0;L;;;;;N;;;;; +12537;CUNEIFORM SIGN SHU2 PLUS E2 TIMES AN;Lo;0;L;;;;;N;;;;; +12538;CUNEIFORM SIGN SI TIMES TAK4;Lo;0;L;;;;;N;;;;; +12539;CUNEIFORM SIGN TAK4 PLUS SAG;Lo;0;L;;;;;N;;;;; +1253A;CUNEIFORM SIGN TUM TIMES GAN2 TENU;Lo;0;L;;;;;N;;;;; +1253B;CUNEIFORM SIGN TUM TIMES THREE 
DISH;Lo;0;L;;;;;N;;;;; +1253C;CUNEIFORM SIGN UR2 INVERTED;Lo;0;L;;;;;N;;;;; +1253D;CUNEIFORM SIGN UR2 TIMES UD;Lo;0;L;;;;;N;;;;; +1253E;CUNEIFORM SIGN URU TIMES DARA3;Lo;0;L;;;;;N;;;;; +1253F;CUNEIFORM SIGN URU TIMES LAK-668;Lo;0;L;;;;;N;;;;; +12540;CUNEIFORM SIGN URU TIMES LU3;Lo;0;L;;;;;N;;;;; +12541;CUNEIFORM SIGN ZA7;Lo;0;L;;;;;N;;;;; +12542;CUNEIFORM SIGN ZU OVER ZU PLUS SAR;Lo;0;L;;;;;N;;;;; +12543;CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU;Lo;0;L;;;;;N;;;;; 13000;EGYPTIAN HIEROGLYPH A001;Lo;0;L;;;;;N;;;;; 13001;EGYPTIAN HIEROGLYPH A002;Lo;0;L;;;;;N;;;;; 13002;EGYPTIAN HIEROGLYPH A003;Lo;0;L;;;;;N;;;;; @@ -21557,6 +22595,589 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1342C;EGYPTIAN HIEROGLYPH AA030;Lo;0;L;;;;;N;;;;; 1342D;EGYPTIAN HIEROGLYPH AA031;Lo;0;L;;;;;N;;;;; 1342E;EGYPTIAN HIEROGLYPH AA032;Lo;0;L;;;;;N;;;;; +14400;ANATOLIAN HIEROGLYPH A001;Lo;0;L;;;;;N;;;;; +14401;ANATOLIAN HIEROGLYPH A002;Lo;0;L;;;;;N;;;;; +14402;ANATOLIAN HIEROGLYPH A003;Lo;0;L;;;;;N;;;;; +14403;ANATOLIAN HIEROGLYPH A004;Lo;0;L;;;;;N;;;;; +14404;ANATOLIAN HIEROGLYPH A005;Lo;0;L;;;;;N;;;;; +14405;ANATOLIAN HIEROGLYPH A006;Lo;0;L;;;;;N;;;;; +14406;ANATOLIAN HIEROGLYPH A007;Lo;0;L;;;;;N;;;;; +14407;ANATOLIAN HIEROGLYPH A008;Lo;0;L;;;;;N;;;;; +14408;ANATOLIAN HIEROGLYPH A009;Lo;0;L;;;;;N;;;;; +14409;ANATOLIAN HIEROGLYPH A010;Lo;0;L;;;;;N;;;;; +1440A;ANATOLIAN HIEROGLYPH A010A;Lo;0;L;;;;;N;;;;; +1440B;ANATOLIAN HIEROGLYPH A011;Lo;0;L;;;;;N;;;;; +1440C;ANATOLIAN HIEROGLYPH A012;Lo;0;L;;;;;N;;;;; +1440D;ANATOLIAN HIEROGLYPH A013;Lo;0;L;;;;;N;;;;; +1440E;ANATOLIAN HIEROGLYPH A014;Lo;0;L;;;;;N;;;;; +1440F;ANATOLIAN HIEROGLYPH A015;Lo;0;L;;;;;N;;;;; +14410;ANATOLIAN HIEROGLYPH A016;Lo;0;L;;;;;N;;;;; +14411;ANATOLIAN HIEROGLYPH A017;Lo;0;L;;;;;N;;;;; +14412;ANATOLIAN HIEROGLYPH A018;Lo;0;L;;;;;N;;;;; +14413;ANATOLIAN HIEROGLYPH A019;Lo;0;L;;;;;N;;;;; +14414;ANATOLIAN HIEROGLYPH A020;Lo;0;L;;;;;N;;;;; +14415;ANATOLIAN HIEROGLYPH A021;Lo;0;L;;;;;N;;;;; +14416;ANATOLIAN HIEROGLYPH A022;Lo;0;L;;;;;N;;;;; +14417;ANATOLIAN HIEROGLYPH A023;Lo;0;L;;;;;N;;;;; +14418;ANATOLIAN HIEROGLYPH A024;Lo;0;L;;;;;N;;;;; +14419;ANATOLIAN HIEROGLYPH A025;Lo;0;L;;;;;N;;;;; +1441A;ANATOLIAN HIEROGLYPH A026;Lo;0;L;;;;;N;;;;; +1441B;ANATOLIAN HIEROGLYPH A026A;Lo;0;L;;;;;N;;;;; +1441C;ANATOLIAN HIEROGLYPH A027;Lo;0;L;;;;;N;;;;; +1441D;ANATOLIAN HIEROGLYPH A028;Lo;0;L;;;;;N;;;;; +1441E;ANATOLIAN HIEROGLYPH A029;Lo;0;L;;;;;N;;;;; +1441F;ANATOLIAN HIEROGLYPH A030;Lo;0;L;;;;;N;;;;; +14420;ANATOLIAN HIEROGLYPH A031;Lo;0;L;;;;;N;;;;; +14421;ANATOLIAN HIEROGLYPH A032;Lo;0;L;;;;;N;;;;; +14422;ANATOLIAN HIEROGLYPH A033;Lo;0;L;;;;;N;;;;; +14423;ANATOLIAN HIEROGLYPH A034;Lo;0;L;;;;;N;;;;; +14424;ANATOLIAN HIEROGLYPH A035;Lo;0;L;;;;;N;;;;; +14425;ANATOLIAN HIEROGLYPH A036;Lo;0;L;;;;;N;;;;; +14426;ANATOLIAN HIEROGLYPH A037;Lo;0;L;;;;;N;;;;; +14427;ANATOLIAN HIEROGLYPH A038;Lo;0;L;;;;;N;;;;; +14428;ANATOLIAN HIEROGLYPH A039;Lo;0;L;;;;;N;;;;; +14429;ANATOLIAN HIEROGLYPH A039A;Lo;0;L;;;;;N;;;;; +1442A;ANATOLIAN HIEROGLYPH A040;Lo;0;L;;;;;N;;;;; +1442B;ANATOLIAN HIEROGLYPH A041;Lo;0;L;;;;;N;;;;; +1442C;ANATOLIAN HIEROGLYPH A041A;Lo;0;L;;;;;N;;;;; +1442D;ANATOLIAN HIEROGLYPH A042;Lo;0;L;;;;;N;;;;; +1442E;ANATOLIAN HIEROGLYPH A043;Lo;0;L;;;;;N;;;;; +1442F;ANATOLIAN HIEROGLYPH A044;Lo;0;L;;;;;N;;;;; +14430;ANATOLIAN HIEROGLYPH A045;Lo;0;L;;;;;N;;;;; +14431;ANATOLIAN HIEROGLYPH A045A;Lo;0;L;;;;;N;;;;; +14432;ANATOLIAN HIEROGLYPH A046;Lo;0;L;;;;;N;;;;; +14433;ANATOLIAN HIEROGLYPH A046A;Lo;0;L;;;;;N;;;;; +14434;ANATOLIAN HIEROGLYPH A046B;Lo;0;L;;;;;N;;;;; 
+14435;ANATOLIAN HIEROGLYPH A047;Lo;0;L;;;;;N;;;;; +14436;ANATOLIAN HIEROGLYPH A048;Lo;0;L;;;;;N;;;;; +14437;ANATOLIAN HIEROGLYPH A049;Lo;0;L;;;;;N;;;;; +14438;ANATOLIAN HIEROGLYPH A050;Lo;0;L;;;;;N;;;;; +14439;ANATOLIAN HIEROGLYPH A051;Lo;0;L;;;;;N;;;;; +1443A;ANATOLIAN HIEROGLYPH A052;Lo;0;L;;;;;N;;;;; +1443B;ANATOLIAN HIEROGLYPH A053;Lo;0;L;;;;;N;;;;; +1443C;ANATOLIAN HIEROGLYPH A054;Lo;0;L;;;;;N;;;;; +1443D;ANATOLIAN HIEROGLYPH A055;Lo;0;L;;;;;N;;;;; +1443E;ANATOLIAN HIEROGLYPH A056;Lo;0;L;;;;;N;;;;; +1443F;ANATOLIAN HIEROGLYPH A057;Lo;0;L;;;;;N;;;;; +14440;ANATOLIAN HIEROGLYPH A058;Lo;0;L;;;;;N;;;;; +14441;ANATOLIAN HIEROGLYPH A059;Lo;0;L;;;;;N;;;;; +14442;ANATOLIAN HIEROGLYPH A060;Lo;0;L;;;;;N;;;;; +14443;ANATOLIAN HIEROGLYPH A061;Lo;0;L;;;;;N;;;;; +14444;ANATOLIAN HIEROGLYPH A062;Lo;0;L;;;;;N;;;;; +14445;ANATOLIAN HIEROGLYPH A063;Lo;0;L;;;;;N;;;;; +14446;ANATOLIAN HIEROGLYPH A064;Lo;0;L;;;;;N;;;;; +14447;ANATOLIAN HIEROGLYPH A065;Lo;0;L;;;;;N;;;;; +14448;ANATOLIAN HIEROGLYPH A066;Lo;0;L;;;;;N;;;;; +14449;ANATOLIAN HIEROGLYPH A066A;Lo;0;L;;;;;N;;;;; +1444A;ANATOLIAN HIEROGLYPH A066B;Lo;0;L;;;;;N;;;;; +1444B;ANATOLIAN HIEROGLYPH A066C;Lo;0;L;;;;;N;;;;; +1444C;ANATOLIAN HIEROGLYPH A067;Lo;0;L;;;;;N;;;;; +1444D;ANATOLIAN HIEROGLYPH A068;Lo;0;L;;;;;N;;;;; +1444E;ANATOLIAN HIEROGLYPH A069;Lo;0;L;;;;;N;;;;; +1444F;ANATOLIAN HIEROGLYPH A070;Lo;0;L;;;;;N;;;;; +14450;ANATOLIAN HIEROGLYPH A071;Lo;0;L;;;;;N;;;;; +14451;ANATOLIAN HIEROGLYPH A072;Lo;0;L;;;;;N;;;;; +14452;ANATOLIAN HIEROGLYPH A073;Lo;0;L;;;;;N;;;;; +14453;ANATOLIAN HIEROGLYPH A074;Lo;0;L;;;;;N;;;;; +14454;ANATOLIAN HIEROGLYPH A075;Lo;0;L;;;;;N;;;;; +14455;ANATOLIAN HIEROGLYPH A076;Lo;0;L;;;;;N;;;;; +14456;ANATOLIAN HIEROGLYPH A077;Lo;0;L;;;;;N;;;;; +14457;ANATOLIAN HIEROGLYPH A078;Lo;0;L;;;;;N;;;;; +14458;ANATOLIAN HIEROGLYPH A079;Lo;0;L;;;;;N;;;;; +14459;ANATOLIAN HIEROGLYPH A080;Lo;0;L;;;;;N;;;;; +1445A;ANATOLIAN HIEROGLYPH A081;Lo;0;L;;;;;N;;;;; +1445B;ANATOLIAN HIEROGLYPH A082;Lo;0;L;;;;;N;;;;; +1445C;ANATOLIAN HIEROGLYPH A083;Lo;0;L;;;;;N;;;;; +1445D;ANATOLIAN HIEROGLYPH A084;Lo;0;L;;;;;N;;;;; +1445E;ANATOLIAN HIEROGLYPH A085;Lo;0;L;;;;;N;;;;; +1445F;ANATOLIAN HIEROGLYPH A086;Lo;0;L;;;;;N;;;;; +14460;ANATOLIAN HIEROGLYPH A087;Lo;0;L;;;;;N;;;;; +14461;ANATOLIAN HIEROGLYPH A088;Lo;0;L;;;;;N;;;;; +14462;ANATOLIAN HIEROGLYPH A089;Lo;0;L;;;;;N;;;;; +14463;ANATOLIAN HIEROGLYPH A090;Lo;0;L;;;;;N;;;;; +14464;ANATOLIAN HIEROGLYPH A091;Lo;0;L;;;;;N;;;;; +14465;ANATOLIAN HIEROGLYPH A092;Lo;0;L;;;;;N;;;;; +14466;ANATOLIAN HIEROGLYPH A093;Lo;0;L;;;;;N;;;;; +14467;ANATOLIAN HIEROGLYPH A094;Lo;0;L;;;;;N;;;;; +14468;ANATOLIAN HIEROGLYPH A095;Lo;0;L;;;;;N;;;;; +14469;ANATOLIAN HIEROGLYPH A096;Lo;0;L;;;;;N;;;;; +1446A;ANATOLIAN HIEROGLYPH A097;Lo;0;L;;;;;N;;;;; +1446B;ANATOLIAN HIEROGLYPH A097A;Lo;0;L;;;;;N;;;;; +1446C;ANATOLIAN HIEROGLYPH A098;Lo;0;L;;;;;N;;;;; +1446D;ANATOLIAN HIEROGLYPH A098A;Lo;0;L;;;;;N;;;;; +1446E;ANATOLIAN HIEROGLYPH A099;Lo;0;L;;;;;N;;;;; +1446F;ANATOLIAN HIEROGLYPH A100;Lo;0;L;;;;;N;;;;; +14470;ANATOLIAN HIEROGLYPH A100A;Lo;0;L;;;;;N;;;;; +14471;ANATOLIAN HIEROGLYPH A101;Lo;0;L;;;;;N;;;;; +14472;ANATOLIAN HIEROGLYPH A101A;Lo;0;L;;;;;N;;;;; +14473;ANATOLIAN HIEROGLYPH A102;Lo;0;L;;;;;N;;;;; +14474;ANATOLIAN HIEROGLYPH A102A;Lo;0;L;;;;;N;;;;; +14475;ANATOLIAN HIEROGLYPH A103;Lo;0;L;;;;;N;;;;; +14476;ANATOLIAN HIEROGLYPH A104;Lo;0;L;;;;;N;;;;; +14477;ANATOLIAN HIEROGLYPH A104A;Lo;0;L;;;;;N;;;;; +14478;ANATOLIAN HIEROGLYPH A104B;Lo;0;L;;;;;N;;;;; +14479;ANATOLIAN HIEROGLYPH A104C;Lo;0;L;;;;;N;;;;; +1447A;ANATOLIAN 
HIEROGLYPH A105;Lo;0;L;;;;;N;;;;; +1447B;ANATOLIAN HIEROGLYPH A105A;Lo;0;L;;;;;N;;;;; +1447C;ANATOLIAN HIEROGLYPH A105B;Lo;0;L;;;;;N;;;;; +1447D;ANATOLIAN HIEROGLYPH A106;Lo;0;L;;;;;N;;;;; +1447E;ANATOLIAN HIEROGLYPH A107;Lo;0;L;;;;;N;;;;; +1447F;ANATOLIAN HIEROGLYPH A107A;Lo;0;L;;;;;N;;;;; +14480;ANATOLIAN HIEROGLYPH A107B;Lo;0;L;;;;;N;;;;; +14481;ANATOLIAN HIEROGLYPH A107C;Lo;0;L;;;;;N;;;;; +14482;ANATOLIAN HIEROGLYPH A108;Lo;0;L;;;;;N;;;;; +14483;ANATOLIAN HIEROGLYPH A109;Lo;0;L;;;;;N;;;;; +14484;ANATOLIAN HIEROGLYPH A110;Lo;0;L;;;;;N;;;;; +14485;ANATOLIAN HIEROGLYPH A110A;Lo;0;L;;;;;N;;;;; +14486;ANATOLIAN HIEROGLYPH A110B;Lo;0;L;;;;;N;;;;; +14487;ANATOLIAN HIEROGLYPH A111;Lo;0;L;;;;;N;;;;; +14488;ANATOLIAN HIEROGLYPH A112;Lo;0;L;;;;;N;;;;; +14489;ANATOLIAN HIEROGLYPH A113;Lo;0;L;;;;;N;;;;; +1448A;ANATOLIAN HIEROGLYPH A114;Lo;0;L;;;;;N;;;;; +1448B;ANATOLIAN HIEROGLYPH A115;Lo;0;L;;;;;N;;;;; +1448C;ANATOLIAN HIEROGLYPH A115A;Lo;0;L;;;;;N;;;;; +1448D;ANATOLIAN HIEROGLYPH A116;Lo;0;L;;;;;N;;;;; +1448E;ANATOLIAN HIEROGLYPH A117;Lo;0;L;;;;;N;;;;; +1448F;ANATOLIAN HIEROGLYPH A118;Lo;0;L;;;;;N;;;;; +14490;ANATOLIAN HIEROGLYPH A119;Lo;0;L;;;;;N;;;;; +14491;ANATOLIAN HIEROGLYPH A120;Lo;0;L;;;;;N;;;;; +14492;ANATOLIAN HIEROGLYPH A121;Lo;0;L;;;;;N;;;;; +14493;ANATOLIAN HIEROGLYPH A122;Lo;0;L;;;;;N;;;;; +14494;ANATOLIAN HIEROGLYPH A123;Lo;0;L;;;;;N;;;;; +14495;ANATOLIAN HIEROGLYPH A124;Lo;0;L;;;;;N;;;;; +14496;ANATOLIAN HIEROGLYPH A125;Lo;0;L;;;;;N;;;;; +14497;ANATOLIAN HIEROGLYPH A125A;Lo;0;L;;;;;N;;;;; +14498;ANATOLIAN HIEROGLYPH A126;Lo;0;L;;;;;N;;;;; +14499;ANATOLIAN HIEROGLYPH A127;Lo;0;L;;;;;N;;;;; +1449A;ANATOLIAN HIEROGLYPH A128;Lo;0;L;;;;;N;;;;; +1449B;ANATOLIAN HIEROGLYPH A129;Lo;0;L;;;;;N;;;;; +1449C;ANATOLIAN HIEROGLYPH A130;Lo;0;L;;;;;N;;;;; +1449D;ANATOLIAN HIEROGLYPH A131;Lo;0;L;;;;;N;;;;; +1449E;ANATOLIAN HIEROGLYPH A132;Lo;0;L;;;;;N;;;;; +1449F;ANATOLIAN HIEROGLYPH A133;Lo;0;L;;;;;N;;;;; +144A0;ANATOLIAN HIEROGLYPH A134;Lo;0;L;;;;;N;;;;; +144A1;ANATOLIAN HIEROGLYPH A135;Lo;0;L;;;;;N;;;;; +144A2;ANATOLIAN HIEROGLYPH A135A;Lo;0;L;;;;;N;;;;; +144A3;ANATOLIAN HIEROGLYPH A136;Lo;0;L;;;;;N;;;;; +144A4;ANATOLIAN HIEROGLYPH A137;Lo;0;L;;;;;N;;;;; +144A5;ANATOLIAN HIEROGLYPH A138;Lo;0;L;;;;;N;;;;; +144A6;ANATOLIAN HIEROGLYPH A139;Lo;0;L;;;;;N;;;;; +144A7;ANATOLIAN HIEROGLYPH A140;Lo;0;L;;;;;N;;;;; +144A8;ANATOLIAN HIEROGLYPH A141;Lo;0;L;;;;;N;;;;; +144A9;ANATOLIAN HIEROGLYPH A142;Lo;0;L;;;;;N;;;;; +144AA;ANATOLIAN HIEROGLYPH A143;Lo;0;L;;;;;N;;;;; +144AB;ANATOLIAN HIEROGLYPH A144;Lo;0;L;;;;;N;;;;; +144AC;ANATOLIAN HIEROGLYPH A145;Lo;0;L;;;;;N;;;;; +144AD;ANATOLIAN HIEROGLYPH A146;Lo;0;L;;;;;N;;;;; +144AE;ANATOLIAN HIEROGLYPH A147;Lo;0;L;;;;;N;;;;; +144AF;ANATOLIAN HIEROGLYPH A148;Lo;0;L;;;;;N;;;;; +144B0;ANATOLIAN HIEROGLYPH A149;Lo;0;L;;;;;N;;;;; +144B1;ANATOLIAN HIEROGLYPH A150;Lo;0;L;;;;;N;;;;; +144B2;ANATOLIAN HIEROGLYPH A151;Lo;0;L;;;;;N;;;;; +144B3;ANATOLIAN HIEROGLYPH A152;Lo;0;L;;;;;N;;;;; +144B4;ANATOLIAN HIEROGLYPH A153;Lo;0;L;;;;;N;;;;; +144B5;ANATOLIAN HIEROGLYPH A154;Lo;0;L;;;;;N;;;;; +144B6;ANATOLIAN HIEROGLYPH A155;Lo;0;L;;;;;N;;;;; +144B7;ANATOLIAN HIEROGLYPH A156;Lo;0;L;;;;;N;;;;; +144B8;ANATOLIAN HIEROGLYPH A157;Lo;0;L;;;;;N;;;;; +144B9;ANATOLIAN HIEROGLYPH A158;Lo;0;L;;;;;N;;;;; +144BA;ANATOLIAN HIEROGLYPH A159;Lo;0;L;;;;;N;;;;; +144BB;ANATOLIAN HIEROGLYPH A160;Lo;0;L;;;;;N;;;;; +144BC;ANATOLIAN HIEROGLYPH A161;Lo;0;L;;;;;N;;;;; +144BD;ANATOLIAN HIEROGLYPH A162;Lo;0;L;;;;;N;;;;; +144BE;ANATOLIAN HIEROGLYPH A163;Lo;0;L;;;;;N;;;;; +144BF;ANATOLIAN HIEROGLYPH 
A164;Lo;0;L;;;;;N;;;;; +144C0;ANATOLIAN HIEROGLYPH A165;Lo;0;L;;;;;N;;;;; +144C1;ANATOLIAN HIEROGLYPH A166;Lo;0;L;;;;;N;;;;; +144C2;ANATOLIAN HIEROGLYPH A167;Lo;0;L;;;;;N;;;;; +144C3;ANATOLIAN HIEROGLYPH A168;Lo;0;L;;;;;N;;;;; +144C4;ANATOLIAN HIEROGLYPH A169;Lo;0;L;;;;;N;;;;; +144C5;ANATOLIAN HIEROGLYPH A170;Lo;0;L;;;;;N;;;;; +144C6;ANATOLIAN HIEROGLYPH A171;Lo;0;L;;;;;N;;;;; +144C7;ANATOLIAN HIEROGLYPH A172;Lo;0;L;;;;;N;;;;; +144C8;ANATOLIAN HIEROGLYPH A173;Lo;0;L;;;;;N;;;;; +144C9;ANATOLIAN HIEROGLYPH A174;Lo;0;L;;;;;N;;;;; +144CA;ANATOLIAN HIEROGLYPH A175;Lo;0;L;;;;;N;;;;; +144CB;ANATOLIAN HIEROGLYPH A176;Lo;0;L;;;;;N;;;;; +144CC;ANATOLIAN HIEROGLYPH A177;Lo;0;L;;;;;N;;;;; +144CD;ANATOLIAN HIEROGLYPH A178;Lo;0;L;;;;;N;;;;; +144CE;ANATOLIAN HIEROGLYPH A179;Lo;0;L;;;;;N;;;;; +144CF;ANATOLIAN HIEROGLYPH A180;Lo;0;L;;;;;N;;;;; +144D0;ANATOLIAN HIEROGLYPH A181;Lo;0;L;;;;;N;;;;; +144D1;ANATOLIAN HIEROGLYPH A182;Lo;0;L;;;;;N;;;;; +144D2;ANATOLIAN HIEROGLYPH A183;Lo;0;L;;;;;N;;;;; +144D3;ANATOLIAN HIEROGLYPH A184;Lo;0;L;;;;;N;;;;; +144D4;ANATOLIAN HIEROGLYPH A185;Lo;0;L;;;;;N;;;;; +144D5;ANATOLIAN HIEROGLYPH A186;Lo;0;L;;;;;N;;;;; +144D6;ANATOLIAN HIEROGLYPH A187;Lo;0;L;;;;;N;;;;; +144D7;ANATOLIAN HIEROGLYPH A188;Lo;0;L;;;;;N;;;;; +144D8;ANATOLIAN HIEROGLYPH A189;Lo;0;L;;;;;N;;;;; +144D9;ANATOLIAN HIEROGLYPH A190;Lo;0;L;;;;;N;;;;; +144DA;ANATOLIAN HIEROGLYPH A191;Lo;0;L;;;;;N;;;;; +144DB;ANATOLIAN HIEROGLYPH A192;Lo;0;L;;;;;N;;;;; +144DC;ANATOLIAN HIEROGLYPH A193;Lo;0;L;;;;;N;;;;; +144DD;ANATOLIAN HIEROGLYPH A194;Lo;0;L;;;;;N;;;;; +144DE;ANATOLIAN HIEROGLYPH A195;Lo;0;L;;;;;N;;;;; +144DF;ANATOLIAN HIEROGLYPH A196;Lo;0;L;;;;;N;;;;; +144E0;ANATOLIAN HIEROGLYPH A197;Lo;0;L;;;;;N;;;;; +144E1;ANATOLIAN HIEROGLYPH A198;Lo;0;L;;;;;N;;;;; +144E2;ANATOLIAN HIEROGLYPH A199;Lo;0;L;;;;;N;;;;; +144E3;ANATOLIAN HIEROGLYPH A200;Lo;0;L;;;;;N;;;;; +144E4;ANATOLIAN HIEROGLYPH A201;Lo;0;L;;;;;N;;;;; +144E5;ANATOLIAN HIEROGLYPH A202;Lo;0;L;;;;;N;;;;; +144E6;ANATOLIAN HIEROGLYPH A202A;Lo;0;L;;;;;N;;;;; +144E7;ANATOLIAN HIEROGLYPH A202B;Lo;0;L;;;;;N;;;;; +144E8;ANATOLIAN HIEROGLYPH A203;Lo;0;L;;;;;N;;;;; +144E9;ANATOLIAN HIEROGLYPH A204;Lo;0;L;;;;;N;;;;; +144EA;ANATOLIAN HIEROGLYPH A205;Lo;0;L;;;;;N;;;;; +144EB;ANATOLIAN HIEROGLYPH A206;Lo;0;L;;;;;N;;;;; +144EC;ANATOLIAN HIEROGLYPH A207;Lo;0;L;;;;;N;;;;; +144ED;ANATOLIAN HIEROGLYPH A207A;Lo;0;L;;;;;N;;;;; +144EE;ANATOLIAN HIEROGLYPH A208;Lo;0;L;;;;;N;;;;; +144EF;ANATOLIAN HIEROGLYPH A209;Lo;0;L;;;;;N;;;;; +144F0;ANATOLIAN HIEROGLYPH A209A;Lo;0;L;;;;;N;;;;; +144F1;ANATOLIAN HIEROGLYPH A210;Lo;0;L;;;;;N;;;;; +144F2;ANATOLIAN HIEROGLYPH A211;Lo;0;L;;;;;N;;;;; +144F3;ANATOLIAN HIEROGLYPH A212;Lo;0;L;;;;;N;;;;; +144F4;ANATOLIAN HIEROGLYPH A213;Lo;0;L;;;;;N;;;;; +144F5;ANATOLIAN HIEROGLYPH A214;Lo;0;L;;;;;N;;;;; +144F6;ANATOLIAN HIEROGLYPH A215;Lo;0;L;;;;;N;;;;; +144F7;ANATOLIAN HIEROGLYPH A215A;Lo;0;L;;;;;N;;;;; +144F8;ANATOLIAN HIEROGLYPH A216;Lo;0;L;;;;;N;;;;; +144F9;ANATOLIAN HIEROGLYPH A216A;Lo;0;L;;;;;N;;;;; +144FA;ANATOLIAN HIEROGLYPH A217;Lo;0;L;;;;;N;;;;; +144FB;ANATOLIAN HIEROGLYPH A218;Lo;0;L;;;;;N;;;;; +144FC;ANATOLIAN HIEROGLYPH A219;Lo;0;L;;;;;N;;;;; +144FD;ANATOLIAN HIEROGLYPH A220;Lo;0;L;;;;;N;;;;; +144FE;ANATOLIAN HIEROGLYPH A221;Lo;0;L;;;;;N;;;;; +144FF;ANATOLIAN HIEROGLYPH A222;Lo;0;L;;;;;N;;;;; +14500;ANATOLIAN HIEROGLYPH A223;Lo;0;L;;;;;N;;;;; +14501;ANATOLIAN HIEROGLYPH A224;Lo;0;L;;;;;N;;;;; +14502;ANATOLIAN HIEROGLYPH A225;Lo;0;L;;;;;N;;;;; +14503;ANATOLIAN HIEROGLYPH A226;Lo;0;L;;;;;N;;;;; +14504;ANATOLIAN HIEROGLYPH A227;Lo;0;L;;;;;N;;;;; 
+14505;ANATOLIAN HIEROGLYPH A227A;Lo;0;L;;;;;N;;;;; +14506;ANATOLIAN HIEROGLYPH A228;Lo;0;L;;;;;N;;;;; +14507;ANATOLIAN HIEROGLYPH A229;Lo;0;L;;;;;N;;;;; +14508;ANATOLIAN HIEROGLYPH A230;Lo;0;L;;;;;N;;;;; +14509;ANATOLIAN HIEROGLYPH A231;Lo;0;L;;;;;N;;;;; +1450A;ANATOLIAN HIEROGLYPH A232;Lo;0;L;;;;;N;;;;; +1450B;ANATOLIAN HIEROGLYPH A233;Lo;0;L;;;;;N;;;;; +1450C;ANATOLIAN HIEROGLYPH A234;Lo;0;L;;;;;N;;;;; +1450D;ANATOLIAN HIEROGLYPH A235;Lo;0;L;;;;;N;;;;; +1450E;ANATOLIAN HIEROGLYPH A236;Lo;0;L;;;;;N;;;;; +1450F;ANATOLIAN HIEROGLYPH A237;Lo;0;L;;;;;N;;;;; +14510;ANATOLIAN HIEROGLYPH A238;Lo;0;L;;;;;N;;;;; +14511;ANATOLIAN HIEROGLYPH A239;Lo;0;L;;;;;N;;;;; +14512;ANATOLIAN HIEROGLYPH A240;Lo;0;L;;;;;N;;;;; +14513;ANATOLIAN HIEROGLYPH A241;Lo;0;L;;;;;N;;;;; +14514;ANATOLIAN HIEROGLYPH A242;Lo;0;L;;;;;N;;;;; +14515;ANATOLIAN HIEROGLYPH A243;Lo;0;L;;;;;N;;;;; +14516;ANATOLIAN HIEROGLYPH A244;Lo;0;L;;;;;N;;;;; +14517;ANATOLIAN HIEROGLYPH A245;Lo;0;L;;;;;N;;;;; +14518;ANATOLIAN HIEROGLYPH A246;Lo;0;L;;;;;N;;;;; +14519;ANATOLIAN HIEROGLYPH A247;Lo;0;L;;;;;N;;;;; +1451A;ANATOLIAN HIEROGLYPH A248;Lo;0;L;;;;;N;;;;; +1451B;ANATOLIAN HIEROGLYPH A249;Lo;0;L;;;;;N;;;;; +1451C;ANATOLIAN HIEROGLYPH A250;Lo;0;L;;;;;N;;;;; +1451D;ANATOLIAN HIEROGLYPH A251;Lo;0;L;;;;;N;;;;; +1451E;ANATOLIAN HIEROGLYPH A252;Lo;0;L;;;;;N;;;;; +1451F;ANATOLIAN HIEROGLYPH A253;Lo;0;L;;;;;N;;;;; +14520;ANATOLIAN HIEROGLYPH A254;Lo;0;L;;;;;N;;;;; +14521;ANATOLIAN HIEROGLYPH A255;Lo;0;L;;;;;N;;;;; +14522;ANATOLIAN HIEROGLYPH A256;Lo;0;L;;;;;N;;;;; +14523;ANATOLIAN HIEROGLYPH A257;Lo;0;L;;;;;N;;;;; +14524;ANATOLIAN HIEROGLYPH A258;Lo;0;L;;;;;N;;;;; +14525;ANATOLIAN HIEROGLYPH A259;Lo;0;L;;;;;N;;;;; +14526;ANATOLIAN HIEROGLYPH A260;Lo;0;L;;;;;N;;;;; +14527;ANATOLIAN HIEROGLYPH A261;Lo;0;L;;;;;N;;;;; +14528;ANATOLIAN HIEROGLYPH A262;Lo;0;L;;;;;N;;;;; +14529;ANATOLIAN HIEROGLYPH A263;Lo;0;L;;;;;N;;;;; +1452A;ANATOLIAN HIEROGLYPH A264;Lo;0;L;;;;;N;;;;; +1452B;ANATOLIAN HIEROGLYPH A265;Lo;0;L;;;;;N;;;;; +1452C;ANATOLIAN HIEROGLYPH A266;Lo;0;L;;;;;N;;;;; +1452D;ANATOLIAN HIEROGLYPH A267;Lo;0;L;;;;;N;;;;; +1452E;ANATOLIAN HIEROGLYPH A267A;Lo;0;L;;;;;N;;;;; +1452F;ANATOLIAN HIEROGLYPH A268;Lo;0;L;;;;;N;;;;; +14530;ANATOLIAN HIEROGLYPH A269;Lo;0;L;;;;;N;;;;; +14531;ANATOLIAN HIEROGLYPH A270;Lo;0;L;;;;;N;;;;; +14532;ANATOLIAN HIEROGLYPH A271;Lo;0;L;;;;;N;;;;; +14533;ANATOLIAN HIEROGLYPH A272;Lo;0;L;;;;;N;;;;; +14534;ANATOLIAN HIEROGLYPH A273;Lo;0;L;;;;;N;;;;; +14535;ANATOLIAN HIEROGLYPH A274;Lo;0;L;;;;;N;;;;; +14536;ANATOLIAN HIEROGLYPH A275;Lo;0;L;;;;;N;;;;; +14537;ANATOLIAN HIEROGLYPH A276;Lo;0;L;;;;;N;;;;; +14538;ANATOLIAN HIEROGLYPH A277;Lo;0;L;;;;;N;;;;; +14539;ANATOLIAN HIEROGLYPH A278;Lo;0;L;;;;;N;;;;; +1453A;ANATOLIAN HIEROGLYPH A279;Lo;0;L;;;;;N;;;;; +1453B;ANATOLIAN HIEROGLYPH A280;Lo;0;L;;;;;N;;;;; +1453C;ANATOLIAN HIEROGLYPH A281;Lo;0;L;;;;;N;;;;; +1453D;ANATOLIAN HIEROGLYPH A282;Lo;0;L;;;;;N;;;;; +1453E;ANATOLIAN HIEROGLYPH A283;Lo;0;L;;;;;N;;;;; +1453F;ANATOLIAN HIEROGLYPH A284;Lo;0;L;;;;;N;;;;; +14540;ANATOLIAN HIEROGLYPH A285;Lo;0;L;;;;;N;;;;; +14541;ANATOLIAN HIEROGLYPH A286;Lo;0;L;;;;;N;;;;; +14542;ANATOLIAN HIEROGLYPH A287;Lo;0;L;;;;;N;;;;; +14543;ANATOLIAN HIEROGLYPH A288;Lo;0;L;;;;;N;;;;; +14544;ANATOLIAN HIEROGLYPH A289;Lo;0;L;;;;;N;;;;; +14545;ANATOLIAN HIEROGLYPH A289A;Lo;0;L;;;;;N;;;;; +14546;ANATOLIAN HIEROGLYPH A290;Lo;0;L;;;;;N;;;;; +14547;ANATOLIAN HIEROGLYPH A291;Lo;0;L;;;;;N;;;;; +14548;ANATOLIAN HIEROGLYPH A292;Lo;0;L;;;;;N;;;;; +14549;ANATOLIAN HIEROGLYPH A293;Lo;0;L;;;;;N;;;;; +1454A;ANATOLIAN HIEROGLYPH 
A294;Lo;0;L;;;;;N;;;;; +1454B;ANATOLIAN HIEROGLYPH A294A;Lo;0;L;;;;;N;;;;; +1454C;ANATOLIAN HIEROGLYPH A295;Lo;0;L;;;;;N;;;;; +1454D;ANATOLIAN HIEROGLYPH A296;Lo;0;L;;;;;N;;;;; +1454E;ANATOLIAN HIEROGLYPH A297;Lo;0;L;;;;;N;;;;; +1454F;ANATOLIAN HIEROGLYPH A298;Lo;0;L;;;;;N;;;;; +14550;ANATOLIAN HIEROGLYPH A299;Lo;0;L;;;;;N;;;;; +14551;ANATOLIAN HIEROGLYPH A299A;Lo;0;L;;;;;N;;;;; +14552;ANATOLIAN HIEROGLYPH A300;Lo;0;L;;;;;N;;;;; +14553;ANATOLIAN HIEROGLYPH A301;Lo;0;L;;;;;N;;;;; +14554;ANATOLIAN HIEROGLYPH A302;Lo;0;L;;;;;N;;;;; +14555;ANATOLIAN HIEROGLYPH A303;Lo;0;L;;;;;N;;;;; +14556;ANATOLIAN HIEROGLYPH A304;Lo;0;L;;;;;N;;;;; +14557;ANATOLIAN HIEROGLYPH A305;Lo;0;L;;;;;N;;;;; +14558;ANATOLIAN HIEROGLYPH A306;Lo;0;L;;;;;N;;;;; +14559;ANATOLIAN HIEROGLYPH A307;Lo;0;L;;;;;N;;;;; +1455A;ANATOLIAN HIEROGLYPH A308;Lo;0;L;;;;;N;;;;; +1455B;ANATOLIAN HIEROGLYPH A309;Lo;0;L;;;;;N;;;;; +1455C;ANATOLIAN HIEROGLYPH A309A;Lo;0;L;;;;;N;;;;; +1455D;ANATOLIAN HIEROGLYPH A310;Lo;0;L;;;;;N;;;;; +1455E;ANATOLIAN HIEROGLYPH A311;Lo;0;L;;;;;N;;;;; +1455F;ANATOLIAN HIEROGLYPH A312;Lo;0;L;;;;;N;;;;; +14560;ANATOLIAN HIEROGLYPH A313;Lo;0;L;;;;;N;;;;; +14561;ANATOLIAN HIEROGLYPH A314;Lo;0;L;;;;;N;;;;; +14562;ANATOLIAN HIEROGLYPH A315;Lo;0;L;;;;;N;;;;; +14563;ANATOLIAN HIEROGLYPH A316;Lo;0;L;;;;;N;;;;; +14564;ANATOLIAN HIEROGLYPH A317;Lo;0;L;;;;;N;;;;; +14565;ANATOLIAN HIEROGLYPH A318;Lo;0;L;;;;;N;;;;; +14566;ANATOLIAN HIEROGLYPH A319;Lo;0;L;;;;;N;;;;; +14567;ANATOLIAN HIEROGLYPH A320;Lo;0;L;;;;;N;;;;; +14568;ANATOLIAN HIEROGLYPH A321;Lo;0;L;;;;;N;;;;; +14569;ANATOLIAN HIEROGLYPH A322;Lo;0;L;;;;;N;;;;; +1456A;ANATOLIAN HIEROGLYPH A323;Lo;0;L;;;;;N;;;;; +1456B;ANATOLIAN HIEROGLYPH A324;Lo;0;L;;;;;N;;;;; +1456C;ANATOLIAN HIEROGLYPH A325;Lo;0;L;;;;;N;;;;; +1456D;ANATOLIAN HIEROGLYPH A326;Lo;0;L;;;;;N;;;;; +1456E;ANATOLIAN HIEROGLYPH A327;Lo;0;L;;;;;N;;;;; +1456F;ANATOLIAN HIEROGLYPH A328;Lo;0;L;;;;;N;;;;; +14570;ANATOLIAN HIEROGLYPH A329;Lo;0;L;;;;;N;;;;; +14571;ANATOLIAN HIEROGLYPH A329A;Lo;0;L;;;;;N;;;;; +14572;ANATOLIAN HIEROGLYPH A330;Lo;0;L;;;;;N;;;;; +14573;ANATOLIAN HIEROGLYPH A331;Lo;0;L;;;;;N;;;;; +14574;ANATOLIAN HIEROGLYPH A332A;Lo;0;L;;;;;N;;;;; +14575;ANATOLIAN HIEROGLYPH A332B;Lo;0;L;;;;;N;;;;; +14576;ANATOLIAN HIEROGLYPH A332C;Lo;0;L;;;;;N;;;;; +14577;ANATOLIAN HIEROGLYPH A333;Lo;0;L;;;;;N;;;;; +14578;ANATOLIAN HIEROGLYPH A334;Lo;0;L;;;;;N;;;;; +14579;ANATOLIAN HIEROGLYPH A335;Lo;0;L;;;;;N;;;;; +1457A;ANATOLIAN HIEROGLYPH A336;Lo;0;L;;;;;N;;;;; +1457B;ANATOLIAN HIEROGLYPH A336A;Lo;0;L;;;;;N;;;;; +1457C;ANATOLIAN HIEROGLYPH A336B;Lo;0;L;;;;;N;;;;; +1457D;ANATOLIAN HIEROGLYPH A336C;Lo;0;L;;;;;N;;;;; +1457E;ANATOLIAN HIEROGLYPH A337;Lo;0;L;;;;;N;;;;; +1457F;ANATOLIAN HIEROGLYPH A338;Lo;0;L;;;;;N;;;;; +14580;ANATOLIAN HIEROGLYPH A339;Lo;0;L;;;;;N;;;;; +14581;ANATOLIAN HIEROGLYPH A340;Lo;0;L;;;;;N;;;;; +14582;ANATOLIAN HIEROGLYPH A341;Lo;0;L;;;;;N;;;;; +14583;ANATOLIAN HIEROGLYPH A342;Lo;0;L;;;;;N;;;;; +14584;ANATOLIAN HIEROGLYPH A343;Lo;0;L;;;;;N;;;;; +14585;ANATOLIAN HIEROGLYPH A344;Lo;0;L;;;;;N;;;;; +14586;ANATOLIAN HIEROGLYPH A345;Lo;0;L;;;;;N;;;;; +14587;ANATOLIAN HIEROGLYPH A346;Lo;0;L;;;;;N;;;;; +14588;ANATOLIAN HIEROGLYPH A347;Lo;0;L;;;;;N;;;;; +14589;ANATOLIAN HIEROGLYPH A348;Lo;0;L;;;;;N;;;;; +1458A;ANATOLIAN HIEROGLYPH A349;Lo;0;L;;;;;N;;;;; +1458B;ANATOLIAN HIEROGLYPH A350;Lo;0;L;;;;;N;;;;; +1458C;ANATOLIAN HIEROGLYPH A351;Lo;0;L;;;;;N;;;;; +1458D;ANATOLIAN HIEROGLYPH A352;Lo;0;L;;;;;N;;;;; +1458E;ANATOLIAN HIEROGLYPH A353;Lo;0;L;;;;;N;;;;; +1458F;ANATOLIAN HIEROGLYPH A354;Lo;0;L;;;;;N;;;;; 
+14590;ANATOLIAN HIEROGLYPH A355;Lo;0;L;;;;;N;;;;; +14591;ANATOLIAN HIEROGLYPH A356;Lo;0;L;;;;;N;;;;; +14592;ANATOLIAN HIEROGLYPH A357;Lo;0;L;;;;;N;;;;; +14593;ANATOLIAN HIEROGLYPH A358;Lo;0;L;;;;;N;;;;; +14594;ANATOLIAN HIEROGLYPH A359;Lo;0;L;;;;;N;;;;; +14595;ANATOLIAN HIEROGLYPH A359A;Lo;0;L;;;;;N;;;;; +14596;ANATOLIAN HIEROGLYPH A360;Lo;0;L;;;;;N;;;;; +14597;ANATOLIAN HIEROGLYPH A361;Lo;0;L;;;;;N;;;;; +14598;ANATOLIAN HIEROGLYPH A362;Lo;0;L;;;;;N;;;;; +14599;ANATOLIAN HIEROGLYPH A363;Lo;0;L;;;;;N;;;;; +1459A;ANATOLIAN HIEROGLYPH A364;Lo;0;L;;;;;N;;;;; +1459B;ANATOLIAN HIEROGLYPH A364A;Lo;0;L;;;;;N;;;;; +1459C;ANATOLIAN HIEROGLYPH A365;Lo;0;L;;;;;N;;;;; +1459D;ANATOLIAN HIEROGLYPH A366;Lo;0;L;;;;;N;;;;; +1459E;ANATOLIAN HIEROGLYPH A367;Lo;0;L;;;;;N;;;;; +1459F;ANATOLIAN HIEROGLYPH A368;Lo;0;L;;;;;N;;;;; +145A0;ANATOLIAN HIEROGLYPH A368A;Lo;0;L;;;;;N;;;;; +145A1;ANATOLIAN HIEROGLYPH A369;Lo;0;L;;;;;N;;;;; +145A2;ANATOLIAN HIEROGLYPH A370;Lo;0;L;;;;;N;;;;; +145A3;ANATOLIAN HIEROGLYPH A371;Lo;0;L;;;;;N;;;;; +145A4;ANATOLIAN HIEROGLYPH A371A;Lo;0;L;;;;;N;;;;; +145A5;ANATOLIAN HIEROGLYPH A372;Lo;0;L;;;;;N;;;;; +145A6;ANATOLIAN HIEROGLYPH A373;Lo;0;L;;;;;N;;;;; +145A7;ANATOLIAN HIEROGLYPH A374;Lo;0;L;;;;;N;;;;; +145A8;ANATOLIAN HIEROGLYPH A375;Lo;0;L;;;;;N;;;;; +145A9;ANATOLIAN HIEROGLYPH A376;Lo;0;L;;;;;N;;;;; +145AA;ANATOLIAN HIEROGLYPH A377;Lo;0;L;;;;;N;;;;; +145AB;ANATOLIAN HIEROGLYPH A378;Lo;0;L;;;;;N;;;;; +145AC;ANATOLIAN HIEROGLYPH A379;Lo;0;L;;;;;N;;;;; +145AD;ANATOLIAN HIEROGLYPH A380;Lo;0;L;;;;;N;;;;; +145AE;ANATOLIAN HIEROGLYPH A381;Lo;0;L;;;;;N;;;;; +145AF;ANATOLIAN HIEROGLYPH A381A;Lo;0;L;;;;;N;;;;; +145B0;ANATOLIAN HIEROGLYPH A382;Lo;0;L;;;;;N;;;;; +145B1;ANATOLIAN HIEROGLYPH A383 RA OR RI;Lo;0;L;;;;;N;;;;; +145B2;ANATOLIAN HIEROGLYPH A383A;Lo;0;L;;;;;N;;;;; +145B3;ANATOLIAN HIEROGLYPH A384;Lo;0;L;;;;;N;;;;; +145B4;ANATOLIAN HIEROGLYPH A385;Lo;0;L;;;;;N;;;;; +145B5;ANATOLIAN HIEROGLYPH A386;Lo;0;L;;;;;N;;;;; +145B6;ANATOLIAN HIEROGLYPH A386A;Lo;0;L;;;;;N;;;;; +145B7;ANATOLIAN HIEROGLYPH A387;Lo;0;L;;;;;N;;;;; +145B8;ANATOLIAN HIEROGLYPH A388;Lo;0;L;;;;;N;;;;; +145B9;ANATOLIAN HIEROGLYPH A389;Lo;0;L;;;;;N;;;;; +145BA;ANATOLIAN HIEROGLYPH A390;Lo;0;L;;;;;N;;;;; +145BB;ANATOLIAN HIEROGLYPH A391;Lo;0;L;;;;;N;;;;; +145BC;ANATOLIAN HIEROGLYPH A392;Lo;0;L;;;;;N;;;;; +145BD;ANATOLIAN HIEROGLYPH A393 EIGHT;Lo;0;L;;;;;N;;;;; +145BE;ANATOLIAN HIEROGLYPH A394;Lo;0;L;;;;;N;;;;; +145BF;ANATOLIAN HIEROGLYPH A395;Lo;0;L;;;;;N;;;;; +145C0;ANATOLIAN HIEROGLYPH A396;Lo;0;L;;;;;N;;;;; +145C1;ANATOLIAN HIEROGLYPH A397;Lo;0;L;;;;;N;;;;; +145C2;ANATOLIAN HIEROGLYPH A398;Lo;0;L;;;;;N;;;;; +145C3;ANATOLIAN HIEROGLYPH A399;Lo;0;L;;;;;N;;;;; +145C4;ANATOLIAN HIEROGLYPH A400;Lo;0;L;;;;;N;;;;; +145C5;ANATOLIAN HIEROGLYPH A401;Lo;0;L;;;;;N;;;;; +145C6;ANATOLIAN HIEROGLYPH A402;Lo;0;L;;;;;N;;;;; +145C7;ANATOLIAN HIEROGLYPH A403;Lo;0;L;;;;;N;;;;; +145C8;ANATOLIAN HIEROGLYPH A404;Lo;0;L;;;;;N;;;;; +145C9;ANATOLIAN HIEROGLYPH A405;Lo;0;L;;;;;N;;;;; +145CA;ANATOLIAN HIEROGLYPH A406;Lo;0;L;;;;;N;;;;; +145CB;ANATOLIAN HIEROGLYPH A407;Lo;0;L;;;;;N;;;;; +145CC;ANATOLIAN HIEROGLYPH A408;Lo;0;L;;;;;N;;;;; +145CD;ANATOLIAN HIEROGLYPH A409;Lo;0;L;;;;;N;;;;; +145CE;ANATOLIAN HIEROGLYPH A410 BEGIN LOGOGRAM MARK;Lo;0;L;;;;;N;;;;; +145CF;ANATOLIAN HIEROGLYPH A410A END LOGOGRAM MARK;Lo;0;L;;;;;N;;;;; +145D0;ANATOLIAN HIEROGLYPH A411;Lo;0;L;;;;;N;;;;; +145D1;ANATOLIAN HIEROGLYPH A412;Lo;0;L;;;;;N;;;;; +145D2;ANATOLIAN HIEROGLYPH A413;Lo;0;L;;;;;N;;;;; +145D3;ANATOLIAN HIEROGLYPH A414;Lo;0;L;;;;;N;;;;; +145D4;ANATOLIAN 
HIEROGLYPH A415;Lo;0;L;;;;;N;;;;; +145D5;ANATOLIAN HIEROGLYPH A416;Lo;0;L;;;;;N;;;;; +145D6;ANATOLIAN HIEROGLYPH A417;Lo;0;L;;;;;N;;;;; +145D7;ANATOLIAN HIEROGLYPH A418;Lo;0;L;;;;;N;;;;; +145D8;ANATOLIAN HIEROGLYPH A419;Lo;0;L;;;;;N;;;;; +145D9;ANATOLIAN HIEROGLYPH A420;Lo;0;L;;;;;N;;;;; +145DA;ANATOLIAN HIEROGLYPH A421;Lo;0;L;;;;;N;;;;; +145DB;ANATOLIAN HIEROGLYPH A422;Lo;0;L;;;;;N;;;;; +145DC;ANATOLIAN HIEROGLYPH A423;Lo;0;L;;;;;N;;;;; +145DD;ANATOLIAN HIEROGLYPH A424;Lo;0;L;;;;;N;;;;; +145DE;ANATOLIAN HIEROGLYPH A425;Lo;0;L;;;;;N;;;;; +145DF;ANATOLIAN HIEROGLYPH A426;Lo;0;L;;;;;N;;;;; +145E0;ANATOLIAN HIEROGLYPH A427;Lo;0;L;;;;;N;;;;; +145E1;ANATOLIAN HIEROGLYPH A428;Lo;0;L;;;;;N;;;;; +145E2;ANATOLIAN HIEROGLYPH A429;Lo;0;L;;;;;N;;;;; +145E3;ANATOLIAN HIEROGLYPH A430;Lo;0;L;;;;;N;;;;; +145E4;ANATOLIAN HIEROGLYPH A431;Lo;0;L;;;;;N;;;;; +145E5;ANATOLIAN HIEROGLYPH A432;Lo;0;L;;;;;N;;;;; +145E6;ANATOLIAN HIEROGLYPH A433;Lo;0;L;;;;;N;;;;; +145E7;ANATOLIAN HIEROGLYPH A434;Lo;0;L;;;;;N;;;;; +145E8;ANATOLIAN HIEROGLYPH A435;Lo;0;L;;;;;N;;;;; +145E9;ANATOLIAN HIEROGLYPH A436;Lo;0;L;;;;;N;;;;; +145EA;ANATOLIAN HIEROGLYPH A437;Lo;0;L;;;;;N;;;;; +145EB;ANATOLIAN HIEROGLYPH A438;Lo;0;L;;;;;N;;;;; +145EC;ANATOLIAN HIEROGLYPH A439;Lo;0;L;;;;;N;;;;; +145ED;ANATOLIAN HIEROGLYPH A440;Lo;0;L;;;;;N;;;;; +145EE;ANATOLIAN HIEROGLYPH A441;Lo;0;L;;;;;N;;;;; +145EF;ANATOLIAN HIEROGLYPH A442;Lo;0;L;;;;;N;;;;; +145F0;ANATOLIAN HIEROGLYPH A443;Lo;0;L;;;;;N;;;;; +145F1;ANATOLIAN HIEROGLYPH A444;Lo;0;L;;;;;N;;;;; +145F2;ANATOLIAN HIEROGLYPH A445;Lo;0;L;;;;;N;;;;; +145F3;ANATOLIAN HIEROGLYPH A446;Lo;0;L;;;;;N;;;;; +145F4;ANATOLIAN HIEROGLYPH A447;Lo;0;L;;;;;N;;;;; +145F5;ANATOLIAN HIEROGLYPH A448;Lo;0;L;;;;;N;;;;; +145F6;ANATOLIAN HIEROGLYPH A449;Lo;0;L;;;;;N;;;;; +145F7;ANATOLIAN HIEROGLYPH A450;Lo;0;L;;;;;N;;;;; +145F8;ANATOLIAN HIEROGLYPH A450A;Lo;0;L;;;;;N;;;;; +145F9;ANATOLIAN HIEROGLYPH A451;Lo;0;L;;;;;N;;;;; +145FA;ANATOLIAN HIEROGLYPH A452;Lo;0;L;;;;;N;;;;; +145FB;ANATOLIAN HIEROGLYPH A453;Lo;0;L;;;;;N;;;;; +145FC;ANATOLIAN HIEROGLYPH A454;Lo;0;L;;;;;N;;;;; +145FD;ANATOLIAN HIEROGLYPH A455;Lo;0;L;;;;;N;;;;; +145FE;ANATOLIAN HIEROGLYPH A456;Lo;0;L;;;;;N;;;;; +145FF;ANATOLIAN HIEROGLYPH A457;Lo;0;L;;;;;N;;;;; +14600;ANATOLIAN HIEROGLYPH A457A;Lo;0;L;;;;;N;;;;; +14601;ANATOLIAN HIEROGLYPH A458;Lo;0;L;;;;;N;;;;; +14602;ANATOLIAN HIEROGLYPH A459;Lo;0;L;;;;;N;;;;; +14603;ANATOLIAN HIEROGLYPH A460;Lo;0;L;;;;;N;;;;; +14604;ANATOLIAN HIEROGLYPH A461;Lo;0;L;;;;;N;;;;; +14605;ANATOLIAN HIEROGLYPH A462;Lo;0;L;;;;;N;;;;; +14606;ANATOLIAN HIEROGLYPH A463;Lo;0;L;;;;;N;;;;; +14607;ANATOLIAN HIEROGLYPH A464;Lo;0;L;;;;;N;;;;; +14608;ANATOLIAN HIEROGLYPH A465;Lo;0;L;;;;;N;;;;; +14609;ANATOLIAN HIEROGLYPH A466;Lo;0;L;;;;;N;;;;; +1460A;ANATOLIAN HIEROGLYPH A467;Lo;0;L;;;;;N;;;;; +1460B;ANATOLIAN HIEROGLYPH A468;Lo;0;L;;;;;N;;;;; +1460C;ANATOLIAN HIEROGLYPH A469;Lo;0;L;;;;;N;;;;; +1460D;ANATOLIAN HIEROGLYPH A470;Lo;0;L;;;;;N;;;;; +1460E;ANATOLIAN HIEROGLYPH A471;Lo;0;L;;;;;N;;;;; +1460F;ANATOLIAN HIEROGLYPH A472;Lo;0;L;;;;;N;;;;; +14610;ANATOLIAN HIEROGLYPH A473;Lo;0;L;;;;;N;;;;; +14611;ANATOLIAN HIEROGLYPH A474;Lo;0;L;;;;;N;;;;; +14612;ANATOLIAN HIEROGLYPH A475;Lo;0;L;;;;;N;;;;; +14613;ANATOLIAN HIEROGLYPH A476;Lo;0;L;;;;;N;;;;; +14614;ANATOLIAN HIEROGLYPH A477;Lo;0;L;;;;;N;;;;; +14615;ANATOLIAN HIEROGLYPH A478;Lo;0;L;;;;;N;;;;; +14616;ANATOLIAN HIEROGLYPH A479;Lo;0;L;;;;;N;;;;; +14617;ANATOLIAN HIEROGLYPH A480;Lo;0;L;;;;;N;;;;; +14618;ANATOLIAN HIEROGLYPH A481;Lo;0;L;;;;;N;;;;; +14619;ANATOLIAN HIEROGLYPH A482;Lo;0;L;;;;;N;;;;; 
+1461A;ANATOLIAN HIEROGLYPH A483;Lo;0;L;;;;;N;;;;; +1461B;ANATOLIAN HIEROGLYPH A484;Lo;0;L;;;;;N;;;;; +1461C;ANATOLIAN HIEROGLYPH A485;Lo;0;L;;;;;N;;;;; +1461D;ANATOLIAN HIEROGLYPH A486;Lo;0;L;;;;;N;;;;; +1461E;ANATOLIAN HIEROGLYPH A487;Lo;0;L;;;;;N;;;;; +1461F;ANATOLIAN HIEROGLYPH A488;Lo;0;L;;;;;N;;;;; +14620;ANATOLIAN HIEROGLYPH A489;Lo;0;L;;;;;N;;;;; +14621;ANATOLIAN HIEROGLYPH A490;Lo;0;L;;;;;N;;;;; +14622;ANATOLIAN HIEROGLYPH A491;Lo;0;L;;;;;N;;;;; +14623;ANATOLIAN HIEROGLYPH A492;Lo;0;L;;;;;N;;;;; +14624;ANATOLIAN HIEROGLYPH A493;Lo;0;L;;;;;N;;;;; +14625;ANATOLIAN HIEROGLYPH A494;Lo;0;L;;;;;N;;;;; +14626;ANATOLIAN HIEROGLYPH A495;Lo;0;L;;;;;N;;;;; +14627;ANATOLIAN HIEROGLYPH A496;Lo;0;L;;;;;N;;;;; +14628;ANATOLIAN HIEROGLYPH A497;Lo;0;L;;;;;N;;;;; +14629;ANATOLIAN HIEROGLYPH A501;Lo;0;L;;;;;N;;;;; +1462A;ANATOLIAN HIEROGLYPH A502;Lo;0;L;;;;;N;;;;; +1462B;ANATOLIAN HIEROGLYPH A503;Lo;0;L;;;;;N;;;;; +1462C;ANATOLIAN HIEROGLYPH A504;Lo;0;L;;;;;N;;;;; +1462D;ANATOLIAN HIEROGLYPH A505;Lo;0;L;;;;;N;;;;; +1462E;ANATOLIAN HIEROGLYPH A506;Lo;0;L;;;;;N;;;;; +1462F;ANATOLIAN HIEROGLYPH A507;Lo;0;L;;;;;N;;;;; +14630;ANATOLIAN HIEROGLYPH A508;Lo;0;L;;;;;N;;;;; +14631;ANATOLIAN HIEROGLYPH A509;Lo;0;L;;;;;N;;;;; +14632;ANATOLIAN HIEROGLYPH A510;Lo;0;L;;;;;N;;;;; +14633;ANATOLIAN HIEROGLYPH A511;Lo;0;L;;;;;N;;;;; +14634;ANATOLIAN HIEROGLYPH A512;Lo;0;L;;;;;N;;;;; +14635;ANATOLIAN HIEROGLYPH A513;Lo;0;L;;;;;N;;;;; +14636;ANATOLIAN HIEROGLYPH A514;Lo;0;L;;;;;N;;;;; +14637;ANATOLIAN HIEROGLYPH A515;Lo;0;L;;;;;N;;;;; +14638;ANATOLIAN HIEROGLYPH A516;Lo;0;L;;;;;N;;;;; +14639;ANATOLIAN HIEROGLYPH A517;Lo;0;L;;;;;N;;;;; +1463A;ANATOLIAN HIEROGLYPH A518;Lo;0;L;;;;;N;;;;; +1463B;ANATOLIAN HIEROGLYPH A519;Lo;0;L;;;;;N;;;;; +1463C;ANATOLIAN HIEROGLYPH A520;Lo;0;L;;;;;N;;;;; +1463D;ANATOLIAN HIEROGLYPH A521;Lo;0;L;;;;;N;;;;; +1463E;ANATOLIAN HIEROGLYPH A522;Lo;0;L;;;;;N;;;;; +1463F;ANATOLIAN HIEROGLYPH A523;Lo;0;L;;;;;N;;;;; +14640;ANATOLIAN HIEROGLYPH A524;Lo;0;L;;;;;N;;;;; +14641;ANATOLIAN HIEROGLYPH A525;Lo;0;L;;;;;N;;;;; +14642;ANATOLIAN HIEROGLYPH A526;Lo;0;L;;;;;N;;;;; +14643;ANATOLIAN HIEROGLYPH A527;Lo;0;L;;;;;N;;;;; +14644;ANATOLIAN HIEROGLYPH A528;Lo;0;L;;;;;N;;;;; +14645;ANATOLIAN HIEROGLYPH A529;Lo;0;L;;;;;N;;;;; +14646;ANATOLIAN HIEROGLYPH A530;Lo;0;L;;;;;N;;;;; 16800;BAMUM LETTER PHASE-A NGKUE MFON;Lo;0;L;;;;;N;;;;; 16801;BAMUM LETTER PHASE-A GBIEE FON;Lo;0;L;;;;;N;;;;; 16802;BAMUM LETTER PHASE-A PON MFON PIPAEMGBIEE;Lo;0;L;;;;;N;;;;; @@ -22465,6 +24086,764 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 16F9D;MIAO LETTER REFORMED TONE-5;Lm;0;L;;;;;N;;;;; 16F9E;MIAO LETTER REFORMED TONE-6;Lm;0;L;;;;;N;;;;; 16F9F;MIAO LETTER REFORMED TONE-8;Lm;0;L;;;;;N;;;;; +16FE0;TANGUT ITERATION MARK;Lm;0;L;;;;;N;;;;; +17000;;Lo;0;L;;;;;N;;;;; +187EC;;Lo;0;L;;;;;N;;;;; +18800;TANGUT COMPONENT-001;Lo;0;L;;;;;N;;;;; +18801;TANGUT COMPONENT-002;Lo;0;L;;;;;N;;;;; +18802;TANGUT COMPONENT-003;Lo;0;L;;;;;N;;;;; +18803;TANGUT COMPONENT-004;Lo;0;L;;;;;N;;;;; +18804;TANGUT COMPONENT-005;Lo;0;L;;;;;N;;;;; +18805;TANGUT COMPONENT-006;Lo;0;L;;;;;N;;;;; +18806;TANGUT COMPONENT-007;Lo;0;L;;;;;N;;;;; +18807;TANGUT COMPONENT-008;Lo;0;L;;;;;N;;;;; +18808;TANGUT COMPONENT-009;Lo;0;L;;;;;N;;;;; +18809;TANGUT COMPONENT-010;Lo;0;L;;;;;N;;;;; +1880A;TANGUT COMPONENT-011;Lo;0;L;;;;;N;;;;; +1880B;TANGUT COMPONENT-012;Lo;0;L;;;;;N;;;;; +1880C;TANGUT COMPONENT-013;Lo;0;L;;;;;N;;;;; +1880D;TANGUT COMPONENT-014;Lo;0;L;;;;;N;;;;; +1880E;TANGUT COMPONENT-015;Lo;0;L;;;;;N;;;;; +1880F;TANGUT COMPONENT-016;Lo;0;L;;;;;N;;;;; +18810;TANGUT 
COMPONENT-017;Lo;0;L;;;;;N;;;;; +18811;TANGUT COMPONENT-018;Lo;0;L;;;;;N;;;;; +18812;TANGUT COMPONENT-019;Lo;0;L;;;;;N;;;;; +18813;TANGUT COMPONENT-020;Lo;0;L;;;;;N;;;;; +18814;TANGUT COMPONENT-021;Lo;0;L;;;;;N;;;;; +18815;TANGUT COMPONENT-022;Lo;0;L;;;;;N;;;;; +18816;TANGUT COMPONENT-023;Lo;0;L;;;;;N;;;;; +18817;TANGUT COMPONENT-024;Lo;0;L;;;;;N;;;;; +18818;TANGUT COMPONENT-025;Lo;0;L;;;;;N;;;;; +18819;TANGUT COMPONENT-026;Lo;0;L;;;;;N;;;;; +1881A;TANGUT COMPONENT-027;Lo;0;L;;;;;N;;;;; +1881B;TANGUT COMPONENT-028;Lo;0;L;;;;;N;;;;; +1881C;TANGUT COMPONENT-029;Lo;0;L;;;;;N;;;;; +1881D;TANGUT COMPONENT-030;Lo;0;L;;;;;N;;;;; +1881E;TANGUT COMPONENT-031;Lo;0;L;;;;;N;;;;; +1881F;TANGUT COMPONENT-032;Lo;0;L;;;;;N;;;;; +18820;TANGUT COMPONENT-033;Lo;0;L;;;;;N;;;;; +18821;TANGUT COMPONENT-034;Lo;0;L;;;;;N;;;;; +18822;TANGUT COMPONENT-035;Lo;0;L;;;;;N;;;;; +18823;TANGUT COMPONENT-036;Lo;0;L;;;;;N;;;;; +18824;TANGUT COMPONENT-037;Lo;0;L;;;;;N;;;;; +18825;TANGUT COMPONENT-038;Lo;0;L;;;;;N;;;;; +18826;TANGUT COMPONENT-039;Lo;0;L;;;;;N;;;;; +18827;TANGUT COMPONENT-040;Lo;0;L;;;;;N;;;;; +18828;TANGUT COMPONENT-041;Lo;0;L;;;;;N;;;;; +18829;TANGUT COMPONENT-042;Lo;0;L;;;;;N;;;;; +1882A;TANGUT COMPONENT-043;Lo;0;L;;;;;N;;;;; +1882B;TANGUT COMPONENT-044;Lo;0;L;;;;;N;;;;; +1882C;TANGUT COMPONENT-045;Lo;0;L;;;;;N;;;;; +1882D;TANGUT COMPONENT-046;Lo;0;L;;;;;N;;;;; +1882E;TANGUT COMPONENT-047;Lo;0;L;;;;;N;;;;; +1882F;TANGUT COMPONENT-048;Lo;0;L;;;;;N;;;;; +18830;TANGUT COMPONENT-049;Lo;0;L;;;;;N;;;;; +18831;TANGUT COMPONENT-050;Lo;0;L;;;;;N;;;;; +18832;TANGUT COMPONENT-051;Lo;0;L;;;;;N;;;;; +18833;TANGUT COMPONENT-052;Lo;0;L;;;;;N;;;;; +18834;TANGUT COMPONENT-053;Lo;0;L;;;;;N;;;;; +18835;TANGUT COMPONENT-054;Lo;0;L;;;;;N;;;;; +18836;TANGUT COMPONENT-055;Lo;0;L;;;;;N;;;;; +18837;TANGUT COMPONENT-056;Lo;0;L;;;;;N;;;;; +18838;TANGUT COMPONENT-057;Lo;0;L;;;;;N;;;;; +18839;TANGUT COMPONENT-058;Lo;0;L;;;;;N;;;;; +1883A;TANGUT COMPONENT-059;Lo;0;L;;;;;N;;;;; +1883B;TANGUT COMPONENT-060;Lo;0;L;;;;;N;;;;; +1883C;TANGUT COMPONENT-061;Lo;0;L;;;;;N;;;;; +1883D;TANGUT COMPONENT-062;Lo;0;L;;;;;N;;;;; +1883E;TANGUT COMPONENT-063;Lo;0;L;;;;;N;;;;; +1883F;TANGUT COMPONENT-064;Lo;0;L;;;;;N;;;;; +18840;TANGUT COMPONENT-065;Lo;0;L;;;;;N;;;;; +18841;TANGUT COMPONENT-066;Lo;0;L;;;;;N;;;;; +18842;TANGUT COMPONENT-067;Lo;0;L;;;;;N;;;;; +18843;TANGUT COMPONENT-068;Lo;0;L;;;;;N;;;;; +18844;TANGUT COMPONENT-069;Lo;0;L;;;;;N;;;;; +18845;TANGUT COMPONENT-070;Lo;0;L;;;;;N;;;;; +18846;TANGUT COMPONENT-071;Lo;0;L;;;;;N;;;;; +18847;TANGUT COMPONENT-072;Lo;0;L;;;;;N;;;;; +18848;TANGUT COMPONENT-073;Lo;0;L;;;;;N;;;;; +18849;TANGUT COMPONENT-074;Lo;0;L;;;;;N;;;;; +1884A;TANGUT COMPONENT-075;Lo;0;L;;;;;N;;;;; +1884B;TANGUT COMPONENT-076;Lo;0;L;;;;;N;;;;; +1884C;TANGUT COMPONENT-077;Lo;0;L;;;;;N;;;;; +1884D;TANGUT COMPONENT-078;Lo;0;L;;;;;N;;;;; +1884E;TANGUT COMPONENT-079;Lo;0;L;;;;;N;;;;; +1884F;TANGUT COMPONENT-080;Lo;0;L;;;;;N;;;;; +18850;TANGUT COMPONENT-081;Lo;0;L;;;;;N;;;;; +18851;TANGUT COMPONENT-082;Lo;0;L;;;;;N;;;;; +18852;TANGUT COMPONENT-083;Lo;0;L;;;;;N;;;;; +18853;TANGUT COMPONENT-084;Lo;0;L;;;;;N;;;;; +18854;TANGUT COMPONENT-085;Lo;0;L;;;;;N;;;;; +18855;TANGUT COMPONENT-086;Lo;0;L;;;;;N;;;;; +18856;TANGUT COMPONENT-087;Lo;0;L;;;;;N;;;;; +18857;TANGUT COMPONENT-088;Lo;0;L;;;;;N;;;;; +18858;TANGUT COMPONENT-089;Lo;0;L;;;;;N;;;;; +18859;TANGUT COMPONENT-090;Lo;0;L;;;;;N;;;;; +1885A;TANGUT COMPONENT-091;Lo;0;L;;;;;N;;;;; +1885B;TANGUT COMPONENT-092;Lo;0;L;;;;;N;;;;; +1885C;TANGUT COMPONENT-093;Lo;0;L;;;;;N;;;;; +1885D;TANGUT 
COMPONENT-094;Lo;0;L;;;;;N;;;;; +1885E;TANGUT COMPONENT-095;Lo;0;L;;;;;N;;;;; +1885F;TANGUT COMPONENT-096;Lo;0;L;;;;;N;;;;; +18860;TANGUT COMPONENT-097;Lo;0;L;;;;;N;;;;; +18861;TANGUT COMPONENT-098;Lo;0;L;;;;;N;;;;; +18862;TANGUT COMPONENT-099;Lo;0;L;;;;;N;;;;; +18863;TANGUT COMPONENT-100;Lo;0;L;;;;;N;;;;; +18864;TANGUT COMPONENT-101;Lo;0;L;;;;;N;;;;; +18865;TANGUT COMPONENT-102;Lo;0;L;;;;;N;;;;; +18866;TANGUT COMPONENT-103;Lo;0;L;;;;;N;;;;; +18867;TANGUT COMPONENT-104;Lo;0;L;;;;;N;;;;; +18868;TANGUT COMPONENT-105;Lo;0;L;;;;;N;;;;; +18869;TANGUT COMPONENT-106;Lo;0;L;;;;;N;;;;; +1886A;TANGUT COMPONENT-107;Lo;0;L;;;;;N;;;;; +1886B;TANGUT COMPONENT-108;Lo;0;L;;;;;N;;;;; +1886C;TANGUT COMPONENT-109;Lo;0;L;;;;;N;;;;; +1886D;TANGUT COMPONENT-110;Lo;0;L;;;;;N;;;;; +1886E;TANGUT COMPONENT-111;Lo;0;L;;;;;N;;;;; +1886F;TANGUT COMPONENT-112;Lo;0;L;;;;;N;;;;; +18870;TANGUT COMPONENT-113;Lo;0;L;;;;;N;;;;; +18871;TANGUT COMPONENT-114;Lo;0;L;;;;;N;;;;; +18872;TANGUT COMPONENT-115;Lo;0;L;;;;;N;;;;; +18873;TANGUT COMPONENT-116;Lo;0;L;;;;;N;;;;; +18874;TANGUT COMPONENT-117;Lo;0;L;;;;;N;;;;; +18875;TANGUT COMPONENT-118;Lo;0;L;;;;;N;;;;; +18876;TANGUT COMPONENT-119;Lo;0;L;;;;;N;;;;; +18877;TANGUT COMPONENT-120;Lo;0;L;;;;;N;;;;; +18878;TANGUT COMPONENT-121;Lo;0;L;;;;;N;;;;; +18879;TANGUT COMPONENT-122;Lo;0;L;;;;;N;;;;; +1887A;TANGUT COMPONENT-123;Lo;0;L;;;;;N;;;;; +1887B;TANGUT COMPONENT-124;Lo;0;L;;;;;N;;;;; +1887C;TANGUT COMPONENT-125;Lo;0;L;;;;;N;;;;; +1887D;TANGUT COMPONENT-126;Lo;0;L;;;;;N;;;;; +1887E;TANGUT COMPONENT-127;Lo;0;L;;;;;N;;;;; +1887F;TANGUT COMPONENT-128;Lo;0;L;;;;;N;;;;; +18880;TANGUT COMPONENT-129;Lo;0;L;;;;;N;;;;; +18881;TANGUT COMPONENT-130;Lo;0;L;;;;;N;;;;; +18882;TANGUT COMPONENT-131;Lo;0;L;;;;;N;;;;; +18883;TANGUT COMPONENT-132;Lo;0;L;;;;;N;;;;; +18884;TANGUT COMPONENT-133;Lo;0;L;;;;;N;;;;; +18885;TANGUT COMPONENT-134;Lo;0;L;;;;;N;;;;; +18886;TANGUT COMPONENT-135;Lo;0;L;;;;;N;;;;; +18887;TANGUT COMPONENT-136;Lo;0;L;;;;;N;;;;; +18888;TANGUT COMPONENT-137;Lo;0;L;;;;;N;;;;; +18889;TANGUT COMPONENT-138;Lo;0;L;;;;;N;;;;; +1888A;TANGUT COMPONENT-139;Lo;0;L;;;;;N;;;;; +1888B;TANGUT COMPONENT-140;Lo;0;L;;;;;N;;;;; +1888C;TANGUT COMPONENT-141;Lo;0;L;;;;;N;;;;; +1888D;TANGUT COMPONENT-142;Lo;0;L;;;;;N;;;;; +1888E;TANGUT COMPONENT-143;Lo;0;L;;;;;N;;;;; +1888F;TANGUT COMPONENT-144;Lo;0;L;;;;;N;;;;; +18890;TANGUT COMPONENT-145;Lo;0;L;;;;;N;;;;; +18891;TANGUT COMPONENT-146;Lo;0;L;;;;;N;;;;; +18892;TANGUT COMPONENT-147;Lo;0;L;;;;;N;;;;; +18893;TANGUT COMPONENT-148;Lo;0;L;;;;;N;;;;; +18894;TANGUT COMPONENT-149;Lo;0;L;;;;;N;;;;; +18895;TANGUT COMPONENT-150;Lo;0;L;;;;;N;;;;; +18896;TANGUT COMPONENT-151;Lo;0;L;;;;;N;;;;; +18897;TANGUT COMPONENT-152;Lo;0;L;;;;;N;;;;; +18898;TANGUT COMPONENT-153;Lo;0;L;;;;;N;;;;; +18899;TANGUT COMPONENT-154;Lo;0;L;;;;;N;;;;; +1889A;TANGUT COMPONENT-155;Lo;0;L;;;;;N;;;;; +1889B;TANGUT COMPONENT-156;Lo;0;L;;;;;N;;;;; +1889C;TANGUT COMPONENT-157;Lo;0;L;;;;;N;;;;; +1889D;TANGUT COMPONENT-158;Lo;0;L;;;;;N;;;;; +1889E;TANGUT COMPONENT-159;Lo;0;L;;;;;N;;;;; +1889F;TANGUT COMPONENT-160;Lo;0;L;;;;;N;;;;; +188A0;TANGUT COMPONENT-161;Lo;0;L;;;;;N;;;;; +188A1;TANGUT COMPONENT-162;Lo;0;L;;;;;N;;;;; +188A2;TANGUT COMPONENT-163;Lo;0;L;;;;;N;;;;; +188A3;TANGUT COMPONENT-164;Lo;0;L;;;;;N;;;;; +188A4;TANGUT COMPONENT-165;Lo;0;L;;;;;N;;;;; +188A5;TANGUT COMPONENT-166;Lo;0;L;;;;;N;;;;; +188A6;TANGUT COMPONENT-167;Lo;0;L;;;;;N;;;;; +188A7;TANGUT COMPONENT-168;Lo;0;L;;;;;N;;;;; +188A8;TANGUT COMPONENT-169;Lo;0;L;;;;;N;;;;; +188A9;TANGUT COMPONENT-170;Lo;0;L;;;;;N;;;;; +188AA;TANGUT 
COMPONENT-171;Lo;0;L;;;;;N;;;;; +188AB;TANGUT COMPONENT-172;Lo;0;L;;;;;N;;;;; +188AC;TANGUT COMPONENT-173;Lo;0;L;;;;;N;;;;; +188AD;TANGUT COMPONENT-174;Lo;0;L;;;;;N;;;;; +188AE;TANGUT COMPONENT-175;Lo;0;L;;;;;N;;;;; +188AF;TANGUT COMPONENT-176;Lo;0;L;;;;;N;;;;; +188B0;TANGUT COMPONENT-177;Lo;0;L;;;;;N;;;;; +188B1;TANGUT COMPONENT-178;Lo;0;L;;;;;N;;;;; +188B2;TANGUT COMPONENT-179;Lo;0;L;;;;;N;;;;; +188B3;TANGUT COMPONENT-180;Lo;0;L;;;;;N;;;;; +188B4;TANGUT COMPONENT-181;Lo;0;L;;;;;N;;;;; +188B5;TANGUT COMPONENT-182;Lo;0;L;;;;;N;;;;; +188B6;TANGUT COMPONENT-183;Lo;0;L;;;;;N;;;;; +188B7;TANGUT COMPONENT-184;Lo;0;L;;;;;N;;;;; +188B8;TANGUT COMPONENT-185;Lo;0;L;;;;;N;;;;; +188B9;TANGUT COMPONENT-186;Lo;0;L;;;;;N;;;;; +188BA;TANGUT COMPONENT-187;Lo;0;L;;;;;N;;;;; +188BB;TANGUT COMPONENT-188;Lo;0;L;;;;;N;;;;; +188BC;TANGUT COMPONENT-189;Lo;0;L;;;;;N;;;;; +188BD;TANGUT COMPONENT-190;Lo;0;L;;;;;N;;;;; +188BE;TANGUT COMPONENT-191;Lo;0;L;;;;;N;;;;; +188BF;TANGUT COMPONENT-192;Lo;0;L;;;;;N;;;;; +188C0;TANGUT COMPONENT-193;Lo;0;L;;;;;N;;;;; +188C1;TANGUT COMPONENT-194;Lo;0;L;;;;;N;;;;; +188C2;TANGUT COMPONENT-195;Lo;0;L;;;;;N;;;;; +188C3;TANGUT COMPONENT-196;Lo;0;L;;;;;N;;;;; +188C4;TANGUT COMPONENT-197;Lo;0;L;;;;;N;;;;; +188C5;TANGUT COMPONENT-198;Lo;0;L;;;;;N;;;;; +188C6;TANGUT COMPONENT-199;Lo;0;L;;;;;N;;;;; +188C7;TANGUT COMPONENT-200;Lo;0;L;;;;;N;;;;; +188C8;TANGUT COMPONENT-201;Lo;0;L;;;;;N;;;;; +188C9;TANGUT COMPONENT-202;Lo;0;L;;;;;N;;;;; +188CA;TANGUT COMPONENT-203;Lo;0;L;;;;;N;;;;; +188CB;TANGUT COMPONENT-204;Lo;0;L;;;;;N;;;;; +188CC;TANGUT COMPONENT-205;Lo;0;L;;;;;N;;;;; +188CD;TANGUT COMPONENT-206;Lo;0;L;;;;;N;;;;; +188CE;TANGUT COMPONENT-207;Lo;0;L;;;;;N;;;;; +188CF;TANGUT COMPONENT-208;Lo;0;L;;;;;N;;;;; +188D0;TANGUT COMPONENT-209;Lo;0;L;;;;;N;;;;; +188D1;TANGUT COMPONENT-210;Lo;0;L;;;;;N;;;;; +188D2;TANGUT COMPONENT-211;Lo;0;L;;;;;N;;;;; +188D3;TANGUT COMPONENT-212;Lo;0;L;;;;;N;;;;; +188D4;TANGUT COMPONENT-213;Lo;0;L;;;;;N;;;;; +188D5;TANGUT COMPONENT-214;Lo;0;L;;;;;N;;;;; +188D6;TANGUT COMPONENT-215;Lo;0;L;;;;;N;;;;; +188D7;TANGUT COMPONENT-216;Lo;0;L;;;;;N;;;;; +188D8;TANGUT COMPONENT-217;Lo;0;L;;;;;N;;;;; +188D9;TANGUT COMPONENT-218;Lo;0;L;;;;;N;;;;; +188DA;TANGUT COMPONENT-219;Lo;0;L;;;;;N;;;;; +188DB;TANGUT COMPONENT-220;Lo;0;L;;;;;N;;;;; +188DC;TANGUT COMPONENT-221;Lo;0;L;;;;;N;;;;; +188DD;TANGUT COMPONENT-222;Lo;0;L;;;;;N;;;;; +188DE;TANGUT COMPONENT-223;Lo;0;L;;;;;N;;;;; +188DF;TANGUT COMPONENT-224;Lo;0;L;;;;;N;;;;; +188E0;TANGUT COMPONENT-225;Lo;0;L;;;;;N;;;;; +188E1;TANGUT COMPONENT-226;Lo;0;L;;;;;N;;;;; +188E2;TANGUT COMPONENT-227;Lo;0;L;;;;;N;;;;; +188E3;TANGUT COMPONENT-228;Lo;0;L;;;;;N;;;;; +188E4;TANGUT COMPONENT-229;Lo;0;L;;;;;N;;;;; +188E5;TANGUT COMPONENT-230;Lo;0;L;;;;;N;;;;; +188E6;TANGUT COMPONENT-231;Lo;0;L;;;;;N;;;;; +188E7;TANGUT COMPONENT-232;Lo;0;L;;;;;N;;;;; +188E8;TANGUT COMPONENT-233;Lo;0;L;;;;;N;;;;; +188E9;TANGUT COMPONENT-234;Lo;0;L;;;;;N;;;;; +188EA;TANGUT COMPONENT-235;Lo;0;L;;;;;N;;;;; +188EB;TANGUT COMPONENT-236;Lo;0;L;;;;;N;;;;; +188EC;TANGUT COMPONENT-237;Lo;0;L;;;;;N;;;;; +188ED;TANGUT COMPONENT-238;Lo;0;L;;;;;N;;;;; +188EE;TANGUT COMPONENT-239;Lo;0;L;;;;;N;;;;; +188EF;TANGUT COMPONENT-240;Lo;0;L;;;;;N;;;;; +188F0;TANGUT COMPONENT-241;Lo;0;L;;;;;N;;;;; +188F1;TANGUT COMPONENT-242;Lo;0;L;;;;;N;;;;; +188F2;TANGUT COMPONENT-243;Lo;0;L;;;;;N;;;;; +188F3;TANGUT COMPONENT-244;Lo;0;L;;;;;N;;;;; +188F4;TANGUT COMPONENT-245;Lo;0;L;;;;;N;;;;; +188F5;TANGUT COMPONENT-246;Lo;0;L;;;;;N;;;;; +188F6;TANGUT COMPONENT-247;Lo;0;L;;;;;N;;;;; +188F7;TANGUT 
COMPONENT-248;Lo;0;L;;;;;N;;;;; +188F8;TANGUT COMPONENT-249;Lo;0;L;;;;;N;;;;; +188F9;TANGUT COMPONENT-250;Lo;0;L;;;;;N;;;;; +188FA;TANGUT COMPONENT-251;Lo;0;L;;;;;N;;;;; +188FB;TANGUT COMPONENT-252;Lo;0;L;;;;;N;;;;; +188FC;TANGUT COMPONENT-253;Lo;0;L;;;;;N;;;;; +188FD;TANGUT COMPONENT-254;Lo;0;L;;;;;N;;;;; +188FE;TANGUT COMPONENT-255;Lo;0;L;;;;;N;;;;; +188FF;TANGUT COMPONENT-256;Lo;0;L;;;;;N;;;;; +18900;TANGUT COMPONENT-257;Lo;0;L;;;;;N;;;;; +18901;TANGUT COMPONENT-258;Lo;0;L;;;;;N;;;;; +18902;TANGUT COMPONENT-259;Lo;0;L;;;;;N;;;;; +18903;TANGUT COMPONENT-260;Lo;0;L;;;;;N;;;;; +18904;TANGUT COMPONENT-261;Lo;0;L;;;;;N;;;;; +18905;TANGUT COMPONENT-262;Lo;0;L;;;;;N;;;;; +18906;TANGUT COMPONENT-263;Lo;0;L;;;;;N;;;;; +18907;TANGUT COMPONENT-264;Lo;0;L;;;;;N;;;;; +18908;TANGUT COMPONENT-265;Lo;0;L;;;;;N;;;;; +18909;TANGUT COMPONENT-266;Lo;0;L;;;;;N;;;;; +1890A;TANGUT COMPONENT-267;Lo;0;L;;;;;N;;;;; +1890B;TANGUT COMPONENT-268;Lo;0;L;;;;;N;;;;; +1890C;TANGUT COMPONENT-269;Lo;0;L;;;;;N;;;;; +1890D;TANGUT COMPONENT-270;Lo;0;L;;;;;N;;;;; +1890E;TANGUT COMPONENT-271;Lo;0;L;;;;;N;;;;; +1890F;TANGUT COMPONENT-272;Lo;0;L;;;;;N;;;;; +18910;TANGUT COMPONENT-273;Lo;0;L;;;;;N;;;;; +18911;TANGUT COMPONENT-274;Lo;0;L;;;;;N;;;;; +18912;TANGUT COMPONENT-275;Lo;0;L;;;;;N;;;;; +18913;TANGUT COMPONENT-276;Lo;0;L;;;;;N;;;;; +18914;TANGUT COMPONENT-277;Lo;0;L;;;;;N;;;;; +18915;TANGUT COMPONENT-278;Lo;0;L;;;;;N;;;;; +18916;TANGUT COMPONENT-279;Lo;0;L;;;;;N;;;;; +18917;TANGUT COMPONENT-280;Lo;0;L;;;;;N;;;;; +18918;TANGUT COMPONENT-281;Lo;0;L;;;;;N;;;;; +18919;TANGUT COMPONENT-282;Lo;0;L;;;;;N;;;;; +1891A;TANGUT COMPONENT-283;Lo;0;L;;;;;N;;;;; +1891B;TANGUT COMPONENT-284;Lo;0;L;;;;;N;;;;; +1891C;TANGUT COMPONENT-285;Lo;0;L;;;;;N;;;;; +1891D;TANGUT COMPONENT-286;Lo;0;L;;;;;N;;;;; +1891E;TANGUT COMPONENT-287;Lo;0;L;;;;;N;;;;; +1891F;TANGUT COMPONENT-288;Lo;0;L;;;;;N;;;;; +18920;TANGUT COMPONENT-289;Lo;0;L;;;;;N;;;;; +18921;TANGUT COMPONENT-290;Lo;0;L;;;;;N;;;;; +18922;TANGUT COMPONENT-291;Lo;0;L;;;;;N;;;;; +18923;TANGUT COMPONENT-292;Lo;0;L;;;;;N;;;;; +18924;TANGUT COMPONENT-293;Lo;0;L;;;;;N;;;;; +18925;TANGUT COMPONENT-294;Lo;0;L;;;;;N;;;;; +18926;TANGUT COMPONENT-295;Lo;0;L;;;;;N;;;;; +18927;TANGUT COMPONENT-296;Lo;0;L;;;;;N;;;;; +18928;TANGUT COMPONENT-297;Lo;0;L;;;;;N;;;;; +18929;TANGUT COMPONENT-298;Lo;0;L;;;;;N;;;;; +1892A;TANGUT COMPONENT-299;Lo;0;L;;;;;N;;;;; +1892B;TANGUT COMPONENT-300;Lo;0;L;;;;;N;;;;; +1892C;TANGUT COMPONENT-301;Lo;0;L;;;;;N;;;;; +1892D;TANGUT COMPONENT-302;Lo;0;L;;;;;N;;;;; +1892E;TANGUT COMPONENT-303;Lo;0;L;;;;;N;;;;; +1892F;TANGUT COMPONENT-304;Lo;0;L;;;;;N;;;;; +18930;TANGUT COMPONENT-305;Lo;0;L;;;;;N;;;;; +18931;TANGUT COMPONENT-306;Lo;0;L;;;;;N;;;;; +18932;TANGUT COMPONENT-307;Lo;0;L;;;;;N;;;;; +18933;TANGUT COMPONENT-308;Lo;0;L;;;;;N;;;;; +18934;TANGUT COMPONENT-309;Lo;0;L;;;;;N;;;;; +18935;TANGUT COMPONENT-310;Lo;0;L;;;;;N;;;;; +18936;TANGUT COMPONENT-311;Lo;0;L;;;;;N;;;;; +18937;TANGUT COMPONENT-312;Lo;0;L;;;;;N;;;;; +18938;TANGUT COMPONENT-313;Lo;0;L;;;;;N;;;;; +18939;TANGUT COMPONENT-314;Lo;0;L;;;;;N;;;;; +1893A;TANGUT COMPONENT-315;Lo;0;L;;;;;N;;;;; +1893B;TANGUT COMPONENT-316;Lo;0;L;;;;;N;;;;; +1893C;TANGUT COMPONENT-317;Lo;0;L;;;;;N;;;;; +1893D;TANGUT COMPONENT-318;Lo;0;L;;;;;N;;;;; +1893E;TANGUT COMPONENT-319;Lo;0;L;;;;;N;;;;; +1893F;TANGUT COMPONENT-320;Lo;0;L;;;;;N;;;;; +18940;TANGUT COMPONENT-321;Lo;0;L;;;;;N;;;;; +18941;TANGUT COMPONENT-322;Lo;0;L;;;;;N;;;;; +18942;TANGUT COMPONENT-323;Lo;0;L;;;;;N;;;;; +18943;TANGUT COMPONENT-324;Lo;0;L;;;;;N;;;;; +18944;TANGUT 
COMPONENT-325;Lo;0;L;;;;;N;;;;; +18945;TANGUT COMPONENT-326;Lo;0;L;;;;;N;;;;; +18946;TANGUT COMPONENT-327;Lo;0;L;;;;;N;;;;; +18947;TANGUT COMPONENT-328;Lo;0;L;;;;;N;;;;; +18948;TANGUT COMPONENT-329;Lo;0;L;;;;;N;;;;; +18949;TANGUT COMPONENT-330;Lo;0;L;;;;;N;;;;; +1894A;TANGUT COMPONENT-331;Lo;0;L;;;;;N;;;;; +1894B;TANGUT COMPONENT-332;Lo;0;L;;;;;N;;;;; +1894C;TANGUT COMPONENT-333;Lo;0;L;;;;;N;;;;; +1894D;TANGUT COMPONENT-334;Lo;0;L;;;;;N;;;;; +1894E;TANGUT COMPONENT-335;Lo;0;L;;;;;N;;;;; +1894F;TANGUT COMPONENT-336;Lo;0;L;;;;;N;;;;; +18950;TANGUT COMPONENT-337;Lo;0;L;;;;;N;;;;; +18951;TANGUT COMPONENT-338;Lo;0;L;;;;;N;;;;; +18952;TANGUT COMPONENT-339;Lo;0;L;;;;;N;;;;; +18953;TANGUT COMPONENT-340;Lo;0;L;;;;;N;;;;; +18954;TANGUT COMPONENT-341;Lo;0;L;;;;;N;;;;; +18955;TANGUT COMPONENT-342;Lo;0;L;;;;;N;;;;; +18956;TANGUT COMPONENT-343;Lo;0;L;;;;;N;;;;; +18957;TANGUT COMPONENT-344;Lo;0;L;;;;;N;;;;; +18958;TANGUT COMPONENT-345;Lo;0;L;;;;;N;;;;; +18959;TANGUT COMPONENT-346;Lo;0;L;;;;;N;;;;; +1895A;TANGUT COMPONENT-347;Lo;0;L;;;;;N;;;;; +1895B;TANGUT COMPONENT-348;Lo;0;L;;;;;N;;;;; +1895C;TANGUT COMPONENT-349;Lo;0;L;;;;;N;;;;; +1895D;TANGUT COMPONENT-350;Lo;0;L;;;;;N;;;;; +1895E;TANGUT COMPONENT-351;Lo;0;L;;;;;N;;;;; +1895F;TANGUT COMPONENT-352;Lo;0;L;;;;;N;;;;; +18960;TANGUT COMPONENT-353;Lo;0;L;;;;;N;;;;; +18961;TANGUT COMPONENT-354;Lo;0;L;;;;;N;;;;; +18962;TANGUT COMPONENT-355;Lo;0;L;;;;;N;;;;; +18963;TANGUT COMPONENT-356;Lo;0;L;;;;;N;;;;; +18964;TANGUT COMPONENT-357;Lo;0;L;;;;;N;;;;; +18965;TANGUT COMPONENT-358;Lo;0;L;;;;;N;;;;; +18966;TANGUT COMPONENT-359;Lo;0;L;;;;;N;;;;; +18967;TANGUT COMPONENT-360;Lo;0;L;;;;;N;;;;; +18968;TANGUT COMPONENT-361;Lo;0;L;;;;;N;;;;; +18969;TANGUT COMPONENT-362;Lo;0;L;;;;;N;;;;; +1896A;TANGUT COMPONENT-363;Lo;0;L;;;;;N;;;;; +1896B;TANGUT COMPONENT-364;Lo;0;L;;;;;N;;;;; +1896C;TANGUT COMPONENT-365;Lo;0;L;;;;;N;;;;; +1896D;TANGUT COMPONENT-366;Lo;0;L;;;;;N;;;;; +1896E;TANGUT COMPONENT-367;Lo;0;L;;;;;N;;;;; +1896F;TANGUT COMPONENT-368;Lo;0;L;;;;;N;;;;; +18970;TANGUT COMPONENT-369;Lo;0;L;;;;;N;;;;; +18971;TANGUT COMPONENT-370;Lo;0;L;;;;;N;;;;; +18972;TANGUT COMPONENT-371;Lo;0;L;;;;;N;;;;; +18973;TANGUT COMPONENT-372;Lo;0;L;;;;;N;;;;; +18974;TANGUT COMPONENT-373;Lo;0;L;;;;;N;;;;; +18975;TANGUT COMPONENT-374;Lo;0;L;;;;;N;;;;; +18976;TANGUT COMPONENT-375;Lo;0;L;;;;;N;;;;; +18977;TANGUT COMPONENT-376;Lo;0;L;;;;;N;;;;; +18978;TANGUT COMPONENT-377;Lo;0;L;;;;;N;;;;; +18979;TANGUT COMPONENT-378;Lo;0;L;;;;;N;;;;; +1897A;TANGUT COMPONENT-379;Lo;0;L;;;;;N;;;;; +1897B;TANGUT COMPONENT-380;Lo;0;L;;;;;N;;;;; +1897C;TANGUT COMPONENT-381;Lo;0;L;;;;;N;;;;; +1897D;TANGUT COMPONENT-382;Lo;0;L;;;;;N;;;;; +1897E;TANGUT COMPONENT-383;Lo;0;L;;;;;N;;;;; +1897F;TANGUT COMPONENT-384;Lo;0;L;;;;;N;;;;; +18980;TANGUT COMPONENT-385;Lo;0;L;;;;;N;;;;; +18981;TANGUT COMPONENT-386;Lo;0;L;;;;;N;;;;; +18982;TANGUT COMPONENT-387;Lo;0;L;;;;;N;;;;; +18983;TANGUT COMPONENT-388;Lo;0;L;;;;;N;;;;; +18984;TANGUT COMPONENT-389;Lo;0;L;;;;;N;;;;; +18985;TANGUT COMPONENT-390;Lo;0;L;;;;;N;;;;; +18986;TANGUT COMPONENT-391;Lo;0;L;;;;;N;;;;; +18987;TANGUT COMPONENT-392;Lo;0;L;;;;;N;;;;; +18988;TANGUT COMPONENT-393;Lo;0;L;;;;;N;;;;; +18989;TANGUT COMPONENT-394;Lo;0;L;;;;;N;;;;; +1898A;TANGUT COMPONENT-395;Lo;0;L;;;;;N;;;;; +1898B;TANGUT COMPONENT-396;Lo;0;L;;;;;N;;;;; +1898C;TANGUT COMPONENT-397;Lo;0;L;;;;;N;;;;; +1898D;TANGUT COMPONENT-398;Lo;0;L;;;;;N;;;;; +1898E;TANGUT COMPONENT-399;Lo;0;L;;;;;N;;;;; +1898F;TANGUT COMPONENT-400;Lo;0;L;;;;;N;;;;; +18990;TANGUT COMPONENT-401;Lo;0;L;;;;;N;;;;; +18991;TANGUT 
COMPONENT-402;Lo;0;L;;;;;N;;;;; +18992;TANGUT COMPONENT-403;Lo;0;L;;;;;N;;;;; +18993;TANGUT COMPONENT-404;Lo;0;L;;;;;N;;;;; +18994;TANGUT COMPONENT-405;Lo;0;L;;;;;N;;;;; +18995;TANGUT COMPONENT-406;Lo;0;L;;;;;N;;;;; +18996;TANGUT COMPONENT-407;Lo;0;L;;;;;N;;;;; +18997;TANGUT COMPONENT-408;Lo;0;L;;;;;N;;;;; +18998;TANGUT COMPONENT-409;Lo;0;L;;;;;N;;;;; +18999;TANGUT COMPONENT-410;Lo;0;L;;;;;N;;;;; +1899A;TANGUT COMPONENT-411;Lo;0;L;;;;;N;;;;; +1899B;TANGUT COMPONENT-412;Lo;0;L;;;;;N;;;;; +1899C;TANGUT COMPONENT-413;Lo;0;L;;;;;N;;;;; +1899D;TANGUT COMPONENT-414;Lo;0;L;;;;;N;;;;; +1899E;TANGUT COMPONENT-415;Lo;0;L;;;;;N;;;;; +1899F;TANGUT COMPONENT-416;Lo;0;L;;;;;N;;;;; +189A0;TANGUT COMPONENT-417;Lo;0;L;;;;;N;;;;; +189A1;TANGUT COMPONENT-418;Lo;0;L;;;;;N;;;;; +189A2;TANGUT COMPONENT-419;Lo;0;L;;;;;N;;;;; +189A3;TANGUT COMPONENT-420;Lo;0;L;;;;;N;;;;; +189A4;TANGUT COMPONENT-421;Lo;0;L;;;;;N;;;;; +189A5;TANGUT COMPONENT-422;Lo;0;L;;;;;N;;;;; +189A6;TANGUT COMPONENT-423;Lo;0;L;;;;;N;;;;; +189A7;TANGUT COMPONENT-424;Lo;0;L;;;;;N;;;;; +189A8;TANGUT COMPONENT-425;Lo;0;L;;;;;N;;;;; +189A9;TANGUT COMPONENT-426;Lo;0;L;;;;;N;;;;; +189AA;TANGUT COMPONENT-427;Lo;0;L;;;;;N;;;;; +189AB;TANGUT COMPONENT-428;Lo;0;L;;;;;N;;;;; +189AC;TANGUT COMPONENT-429;Lo;0;L;;;;;N;;;;; +189AD;TANGUT COMPONENT-430;Lo;0;L;;;;;N;;;;; +189AE;TANGUT COMPONENT-431;Lo;0;L;;;;;N;;;;; +189AF;TANGUT COMPONENT-432;Lo;0;L;;;;;N;;;;; +189B0;TANGUT COMPONENT-433;Lo;0;L;;;;;N;;;;; +189B1;TANGUT COMPONENT-434;Lo;0;L;;;;;N;;;;; +189B2;TANGUT COMPONENT-435;Lo;0;L;;;;;N;;;;; +189B3;TANGUT COMPONENT-436;Lo;0;L;;;;;N;;;;; +189B4;TANGUT COMPONENT-437;Lo;0;L;;;;;N;;;;; +189B5;TANGUT COMPONENT-438;Lo;0;L;;;;;N;;;;; +189B6;TANGUT COMPONENT-439;Lo;0;L;;;;;N;;;;; +189B7;TANGUT COMPONENT-440;Lo;0;L;;;;;N;;;;; +189B8;TANGUT COMPONENT-441;Lo;0;L;;;;;N;;;;; +189B9;TANGUT COMPONENT-442;Lo;0;L;;;;;N;;;;; +189BA;TANGUT COMPONENT-443;Lo;0;L;;;;;N;;;;; +189BB;TANGUT COMPONENT-444;Lo;0;L;;;;;N;;;;; +189BC;TANGUT COMPONENT-445;Lo;0;L;;;;;N;;;;; +189BD;TANGUT COMPONENT-446;Lo;0;L;;;;;N;;;;; +189BE;TANGUT COMPONENT-447;Lo;0;L;;;;;N;;;;; +189BF;TANGUT COMPONENT-448;Lo;0;L;;;;;N;;;;; +189C0;TANGUT COMPONENT-449;Lo;0;L;;;;;N;;;;; +189C1;TANGUT COMPONENT-450;Lo;0;L;;;;;N;;;;; +189C2;TANGUT COMPONENT-451;Lo;0;L;;;;;N;;;;; +189C3;TANGUT COMPONENT-452;Lo;0;L;;;;;N;;;;; +189C4;TANGUT COMPONENT-453;Lo;0;L;;;;;N;;;;; +189C5;TANGUT COMPONENT-454;Lo;0;L;;;;;N;;;;; +189C6;TANGUT COMPONENT-455;Lo;0;L;;;;;N;;;;; +189C7;TANGUT COMPONENT-456;Lo;0;L;;;;;N;;;;; +189C8;TANGUT COMPONENT-457;Lo;0;L;;;;;N;;;;; +189C9;TANGUT COMPONENT-458;Lo;0;L;;;;;N;;;;; +189CA;TANGUT COMPONENT-459;Lo;0;L;;;;;N;;;;; +189CB;TANGUT COMPONENT-460;Lo;0;L;;;;;N;;;;; +189CC;TANGUT COMPONENT-461;Lo;0;L;;;;;N;;;;; +189CD;TANGUT COMPONENT-462;Lo;0;L;;;;;N;;;;; +189CE;TANGUT COMPONENT-463;Lo;0;L;;;;;N;;;;; +189CF;TANGUT COMPONENT-464;Lo;0;L;;;;;N;;;;; +189D0;TANGUT COMPONENT-465;Lo;0;L;;;;;N;;;;; +189D1;TANGUT COMPONENT-466;Lo;0;L;;;;;N;;;;; +189D2;TANGUT COMPONENT-467;Lo;0;L;;;;;N;;;;; +189D3;TANGUT COMPONENT-468;Lo;0;L;;;;;N;;;;; +189D4;TANGUT COMPONENT-469;Lo;0;L;;;;;N;;;;; +189D5;TANGUT COMPONENT-470;Lo;0;L;;;;;N;;;;; +189D6;TANGUT COMPONENT-471;Lo;0;L;;;;;N;;;;; +189D7;TANGUT COMPONENT-472;Lo;0;L;;;;;N;;;;; +189D8;TANGUT COMPONENT-473;Lo;0;L;;;;;N;;;;; +189D9;TANGUT COMPONENT-474;Lo;0;L;;;;;N;;;;; +189DA;TANGUT COMPONENT-475;Lo;0;L;;;;;N;;;;; +189DB;TANGUT COMPONENT-476;Lo;0;L;;;;;N;;;;; +189DC;TANGUT COMPONENT-477;Lo;0;L;;;;;N;;;;; +189DD;TANGUT COMPONENT-478;Lo;0;L;;;;;N;;;;; +189DE;TANGUT 
COMPONENT-479;Lo;0;L;;;;;N;;;;; +189DF;TANGUT COMPONENT-480;Lo;0;L;;;;;N;;;;; +189E0;TANGUT COMPONENT-481;Lo;0;L;;;;;N;;;;; +189E1;TANGUT COMPONENT-482;Lo;0;L;;;;;N;;;;; +189E2;TANGUT COMPONENT-483;Lo;0;L;;;;;N;;;;; +189E3;TANGUT COMPONENT-484;Lo;0;L;;;;;N;;;;; +189E4;TANGUT COMPONENT-485;Lo;0;L;;;;;N;;;;; +189E5;TANGUT COMPONENT-486;Lo;0;L;;;;;N;;;;; +189E6;TANGUT COMPONENT-487;Lo;0;L;;;;;N;;;;; +189E7;TANGUT COMPONENT-488;Lo;0;L;;;;;N;;;;; +189E8;TANGUT COMPONENT-489;Lo;0;L;;;;;N;;;;; +189E9;TANGUT COMPONENT-490;Lo;0;L;;;;;N;;;;; +189EA;TANGUT COMPONENT-491;Lo;0;L;;;;;N;;;;; +189EB;TANGUT COMPONENT-492;Lo;0;L;;;;;N;;;;; +189EC;TANGUT COMPONENT-493;Lo;0;L;;;;;N;;;;; +189ED;TANGUT COMPONENT-494;Lo;0;L;;;;;N;;;;; +189EE;TANGUT COMPONENT-495;Lo;0;L;;;;;N;;;;; +189EF;TANGUT COMPONENT-496;Lo;0;L;;;;;N;;;;; +189F0;TANGUT COMPONENT-497;Lo;0;L;;;;;N;;;;; +189F1;TANGUT COMPONENT-498;Lo;0;L;;;;;N;;;;; +189F2;TANGUT COMPONENT-499;Lo;0;L;;;;;N;;;;; +189F3;TANGUT COMPONENT-500;Lo;0;L;;;;;N;;;;; +189F4;TANGUT COMPONENT-501;Lo;0;L;;;;;N;;;;; +189F5;TANGUT COMPONENT-502;Lo;0;L;;;;;N;;;;; +189F6;TANGUT COMPONENT-503;Lo;0;L;;;;;N;;;;; +189F7;TANGUT COMPONENT-504;Lo;0;L;;;;;N;;;;; +189F8;TANGUT COMPONENT-505;Lo;0;L;;;;;N;;;;; +189F9;TANGUT COMPONENT-506;Lo;0;L;;;;;N;;;;; +189FA;TANGUT COMPONENT-507;Lo;0;L;;;;;N;;;;; +189FB;TANGUT COMPONENT-508;Lo;0;L;;;;;N;;;;; +189FC;TANGUT COMPONENT-509;Lo;0;L;;;;;N;;;;; +189FD;TANGUT COMPONENT-510;Lo;0;L;;;;;N;;;;; +189FE;TANGUT COMPONENT-511;Lo;0;L;;;;;N;;;;; +189FF;TANGUT COMPONENT-512;Lo;0;L;;;;;N;;;;; +18A00;TANGUT COMPONENT-513;Lo;0;L;;;;;N;;;;; +18A01;TANGUT COMPONENT-514;Lo;0;L;;;;;N;;;;; +18A02;TANGUT COMPONENT-515;Lo;0;L;;;;;N;;;;; +18A03;TANGUT COMPONENT-516;Lo;0;L;;;;;N;;;;; +18A04;TANGUT COMPONENT-517;Lo;0;L;;;;;N;;;;; +18A05;TANGUT COMPONENT-518;Lo;0;L;;;;;N;;;;; +18A06;TANGUT COMPONENT-519;Lo;0;L;;;;;N;;;;; +18A07;TANGUT COMPONENT-520;Lo;0;L;;;;;N;;;;; +18A08;TANGUT COMPONENT-521;Lo;0;L;;;;;N;;;;; +18A09;TANGUT COMPONENT-522;Lo;0;L;;;;;N;;;;; +18A0A;TANGUT COMPONENT-523;Lo;0;L;;;;;N;;;;; +18A0B;TANGUT COMPONENT-524;Lo;0;L;;;;;N;;;;; +18A0C;TANGUT COMPONENT-525;Lo;0;L;;;;;N;;;;; +18A0D;TANGUT COMPONENT-526;Lo;0;L;;;;;N;;;;; +18A0E;TANGUT COMPONENT-527;Lo;0;L;;;;;N;;;;; +18A0F;TANGUT COMPONENT-528;Lo;0;L;;;;;N;;;;; +18A10;TANGUT COMPONENT-529;Lo;0;L;;;;;N;;;;; +18A11;TANGUT COMPONENT-530;Lo;0;L;;;;;N;;;;; +18A12;TANGUT COMPONENT-531;Lo;0;L;;;;;N;;;;; +18A13;TANGUT COMPONENT-532;Lo;0;L;;;;;N;;;;; +18A14;TANGUT COMPONENT-533;Lo;0;L;;;;;N;;;;; +18A15;TANGUT COMPONENT-534;Lo;0;L;;;;;N;;;;; +18A16;TANGUT COMPONENT-535;Lo;0;L;;;;;N;;;;; +18A17;TANGUT COMPONENT-536;Lo;0;L;;;;;N;;;;; +18A18;TANGUT COMPONENT-537;Lo;0;L;;;;;N;;;;; +18A19;TANGUT COMPONENT-538;Lo;0;L;;;;;N;;;;; +18A1A;TANGUT COMPONENT-539;Lo;0;L;;;;;N;;;;; +18A1B;TANGUT COMPONENT-540;Lo;0;L;;;;;N;;;;; +18A1C;TANGUT COMPONENT-541;Lo;0;L;;;;;N;;;;; +18A1D;TANGUT COMPONENT-542;Lo;0;L;;;;;N;;;;; +18A1E;TANGUT COMPONENT-543;Lo;0;L;;;;;N;;;;; +18A1F;TANGUT COMPONENT-544;Lo;0;L;;;;;N;;;;; +18A20;TANGUT COMPONENT-545;Lo;0;L;;;;;N;;;;; +18A21;TANGUT COMPONENT-546;Lo;0;L;;;;;N;;;;; +18A22;TANGUT COMPONENT-547;Lo;0;L;;;;;N;;;;; +18A23;TANGUT COMPONENT-548;Lo;0;L;;;;;N;;;;; +18A24;TANGUT COMPONENT-549;Lo;0;L;;;;;N;;;;; +18A25;TANGUT COMPONENT-550;Lo;0;L;;;;;N;;;;; +18A26;TANGUT COMPONENT-551;Lo;0;L;;;;;N;;;;; +18A27;TANGUT COMPONENT-552;Lo;0;L;;;;;N;;;;; +18A28;TANGUT COMPONENT-553;Lo;0;L;;;;;N;;;;; +18A29;TANGUT COMPONENT-554;Lo;0;L;;;;;N;;;;; +18A2A;TANGUT COMPONENT-555;Lo;0;L;;;;;N;;;;; +18A2B;TANGUT 
COMPONENT-556;Lo;0;L;;;;;N;;;;; +18A2C;TANGUT COMPONENT-557;Lo;0;L;;;;;N;;;;; +18A2D;TANGUT COMPONENT-558;Lo;0;L;;;;;N;;;;; +18A2E;TANGUT COMPONENT-559;Lo;0;L;;;;;N;;;;; +18A2F;TANGUT COMPONENT-560;Lo;0;L;;;;;N;;;;; +18A30;TANGUT COMPONENT-561;Lo;0;L;;;;;N;;;;; +18A31;TANGUT COMPONENT-562;Lo;0;L;;;;;N;;;;; +18A32;TANGUT COMPONENT-563;Lo;0;L;;;;;N;;;;; +18A33;TANGUT COMPONENT-564;Lo;0;L;;;;;N;;;;; +18A34;TANGUT COMPONENT-565;Lo;0;L;;;;;N;;;;; +18A35;TANGUT COMPONENT-566;Lo;0;L;;;;;N;;;;; +18A36;TANGUT COMPONENT-567;Lo;0;L;;;;;N;;;;; +18A37;TANGUT COMPONENT-568;Lo;0;L;;;;;N;;;;; +18A38;TANGUT COMPONENT-569;Lo;0;L;;;;;N;;;;; +18A39;TANGUT COMPONENT-570;Lo;0;L;;;;;N;;;;; +18A3A;TANGUT COMPONENT-571;Lo;0;L;;;;;N;;;;; +18A3B;TANGUT COMPONENT-572;Lo;0;L;;;;;N;;;;; +18A3C;TANGUT COMPONENT-573;Lo;0;L;;;;;N;;;;; +18A3D;TANGUT COMPONENT-574;Lo;0;L;;;;;N;;;;; +18A3E;TANGUT COMPONENT-575;Lo;0;L;;;;;N;;;;; +18A3F;TANGUT COMPONENT-576;Lo;0;L;;;;;N;;;;; +18A40;TANGUT COMPONENT-577;Lo;0;L;;;;;N;;;;; +18A41;TANGUT COMPONENT-578;Lo;0;L;;;;;N;;;;; +18A42;TANGUT COMPONENT-579;Lo;0;L;;;;;N;;;;; +18A43;TANGUT COMPONENT-580;Lo;0;L;;;;;N;;;;; +18A44;TANGUT COMPONENT-581;Lo;0;L;;;;;N;;;;; +18A45;TANGUT COMPONENT-582;Lo;0;L;;;;;N;;;;; +18A46;TANGUT COMPONENT-583;Lo;0;L;;;;;N;;;;; +18A47;TANGUT COMPONENT-584;Lo;0;L;;;;;N;;;;; +18A48;TANGUT COMPONENT-585;Lo;0;L;;;;;N;;;;; +18A49;TANGUT COMPONENT-586;Lo;0;L;;;;;N;;;;; +18A4A;TANGUT COMPONENT-587;Lo;0;L;;;;;N;;;;; +18A4B;TANGUT COMPONENT-588;Lo;0;L;;;;;N;;;;; +18A4C;TANGUT COMPONENT-589;Lo;0;L;;;;;N;;;;; +18A4D;TANGUT COMPONENT-590;Lo;0;L;;;;;N;;;;; +18A4E;TANGUT COMPONENT-591;Lo;0;L;;;;;N;;;;; +18A4F;TANGUT COMPONENT-592;Lo;0;L;;;;;N;;;;; +18A50;TANGUT COMPONENT-593;Lo;0;L;;;;;N;;;;; +18A51;TANGUT COMPONENT-594;Lo;0;L;;;;;N;;;;; +18A52;TANGUT COMPONENT-595;Lo;0;L;;;;;N;;;;; +18A53;TANGUT COMPONENT-596;Lo;0;L;;;;;N;;;;; +18A54;TANGUT COMPONENT-597;Lo;0;L;;;;;N;;;;; +18A55;TANGUT COMPONENT-598;Lo;0;L;;;;;N;;;;; +18A56;TANGUT COMPONENT-599;Lo;0;L;;;;;N;;;;; +18A57;TANGUT COMPONENT-600;Lo;0;L;;;;;N;;;;; +18A58;TANGUT COMPONENT-601;Lo;0;L;;;;;N;;;;; +18A59;TANGUT COMPONENT-602;Lo;0;L;;;;;N;;;;; +18A5A;TANGUT COMPONENT-603;Lo;0;L;;;;;N;;;;; +18A5B;TANGUT COMPONENT-604;Lo;0;L;;;;;N;;;;; +18A5C;TANGUT COMPONENT-605;Lo;0;L;;;;;N;;;;; +18A5D;TANGUT COMPONENT-606;Lo;0;L;;;;;N;;;;; +18A5E;TANGUT COMPONENT-607;Lo;0;L;;;;;N;;;;; +18A5F;TANGUT COMPONENT-608;Lo;0;L;;;;;N;;;;; +18A60;TANGUT COMPONENT-609;Lo;0;L;;;;;N;;;;; +18A61;TANGUT COMPONENT-610;Lo;0;L;;;;;N;;;;; +18A62;TANGUT COMPONENT-611;Lo;0;L;;;;;N;;;;; +18A63;TANGUT COMPONENT-612;Lo;0;L;;;;;N;;;;; +18A64;TANGUT COMPONENT-613;Lo;0;L;;;;;N;;;;; +18A65;TANGUT COMPONENT-614;Lo;0;L;;;;;N;;;;; +18A66;TANGUT COMPONENT-615;Lo;0;L;;;;;N;;;;; +18A67;TANGUT COMPONENT-616;Lo;0;L;;;;;N;;;;; +18A68;TANGUT COMPONENT-617;Lo;0;L;;;;;N;;;;; +18A69;TANGUT COMPONENT-618;Lo;0;L;;;;;N;;;;; +18A6A;TANGUT COMPONENT-619;Lo;0;L;;;;;N;;;;; +18A6B;TANGUT COMPONENT-620;Lo;0;L;;;;;N;;;;; +18A6C;TANGUT COMPONENT-621;Lo;0;L;;;;;N;;;;; +18A6D;TANGUT COMPONENT-622;Lo;0;L;;;;;N;;;;; +18A6E;TANGUT COMPONENT-623;Lo;0;L;;;;;N;;;;; +18A6F;TANGUT COMPONENT-624;Lo;0;L;;;;;N;;;;; +18A70;TANGUT COMPONENT-625;Lo;0;L;;;;;N;;;;; +18A71;TANGUT COMPONENT-626;Lo;0;L;;;;;N;;;;; +18A72;TANGUT COMPONENT-627;Lo;0;L;;;;;N;;;;; +18A73;TANGUT COMPONENT-628;Lo;0;L;;;;;N;;;;; +18A74;TANGUT COMPONENT-629;Lo;0;L;;;;;N;;;;; +18A75;TANGUT COMPONENT-630;Lo;0;L;;;;;N;;;;; +18A76;TANGUT COMPONENT-631;Lo;0;L;;;;;N;;;;; +18A77;TANGUT COMPONENT-632;Lo;0;L;;;;;N;;;;; +18A78;TANGUT 
COMPONENT-633;Lo;0;L;;;;;N;;;;; +18A79;TANGUT COMPONENT-634;Lo;0;L;;;;;N;;;;; +18A7A;TANGUT COMPONENT-635;Lo;0;L;;;;;N;;;;; +18A7B;TANGUT COMPONENT-636;Lo;0;L;;;;;N;;;;; +18A7C;TANGUT COMPONENT-637;Lo;0;L;;;;;N;;;;; +18A7D;TANGUT COMPONENT-638;Lo;0;L;;;;;N;;;;; +18A7E;TANGUT COMPONENT-639;Lo;0;L;;;;;N;;;;; +18A7F;TANGUT COMPONENT-640;Lo;0;L;;;;;N;;;;; +18A80;TANGUT COMPONENT-641;Lo;0;L;;;;;N;;;;; +18A81;TANGUT COMPONENT-642;Lo;0;L;;;;;N;;;;; +18A82;TANGUT COMPONENT-643;Lo;0;L;;;;;N;;;;; +18A83;TANGUT COMPONENT-644;Lo;0;L;;;;;N;;;;; +18A84;TANGUT COMPONENT-645;Lo;0;L;;;;;N;;;;; +18A85;TANGUT COMPONENT-646;Lo;0;L;;;;;N;;;;; +18A86;TANGUT COMPONENT-647;Lo;0;L;;;;;N;;;;; +18A87;TANGUT COMPONENT-648;Lo;0;L;;;;;N;;;;; +18A88;TANGUT COMPONENT-649;Lo;0;L;;;;;N;;;;; +18A89;TANGUT COMPONENT-650;Lo;0;L;;;;;N;;;;; +18A8A;TANGUT COMPONENT-651;Lo;0;L;;;;;N;;;;; +18A8B;TANGUT COMPONENT-652;Lo;0;L;;;;;N;;;;; +18A8C;TANGUT COMPONENT-653;Lo;0;L;;;;;N;;;;; +18A8D;TANGUT COMPONENT-654;Lo;0;L;;;;;N;;;;; +18A8E;TANGUT COMPONENT-655;Lo;0;L;;;;;N;;;;; +18A8F;TANGUT COMPONENT-656;Lo;0;L;;;;;N;;;;; +18A90;TANGUT COMPONENT-657;Lo;0;L;;;;;N;;;;; +18A91;TANGUT COMPONENT-658;Lo;0;L;;;;;N;;;;; +18A92;TANGUT COMPONENT-659;Lo;0;L;;;;;N;;;;; +18A93;TANGUT COMPONENT-660;Lo;0;L;;;;;N;;;;; +18A94;TANGUT COMPONENT-661;Lo;0;L;;;;;N;;;;; +18A95;TANGUT COMPONENT-662;Lo;0;L;;;;;N;;;;; +18A96;TANGUT COMPONENT-663;Lo;0;L;;;;;N;;;;; +18A97;TANGUT COMPONENT-664;Lo;0;L;;;;;N;;;;; +18A98;TANGUT COMPONENT-665;Lo;0;L;;;;;N;;;;; +18A99;TANGUT COMPONENT-666;Lo;0;L;;;;;N;;;;; +18A9A;TANGUT COMPONENT-667;Lo;0;L;;;;;N;;;;; +18A9B;TANGUT COMPONENT-668;Lo;0;L;;;;;N;;;;; +18A9C;TANGUT COMPONENT-669;Lo;0;L;;;;;N;;;;; +18A9D;TANGUT COMPONENT-670;Lo;0;L;;;;;N;;;;; +18A9E;TANGUT COMPONENT-671;Lo;0;L;;;;;N;;;;; +18A9F;TANGUT COMPONENT-672;Lo;0;L;;;;;N;;;;; +18AA0;TANGUT COMPONENT-673;Lo;0;L;;;;;N;;;;; +18AA1;TANGUT COMPONENT-674;Lo;0;L;;;;;N;;;;; +18AA2;TANGUT COMPONENT-675;Lo;0;L;;;;;N;;;;; +18AA3;TANGUT COMPONENT-676;Lo;0;L;;;;;N;;;;; +18AA4;TANGUT COMPONENT-677;Lo;0;L;;;;;N;;;;; +18AA5;TANGUT COMPONENT-678;Lo;0;L;;;;;N;;;;; +18AA6;TANGUT COMPONENT-679;Lo;0;L;;;;;N;;;;; +18AA7;TANGUT COMPONENT-680;Lo;0;L;;;;;N;;;;; +18AA8;TANGUT COMPONENT-681;Lo;0;L;;;;;N;;;;; +18AA9;TANGUT COMPONENT-682;Lo;0;L;;;;;N;;;;; +18AAA;TANGUT COMPONENT-683;Lo;0;L;;;;;N;;;;; +18AAB;TANGUT COMPONENT-684;Lo;0;L;;;;;N;;;;; +18AAC;TANGUT COMPONENT-685;Lo;0;L;;;;;N;;;;; +18AAD;TANGUT COMPONENT-686;Lo;0;L;;;;;N;;;;; +18AAE;TANGUT COMPONENT-687;Lo;0;L;;;;;N;;;;; +18AAF;TANGUT COMPONENT-688;Lo;0;L;;;;;N;;;;; +18AB0;TANGUT COMPONENT-689;Lo;0;L;;;;;N;;;;; +18AB1;TANGUT COMPONENT-690;Lo;0;L;;;;;N;;;;; +18AB2;TANGUT COMPONENT-691;Lo;0;L;;;;;N;;;;; +18AB3;TANGUT COMPONENT-692;Lo;0;L;;;;;N;;;;; +18AB4;TANGUT COMPONENT-693;Lo;0;L;;;;;N;;;;; +18AB5;TANGUT COMPONENT-694;Lo;0;L;;;;;N;;;;; +18AB6;TANGUT COMPONENT-695;Lo;0;L;;;;;N;;;;; +18AB7;TANGUT COMPONENT-696;Lo;0;L;;;;;N;;;;; +18AB8;TANGUT COMPONENT-697;Lo;0;L;;;;;N;;;;; +18AB9;TANGUT COMPONENT-698;Lo;0;L;;;;;N;;;;; +18ABA;TANGUT COMPONENT-699;Lo;0;L;;;;;N;;;;; +18ABB;TANGUT COMPONENT-700;Lo;0;L;;;;;N;;;;; +18ABC;TANGUT COMPONENT-701;Lo;0;L;;;;;N;;;;; +18ABD;TANGUT COMPONENT-702;Lo;0;L;;;;;N;;;;; +18ABE;TANGUT COMPONENT-703;Lo;0;L;;;;;N;;;;; +18ABF;TANGUT COMPONENT-704;Lo;0;L;;;;;N;;;;; +18AC0;TANGUT COMPONENT-705;Lo;0;L;;;;;N;;;;; +18AC1;TANGUT COMPONENT-706;Lo;0;L;;;;;N;;;;; +18AC2;TANGUT COMPONENT-707;Lo;0;L;;;;;N;;;;; +18AC3;TANGUT COMPONENT-708;Lo;0;L;;;;;N;;;;; +18AC4;TANGUT COMPONENT-709;Lo;0;L;;;;;N;;;;; +18AC5;TANGUT 
COMPONENT-710;Lo;0;L;;;;;N;;;;; +18AC6;TANGUT COMPONENT-711;Lo;0;L;;;;;N;;;;; +18AC7;TANGUT COMPONENT-712;Lo;0;L;;;;;N;;;;; +18AC8;TANGUT COMPONENT-713;Lo;0;L;;;;;N;;;;; +18AC9;TANGUT COMPONENT-714;Lo;0;L;;;;;N;;;;; +18ACA;TANGUT COMPONENT-715;Lo;0;L;;;;;N;;;;; +18ACB;TANGUT COMPONENT-716;Lo;0;L;;;;;N;;;;; +18ACC;TANGUT COMPONENT-717;Lo;0;L;;;;;N;;;;; +18ACD;TANGUT COMPONENT-718;Lo;0;L;;;;;N;;;;; +18ACE;TANGUT COMPONENT-719;Lo;0;L;;;;;N;;;;; +18ACF;TANGUT COMPONENT-720;Lo;0;L;;;;;N;;;;; +18AD0;TANGUT COMPONENT-721;Lo;0;L;;;;;N;;;;; +18AD1;TANGUT COMPONENT-722;Lo;0;L;;;;;N;;;;; +18AD2;TANGUT COMPONENT-723;Lo;0;L;;;;;N;;;;; +18AD3;TANGUT COMPONENT-724;Lo;0;L;;;;;N;;;;; +18AD4;TANGUT COMPONENT-725;Lo;0;L;;;;;N;;;;; +18AD5;TANGUT COMPONENT-726;Lo;0;L;;;;;N;;;;; +18AD6;TANGUT COMPONENT-727;Lo;0;L;;;;;N;;;;; +18AD7;TANGUT COMPONENT-728;Lo;0;L;;;;;N;;;;; +18AD8;TANGUT COMPONENT-729;Lo;0;L;;;;;N;;;;; +18AD9;TANGUT COMPONENT-730;Lo;0;L;;;;;N;;;;; +18ADA;TANGUT COMPONENT-731;Lo;0;L;;;;;N;;;;; +18ADB;TANGUT COMPONENT-732;Lo;0;L;;;;;N;;;;; +18ADC;TANGUT COMPONENT-733;Lo;0;L;;;;;N;;;;; +18ADD;TANGUT COMPONENT-734;Lo;0;L;;;;;N;;;;; +18ADE;TANGUT COMPONENT-735;Lo;0;L;;;;;N;;;;; +18ADF;TANGUT COMPONENT-736;Lo;0;L;;;;;N;;;;; +18AE0;TANGUT COMPONENT-737;Lo;0;L;;;;;N;;;;; +18AE1;TANGUT COMPONENT-738;Lo;0;L;;;;;N;;;;; +18AE2;TANGUT COMPONENT-739;Lo;0;L;;;;;N;;;;; +18AE3;TANGUT COMPONENT-740;Lo;0;L;;;;;N;;;;; +18AE4;TANGUT COMPONENT-741;Lo;0;L;;;;;N;;;;; +18AE5;TANGUT COMPONENT-742;Lo;0;L;;;;;N;;;;; +18AE6;TANGUT COMPONENT-743;Lo;0;L;;;;;N;;;;; +18AE7;TANGUT COMPONENT-744;Lo;0;L;;;;;N;;;;; +18AE8;TANGUT COMPONENT-745;Lo;0;L;;;;;N;;;;; +18AE9;TANGUT COMPONENT-746;Lo;0;L;;;;;N;;;;; +18AEA;TANGUT COMPONENT-747;Lo;0;L;;;;;N;;;;; +18AEB;TANGUT COMPONENT-748;Lo;0;L;;;;;N;;;;; +18AEC;TANGUT COMPONENT-749;Lo;0;L;;;;;N;;;;; +18AED;TANGUT COMPONENT-750;Lo;0;L;;;;;N;;;;; +18AEE;TANGUT COMPONENT-751;Lo;0;L;;;;;N;;;;; +18AEF;TANGUT COMPONENT-752;Lo;0;L;;;;;N;;;;; +18AF0;TANGUT COMPONENT-753;Lo;0;L;;;;;N;;;;; +18AF1;TANGUT COMPONENT-754;Lo;0;L;;;;;N;;;;; +18AF2;TANGUT COMPONENT-755;Lo;0;L;;;;;N;;;;; 1B000;KATAKANA LETTER ARCHAIC E;Lo;0;L;;;;;N;;;;; 1B001;HIRAGANA LETTER ARCHAIC YE;Lo;0;L;;;;;N;;;;; 1BC00;DUPLOYAN LETTER H;Lo;0;L;;;;;N;;;;; @@ -23080,6 +25459,17 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1D1DB;MUSICAL SYMBOL SCANDICUS FLEXUS;So;0;L;;;;;N;;;;; 1D1DC;MUSICAL SYMBOL TORCULUS RESUPINUS;So;0;L;;;;;N;;;;; 1D1DD;MUSICAL SYMBOL PES SUBPUNCTIS;So;0;L;;;;;N;;;;; +1D1DE;MUSICAL SYMBOL KIEVAN C CLEF;So;0;L;;;;;N;;;;; +1D1DF;MUSICAL SYMBOL KIEVAN END OF PIECE;So;0;L;;;;;N;;;;; +1D1E0;MUSICAL SYMBOL KIEVAN FINAL NOTE;So;0;L;;;;;N;;;;; +1D1E1;MUSICAL SYMBOL KIEVAN RECITATIVE MARK;So;0;L;;;;;N;;;;; +1D1E2;MUSICAL SYMBOL KIEVAN WHOLE NOTE;So;0;L;;;;;N;;;;; +1D1E3;MUSICAL SYMBOL KIEVAN HALF NOTE;So;0;L;;;;;N;;;;; +1D1E4;MUSICAL SYMBOL KIEVAN QUARTER NOTE STEM DOWN;So;0;L;;;;;N;;;;; +1D1E5;MUSICAL SYMBOL KIEVAN QUARTER NOTE STEM UP;So;0;L;;;;;N;;;;; +1D1E6;MUSICAL SYMBOL KIEVAN EIGHTH NOTE STEM DOWN;So;0;L;;;;;N;;;;; +1D1E7;MUSICAL SYMBOL KIEVAN EIGHTH NOTE STEM UP;So;0;L;;;;;N;;;;; +1D1E8;MUSICAL SYMBOL KIEVAN FLAT SIGN;So;0;L;;;;;N;;;;; 1D200;GREEK VOCAL NOTATION SYMBOL-1;So;0;ON;;;;;N;;;;; 1D201;GREEK VOCAL NOTATION SYMBOL-2;So;0;ON;;;;;N;;;;; 1D202;GREEK VOCAL NOTATION SYMBOL-3;So;0;ON;;;;;N;;;;; @@ -24251,6 +26641,716 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1D7FD;MATHEMATICAL MONOSPACE DIGIT SEVEN;Nd;0;EN; 0037;7;7;7;N;;;;; 1D7FE;MATHEMATICAL MONOSPACE DIGIT EIGHT;Nd;0;EN; 0038;8;8;8;N;;;;; 
1D7FF;MATHEMATICAL MONOSPACE DIGIT NINE;Nd;0;EN; 0039;9;9;9;N;;;;; +1D800;SIGNWRITING HAND-FIST INDEX;So;0;L;;;;;N;;;;; +1D801;SIGNWRITING HAND-CIRCLE INDEX;So;0;L;;;;;N;;;;; +1D802;SIGNWRITING HAND-CUP INDEX;So;0;L;;;;;N;;;;; +1D803;SIGNWRITING HAND-OVAL INDEX;So;0;L;;;;;N;;;;; +1D804;SIGNWRITING HAND-HINGE INDEX;So;0;L;;;;;N;;;;; +1D805;SIGNWRITING HAND-ANGLE INDEX;So;0;L;;;;;N;;;;; +1D806;SIGNWRITING HAND-FIST INDEX BENT;So;0;L;;;;;N;;;;; +1D807;SIGNWRITING HAND-CIRCLE INDEX BENT;So;0;L;;;;;N;;;;; +1D808;SIGNWRITING HAND-FIST THUMB UNDER INDEX BENT;So;0;L;;;;;N;;;;; +1D809;SIGNWRITING HAND-FIST INDEX RAISED KNUCKLE;So;0;L;;;;;N;;;;; +1D80A;SIGNWRITING HAND-FIST INDEX CUPPED;So;0;L;;;;;N;;;;; +1D80B;SIGNWRITING HAND-FIST INDEX HINGED;So;0;L;;;;;N;;;;; +1D80C;SIGNWRITING HAND-FIST INDEX HINGED LOW;So;0;L;;;;;N;;;;; +1D80D;SIGNWRITING HAND-CIRCLE INDEX HINGE;So;0;L;;;;;N;;;;; +1D80E;SIGNWRITING HAND-FIST INDEX MIDDLE;So;0;L;;;;;N;;;;; +1D80F;SIGNWRITING HAND-CIRCLE INDEX MIDDLE;So;0;L;;;;;N;;;;; +1D810;SIGNWRITING HAND-FIST INDEX MIDDLE BENT;So;0;L;;;;;N;;;;; +1D811;SIGNWRITING HAND-FIST INDEX MIDDLE RAISED KNUCKLES;So;0;L;;;;;N;;;;; +1D812;SIGNWRITING HAND-FIST INDEX MIDDLE HINGED;So;0;L;;;;;N;;;;; +1D813;SIGNWRITING HAND-FIST INDEX UP MIDDLE HINGED;So;0;L;;;;;N;;;;; +1D814;SIGNWRITING HAND-FIST INDEX HINGED MIDDLE UP;So;0;L;;;;;N;;;;; +1D815;SIGNWRITING HAND-FIST INDEX MIDDLE CONJOINED;So;0;L;;;;;N;;;;; +1D816;SIGNWRITING HAND-FIST INDEX MIDDLE CONJOINED INDEX BENT;So;0;L;;;;;N;;;;; +1D817;SIGNWRITING HAND-FIST INDEX MIDDLE CONJOINED MIDDLE BENT;So;0;L;;;;;N;;;;; +1D818;SIGNWRITING HAND-FIST INDEX MIDDLE CONJOINED CUPPED;So;0;L;;;;;N;;;;; +1D819;SIGNWRITING HAND-FIST INDEX MIDDLE CONJOINED HINGED;So;0;L;;;;;N;;;;; +1D81A;SIGNWRITING HAND-FIST INDEX MIDDLE CROSSED;So;0;L;;;;;N;;;;; +1D81B;SIGNWRITING HAND-CIRCLE INDEX MIDDLE CROSSED;So;0;L;;;;;N;;;;; +1D81C;SIGNWRITING HAND-FIST MIDDLE BENT OVER INDEX;So;0;L;;;;;N;;;;; +1D81D;SIGNWRITING HAND-FIST INDEX BENT OVER MIDDLE;So;0;L;;;;;N;;;;; +1D81E;SIGNWRITING HAND-FIST INDEX MIDDLE THUMB;So;0;L;;;;;N;;;;; +1D81F;SIGNWRITING HAND-CIRCLE INDEX MIDDLE THUMB;So;0;L;;;;;N;;;;; +1D820;SIGNWRITING HAND-FIST INDEX MIDDLE STRAIGHT THUMB BENT;So;0;L;;;;;N;;;;; +1D821;SIGNWRITING HAND-FIST INDEX MIDDLE BENT THUMB STRAIGHT;So;0;L;;;;;N;;;;; +1D822;SIGNWRITING HAND-FIST INDEX MIDDLE THUMB BENT;So;0;L;;;;;N;;;;; +1D823;SIGNWRITING HAND-FIST INDEX MIDDLE HINGED SPREAD THUMB SIDE;So;0;L;;;;;N;;;;; +1D824;SIGNWRITING HAND-FIST INDEX UP MIDDLE HINGED THUMB SIDE;So;0;L;;;;;N;;;;; +1D825;SIGNWRITING HAND-FIST INDEX UP MIDDLE HINGED THUMB CONJOINED;So;0;L;;;;;N;;;;; +1D826;SIGNWRITING HAND-FIST INDEX HINGED MIDDLE UP THUMB SIDE;So;0;L;;;;;N;;;;; +1D827;SIGNWRITING HAND-FIST INDEX MIDDLE UP SPREAD THUMB FORWARD;So;0;L;;;;;N;;;;; +1D828;SIGNWRITING HAND-FIST INDEX MIDDLE THUMB CUPPED;So;0;L;;;;;N;;;;; +1D829;SIGNWRITING HAND-FIST INDEX MIDDLE THUMB CIRCLED;So;0;L;;;;;N;;;;; +1D82A;SIGNWRITING HAND-FIST INDEX MIDDLE THUMB HOOKED;So;0;L;;;;;N;;;;; +1D82B;SIGNWRITING HAND-FIST INDEX MIDDLE THUMB HINGED;So;0;L;;;;;N;;;;; +1D82C;SIGNWRITING HAND-FIST THUMB BETWEEN INDEX MIDDLE STRAIGHT;So;0;L;;;;;N;;;;; +1D82D;SIGNWRITING HAND-FIST INDEX MIDDLE CONJOINED THUMB SIDE;So;0;L;;;;;N;;;;; +1D82E;SIGNWRITING HAND-FIST INDEX MIDDLE CONJOINED THUMB SIDE CONJOINED;So;0;L;;;;;N;;;;; +1D82F;SIGNWRITING HAND-FIST INDEX MIDDLE CONJOINED THUMB SIDE BENT;So;0;L;;;;;N;;;;; +1D830;SIGNWRITING HAND-FIST MIDDLE THUMB HOOKED INDEX UP;So;0;L;;;;;N;;;;; +1D831;SIGNWRITING HAND-FIST INDEX 
THUMB HOOKED MIDDLE UP;So;0;L;;;;;N;;;;; +1D832;SIGNWRITING HAND-FIST INDEX MIDDLE CONJOINED HINGED THUMB SIDE;So;0;L;;;;;N;;;;; +1D833;SIGNWRITING HAND-FIST INDEX MIDDLE CROSSED THUMB SIDE;So;0;L;;;;;N;;;;; +1D834;SIGNWRITING HAND-FIST INDEX MIDDLE CONJOINED THUMB FORWARD;So;0;L;;;;;N;;;;; +1D835;SIGNWRITING HAND-FIST INDEX MIDDLE CONJOINED CUPPED THUMB FORWARD;So;0;L;;;;;N;;;;; +1D836;SIGNWRITING HAND-FIST MIDDLE THUMB CUPPED INDEX UP;So;0;L;;;;;N;;;;; +1D837;SIGNWRITING HAND-FIST INDEX THUMB CUPPED MIDDLE UP;So;0;L;;;;;N;;;;; +1D838;SIGNWRITING HAND-FIST MIDDLE THUMB CIRCLED INDEX UP;So;0;L;;;;;N;;;;; +1D839;SIGNWRITING HAND-FIST MIDDLE THUMB CIRCLED INDEX HINGED;So;0;L;;;;;N;;;;; +1D83A;SIGNWRITING HAND-FIST INDEX THUMB ANGLED OUT MIDDLE UP;So;0;L;;;;;N;;;;; +1D83B;SIGNWRITING HAND-FIST INDEX THUMB ANGLED IN MIDDLE UP;So;0;L;;;;;N;;;;; +1D83C;SIGNWRITING HAND-FIST INDEX THUMB CIRCLED MIDDLE UP;So;0;L;;;;;N;;;;; +1D83D;SIGNWRITING HAND-FIST INDEX MIDDLE THUMB CONJOINED HINGED;So;0;L;;;;;N;;;;; +1D83E;SIGNWRITING HAND-FIST INDEX MIDDLE THUMB ANGLED OUT;So;0;L;;;;;N;;;;; +1D83F;SIGNWRITING HAND-FIST INDEX MIDDLE THUMB ANGLED;So;0;L;;;;;N;;;;; +1D840;SIGNWRITING HAND-FIST MIDDLE THUMB ANGLED OUT INDEX UP;So;0;L;;;;;N;;;;; +1D841;SIGNWRITING HAND-FIST MIDDLE THUMB ANGLED OUT INDEX CROSSED;So;0;L;;;;;N;;;;; +1D842;SIGNWRITING HAND-FIST MIDDLE THUMB ANGLED INDEX UP;So;0;L;;;;;N;;;;; +1D843;SIGNWRITING HAND-FIST INDEX THUMB HOOKED MIDDLE HINGED;So;0;L;;;;;N;;;;; +1D844;SIGNWRITING HAND-FLAT FOUR FINGERS;So;0;L;;;;;N;;;;; +1D845;SIGNWRITING HAND-FLAT FOUR FINGERS BENT;So;0;L;;;;;N;;;;; +1D846;SIGNWRITING HAND-FLAT FOUR FINGERS HINGED;So;0;L;;;;;N;;;;; +1D847;SIGNWRITING HAND-FLAT FOUR FINGERS CONJOINED;So;0;L;;;;;N;;;;; +1D848;SIGNWRITING HAND-FLAT FOUR FINGERS CONJOINED SPLIT;So;0;L;;;;;N;;;;; +1D849;SIGNWRITING HAND-CLAW FOUR FINGERS CONJOINED;So;0;L;;;;;N;;;;; +1D84A;SIGNWRITING HAND-FIST FOUR FINGERS CONJOINED BENT;So;0;L;;;;;N;;;;; +1D84B;SIGNWRITING HAND-HINGE FOUR FINGERS CONJOINED;So;0;L;;;;;N;;;;; +1D84C;SIGNWRITING HAND-FLAT FIVE FINGERS SPREAD;So;0;L;;;;;N;;;;; +1D84D;SIGNWRITING HAND-FLAT HEEL FIVE FINGERS SPREAD;So;0;L;;;;;N;;;;; +1D84E;SIGNWRITING HAND-FLAT FIVE FINGERS SPREAD FOUR BENT;So;0;L;;;;;N;;;;; +1D84F;SIGNWRITING HAND-FLAT HEEL FIVE FINGERS SPREAD FOUR BENT;So;0;L;;;;;N;;;;; +1D850;SIGNWRITING HAND-FLAT FIVE FINGERS SPREAD BENT;So;0;L;;;;;N;;;;; +1D851;SIGNWRITING HAND-FLAT HEEL FIVE FINGERS SPREAD BENT;So;0;L;;;;;N;;;;; +1D852;SIGNWRITING HAND-FLAT FIVE FINGERS SPREAD THUMB FORWARD;So;0;L;;;;;N;;;;; +1D853;SIGNWRITING HAND-CUP FIVE FINGERS SPREAD;So;0;L;;;;;N;;;;; +1D854;SIGNWRITING HAND-CUP FIVE FINGERS SPREAD OPEN;So;0;L;;;;;N;;;;; +1D855;SIGNWRITING HAND-HINGE FIVE FINGERS SPREAD OPEN;So;0;L;;;;;N;;;;; +1D856;SIGNWRITING HAND-OVAL FIVE FINGERS SPREAD;So;0;L;;;;;N;;;;; +1D857;SIGNWRITING HAND-FLAT FIVE FINGERS SPREAD HINGED;So;0;L;;;;;N;;;;; +1D858;SIGNWRITING HAND-FLAT FIVE FINGERS SPREAD HINGED THUMB SIDE;So;0;L;;;;;N;;;;; +1D859;SIGNWRITING HAND-FLAT FIVE FINGERS SPREAD HINGED NO THUMB;So;0;L;;;;;N;;;;; +1D85A;SIGNWRITING HAND-FLAT;So;0;L;;;;;N;;;;; +1D85B;SIGNWRITING HAND-FLAT BETWEEN PALM FACINGS;So;0;L;;;;;N;;;;; +1D85C;SIGNWRITING HAND-FLAT HEEL;So;0;L;;;;;N;;;;; +1D85D;SIGNWRITING HAND-FLAT THUMB SIDE;So;0;L;;;;;N;;;;; +1D85E;SIGNWRITING HAND-FLAT HEEL THUMB SIDE;So;0;L;;;;;N;;;;; +1D85F;SIGNWRITING HAND-FLAT THUMB BENT;So;0;L;;;;;N;;;;; +1D860;SIGNWRITING HAND-FLAT THUMB FORWARD;So;0;L;;;;;N;;;;; +1D861;SIGNWRITING HAND-FLAT SPLIT INDEX THUMB 
SIDE;So;0;L;;;;;N;;;;; +1D862;SIGNWRITING HAND-FLAT SPLIT CENTRE;So;0;L;;;;;N;;;;; +1D863;SIGNWRITING HAND-FLAT SPLIT CENTRE THUMB SIDE;So;0;L;;;;;N;;;;; +1D864;SIGNWRITING HAND-FLAT SPLIT CENTRE THUMB SIDE BENT;So;0;L;;;;;N;;;;; +1D865;SIGNWRITING HAND-FLAT SPLIT LITTLE;So;0;L;;;;;N;;;;; +1D866;SIGNWRITING HAND-CLAW;So;0;L;;;;;N;;;;; +1D867;SIGNWRITING HAND-CLAW THUMB SIDE;So;0;L;;;;;N;;;;; +1D868;SIGNWRITING HAND-CLAW NO THUMB;So;0;L;;;;;N;;;;; +1D869;SIGNWRITING HAND-CLAW THUMB FORWARD;So;0;L;;;;;N;;;;; +1D86A;SIGNWRITING HAND-HOOK CURLICUE;So;0;L;;;;;N;;;;; +1D86B;SIGNWRITING HAND-HOOK;So;0;L;;;;;N;;;;; +1D86C;SIGNWRITING HAND-CUP OPEN;So;0;L;;;;;N;;;;; +1D86D;SIGNWRITING HAND-CUP;So;0;L;;;;;N;;;;; +1D86E;SIGNWRITING HAND-CUP OPEN THUMB SIDE;So;0;L;;;;;N;;;;; +1D86F;SIGNWRITING HAND-CUP THUMB SIDE;So;0;L;;;;;N;;;;; +1D870;SIGNWRITING HAND-CUP OPEN NO THUMB;So;0;L;;;;;N;;;;; +1D871;SIGNWRITING HAND-CUP NO THUMB;So;0;L;;;;;N;;;;; +1D872;SIGNWRITING HAND-CUP OPEN THUMB FORWARD;So;0;L;;;;;N;;;;; +1D873;SIGNWRITING HAND-CUP THUMB FORWARD;So;0;L;;;;;N;;;;; +1D874;SIGNWRITING HAND-CURLICUE OPEN;So;0;L;;;;;N;;;;; +1D875;SIGNWRITING HAND-CURLICUE;So;0;L;;;;;N;;;;; +1D876;SIGNWRITING HAND-CIRCLE;So;0;L;;;;;N;;;;; +1D877;SIGNWRITING HAND-OVAL;So;0;L;;;;;N;;;;; +1D878;SIGNWRITING HAND-OVAL THUMB SIDE;So;0;L;;;;;N;;;;; +1D879;SIGNWRITING HAND-OVAL NO THUMB;So;0;L;;;;;N;;;;; +1D87A;SIGNWRITING HAND-OVAL THUMB FORWARD;So;0;L;;;;;N;;;;; +1D87B;SIGNWRITING HAND-HINGE OPEN;So;0;L;;;;;N;;;;; +1D87C;SIGNWRITING HAND-HINGE OPEN THUMB FORWARD;So;0;L;;;;;N;;;;; +1D87D;SIGNWRITING HAND-HINGE;So;0;L;;;;;N;;;;; +1D87E;SIGNWRITING HAND-HINGE SMALL;So;0;L;;;;;N;;;;; +1D87F;SIGNWRITING HAND-HINGE OPEN THUMB SIDE;So;0;L;;;;;N;;;;; +1D880;SIGNWRITING HAND-HINGE THUMB SIDE;So;0;L;;;;;N;;;;; +1D881;SIGNWRITING HAND-HINGE OPEN NO THUMB;So;0;L;;;;;N;;;;; +1D882;SIGNWRITING HAND-HINGE NO THUMB;So;0;L;;;;;N;;;;; +1D883;SIGNWRITING HAND-HINGE THUMB SIDE TOUCHING INDEX;So;0;L;;;;;N;;;;; +1D884;SIGNWRITING HAND-HINGE THUMB BETWEEN MIDDLE RING;So;0;L;;;;;N;;;;; +1D885;SIGNWRITING HAND-ANGLE;So;0;L;;;;;N;;;;; +1D886;SIGNWRITING HAND-FIST INDEX MIDDLE RING;So;0;L;;;;;N;;;;; +1D887;SIGNWRITING HAND-CIRCLE INDEX MIDDLE RING;So;0;L;;;;;N;;;;; +1D888;SIGNWRITING HAND-HINGE INDEX MIDDLE RING;So;0;L;;;;;N;;;;; +1D889;SIGNWRITING HAND-ANGLE INDEX MIDDLE RING;So;0;L;;;;;N;;;;; +1D88A;SIGNWRITING HAND-HINGE LITTLE;So;0;L;;;;;N;;;;; +1D88B;SIGNWRITING HAND-FIST INDEX MIDDLE RING BENT;So;0;L;;;;;N;;;;; +1D88C;SIGNWRITING HAND-FIST INDEX MIDDLE RING CONJOINED;So;0;L;;;;;N;;;;; +1D88D;SIGNWRITING HAND-HINGE INDEX MIDDLE RING CONJOINED;So;0;L;;;;;N;;;;; +1D88E;SIGNWRITING HAND-FIST LITTLE DOWN;So;0;L;;;;;N;;;;; +1D88F;SIGNWRITING HAND-FIST LITTLE DOWN RIPPLE STRAIGHT;So;0;L;;;;;N;;;;; +1D890;SIGNWRITING HAND-FIST LITTLE DOWN RIPPLE CURVED;So;0;L;;;;;N;;;;; +1D891;SIGNWRITING HAND-FIST LITTLE DOWN OTHERS CIRCLED;So;0;L;;;;;N;;;;; +1D892;SIGNWRITING HAND-FIST LITTLE UP;So;0;L;;;;;N;;;;; +1D893;SIGNWRITING HAND-FIST THUMB UNDER LITTLE UP;So;0;L;;;;;N;;;;; +1D894;SIGNWRITING HAND-CIRCLE LITTLE UP;So;0;L;;;;;N;;;;; +1D895;SIGNWRITING HAND-OVAL LITTLE UP;So;0;L;;;;;N;;;;; +1D896;SIGNWRITING HAND-ANGLE LITTLE UP;So;0;L;;;;;N;;;;; +1D897;SIGNWRITING HAND-FIST LITTLE RAISED KNUCKLE;So;0;L;;;;;N;;;;; +1D898;SIGNWRITING HAND-FIST LITTLE BENT;So;0;L;;;;;N;;;;; +1D899;SIGNWRITING HAND-FIST LITTLE TOUCHES THUMB;So;0;L;;;;;N;;;;; +1D89A;SIGNWRITING HAND-FIST LITTLE THUMB;So;0;L;;;;;N;;;;; +1D89B;SIGNWRITING HAND-HINGE LITTLE THUMB;So;0;L;;;;;N;;;;; 
+1D89C;SIGNWRITING HAND-FIST LITTLE INDEX THUMB;So;0;L;;;;;N;;;;; +1D89D;SIGNWRITING HAND-HINGE LITTLE INDEX THUMB;So;0;L;;;;;N;;;;; +1D89E;SIGNWRITING HAND-ANGLE LITTLE INDEX THUMB INDEX THUMB OUT;So;0;L;;;;;N;;;;; +1D89F;SIGNWRITING HAND-ANGLE LITTLE INDEX THUMB INDEX THUMB;So;0;L;;;;;N;;;;; +1D8A0;SIGNWRITING HAND-FIST LITTLE INDEX;So;0;L;;;;;N;;;;; +1D8A1;SIGNWRITING HAND-CIRCLE LITTLE INDEX;So;0;L;;;;;N;;;;; +1D8A2;SIGNWRITING HAND-HINGE LITTLE INDEX;So;0;L;;;;;N;;;;; +1D8A3;SIGNWRITING HAND-ANGLE LITTLE INDEX;So;0;L;;;;;N;;;;; +1D8A4;SIGNWRITING HAND-FIST INDEX MIDDLE LITTLE;So;0;L;;;;;N;;;;; +1D8A5;SIGNWRITING HAND-CIRCLE INDEX MIDDLE LITTLE;So;0;L;;;;;N;;;;; +1D8A6;SIGNWRITING HAND-HINGE INDEX MIDDLE LITTLE;So;0;L;;;;;N;;;;; +1D8A7;SIGNWRITING HAND-HINGE RING;So;0;L;;;;;N;;;;; +1D8A8;SIGNWRITING HAND-ANGLE INDEX MIDDLE LITTLE;So;0;L;;;;;N;;;;; +1D8A9;SIGNWRITING HAND-FIST INDEX MIDDLE CROSS LITTLE;So;0;L;;;;;N;;;;; +1D8AA;SIGNWRITING HAND-CIRCLE INDEX MIDDLE CROSS LITTLE;So;0;L;;;;;N;;;;; +1D8AB;SIGNWRITING HAND-FIST RING DOWN;So;0;L;;;;;N;;;;; +1D8AC;SIGNWRITING HAND-HINGE RING DOWN INDEX THUMB HOOK MIDDLE;So;0;L;;;;;N;;;;; +1D8AD;SIGNWRITING HAND-ANGLE RING DOWN MIDDLE THUMB INDEX CROSS;So;0;L;;;;;N;;;;; +1D8AE;SIGNWRITING HAND-FIST RING UP;So;0;L;;;;;N;;;;; +1D8AF;SIGNWRITING HAND-FIST RING RAISED KNUCKLE;So;0;L;;;;;N;;;;; +1D8B0;SIGNWRITING HAND-FIST RING LITTLE;So;0;L;;;;;N;;;;; +1D8B1;SIGNWRITING HAND-CIRCLE RING LITTLE;So;0;L;;;;;N;;;;; +1D8B2;SIGNWRITING HAND-OVAL RING LITTLE;So;0;L;;;;;N;;;;; +1D8B3;SIGNWRITING HAND-ANGLE RING LITTLE;So;0;L;;;;;N;;;;; +1D8B4;SIGNWRITING HAND-FIST RING MIDDLE;So;0;L;;;;;N;;;;; +1D8B5;SIGNWRITING HAND-FIST RING MIDDLE CONJOINED;So;0;L;;;;;N;;;;; +1D8B6;SIGNWRITING HAND-FIST RING MIDDLE RAISED KNUCKLES;So;0;L;;;;;N;;;;; +1D8B7;SIGNWRITING HAND-FIST RING INDEX;So;0;L;;;;;N;;;;; +1D8B8;SIGNWRITING HAND-FIST RING THUMB;So;0;L;;;;;N;;;;; +1D8B9;SIGNWRITING HAND-HOOK RING THUMB;So;0;L;;;;;N;;;;; +1D8BA;SIGNWRITING HAND-FIST INDEX RING LITTLE;So;0;L;;;;;N;;;;; +1D8BB;SIGNWRITING HAND-CIRCLE INDEX RING LITTLE;So;0;L;;;;;N;;;;; +1D8BC;SIGNWRITING HAND-CURLICUE INDEX RING LITTLE ON;So;0;L;;;;;N;;;;; +1D8BD;SIGNWRITING HAND-HOOK INDEX RING LITTLE OUT;So;0;L;;;;;N;;;;; +1D8BE;SIGNWRITING HAND-HOOK INDEX RING LITTLE IN;So;0;L;;;;;N;;;;; +1D8BF;SIGNWRITING HAND-HOOK INDEX RING LITTLE UNDER;So;0;L;;;;;N;;;;; +1D8C0;SIGNWRITING HAND-CUP INDEX RING LITTLE;So;0;L;;;;;N;;;;; +1D8C1;SIGNWRITING HAND-HINGE INDEX RING LITTLE;So;0;L;;;;;N;;;;; +1D8C2;SIGNWRITING HAND-ANGLE INDEX RING LITTLE OUT;So;0;L;;;;;N;;;;; +1D8C3;SIGNWRITING HAND-ANGLE INDEX RING LITTLE;So;0;L;;;;;N;;;;; +1D8C4;SIGNWRITING HAND-FIST MIDDLE DOWN;So;0;L;;;;;N;;;;; +1D8C5;SIGNWRITING HAND-HINGE MIDDLE;So;0;L;;;;;N;;;;; +1D8C6;SIGNWRITING HAND-FIST MIDDLE UP;So;0;L;;;;;N;;;;; +1D8C7;SIGNWRITING HAND-CIRCLE MIDDLE UP;So;0;L;;;;;N;;;;; +1D8C8;SIGNWRITING HAND-FIST MIDDLE RAISED KNUCKLE;So;0;L;;;;;N;;;;; +1D8C9;SIGNWRITING HAND-FIST MIDDLE UP THUMB SIDE;So;0;L;;;;;N;;;;; +1D8CA;SIGNWRITING HAND-HOOK MIDDLE THUMB;So;0;L;;;;;N;;;;; +1D8CB;SIGNWRITING HAND-FIST MIDDLE THUMB LITTLE;So;0;L;;;;;N;;;;; +1D8CC;SIGNWRITING HAND-FIST MIDDLE LITTLE;So;0;L;;;;;N;;;;; +1D8CD;SIGNWRITING HAND-FIST MIDDLE RING LITTLE;So;0;L;;;;;N;;;;; +1D8CE;SIGNWRITING HAND-CIRCLE MIDDLE RING LITTLE;So;0;L;;;;;N;;;;; +1D8CF;SIGNWRITING HAND-CURLICUE MIDDLE RING LITTLE ON;So;0;L;;;;;N;;;;; +1D8D0;SIGNWRITING HAND-CUP MIDDLE RING LITTLE;So;0;L;;;;;N;;;;; +1D8D1;SIGNWRITING HAND-HINGE MIDDLE RING LITTLE;So;0;L;;;;;N;;;;; 
+1D8D2;SIGNWRITING HAND-ANGLE MIDDLE RING LITTLE OUT;So;0;L;;;;;N;;;;; +1D8D3;SIGNWRITING HAND-ANGLE MIDDLE RING LITTLE IN;So;0;L;;;;;N;;;;; +1D8D4;SIGNWRITING HAND-ANGLE MIDDLE RING LITTLE;So;0;L;;;;;N;;;;; +1D8D5;SIGNWRITING HAND-CIRCLE MIDDLE RING LITTLE BENT;So;0;L;;;;;N;;;;; +1D8D6;SIGNWRITING HAND-CLAW MIDDLE RING LITTLE CONJOINED;So;0;L;;;;;N;;;;; +1D8D7;SIGNWRITING HAND-CLAW MIDDLE RING LITTLE CONJOINED SIDE;So;0;L;;;;;N;;;;; +1D8D8;SIGNWRITING HAND-HOOK MIDDLE RING LITTLE CONJOINED OUT;So;0;L;;;;;N;;;;; +1D8D9;SIGNWRITING HAND-HOOK MIDDLE RING LITTLE CONJOINED IN;So;0;L;;;;;N;;;;; +1D8DA;SIGNWRITING HAND-HOOK MIDDLE RING LITTLE CONJOINED;So;0;L;;;;;N;;;;; +1D8DB;SIGNWRITING HAND-HINGE INDEX HINGED;So;0;L;;;;;N;;;;; +1D8DC;SIGNWRITING HAND-FIST INDEX THUMB SIDE;So;0;L;;;;;N;;;;; +1D8DD;SIGNWRITING HAND-HINGE INDEX THUMB SIDE;So;0;L;;;;;N;;;;; +1D8DE;SIGNWRITING HAND-FIST INDEX THUMB SIDE THUMB DIAGONAL;So;0;L;;;;;N;;;;; +1D8DF;SIGNWRITING HAND-FIST INDEX THUMB SIDE THUMB CONJOINED;So;0;L;;;;;N;;;;; +1D8E0;SIGNWRITING HAND-FIST INDEX THUMB SIDE THUMB BENT;So;0;L;;;;;N;;;;; +1D8E1;SIGNWRITING HAND-FIST INDEX THUMB SIDE INDEX BENT;So;0;L;;;;;N;;;;; +1D8E2;SIGNWRITING HAND-FIST INDEX THUMB SIDE BOTH BENT;So;0;L;;;;;N;;;;; +1D8E3;SIGNWRITING HAND-FIST INDEX THUMB SIDE INDEX HINGE;So;0;L;;;;;N;;;;; +1D8E4;SIGNWRITING HAND-FIST INDEX THUMB FORWARD INDEX STRAIGHT;So;0;L;;;;;N;;;;; +1D8E5;SIGNWRITING HAND-FIST INDEX THUMB FORWARD INDEX BENT;So;0;L;;;;;N;;;;; +1D8E6;SIGNWRITING HAND-FIST INDEX THUMB HOOK;So;0;L;;;;;N;;;;; +1D8E7;SIGNWRITING HAND-FIST INDEX THUMB CURLICUE;So;0;L;;;;;N;;;;; +1D8E8;SIGNWRITING HAND-FIST INDEX THUMB CURVE THUMB INSIDE;So;0;L;;;;;N;;;;; +1D8E9;SIGNWRITING HAND-CLAW INDEX THUMB CURVE THUMB INSIDE;So;0;L;;;;;N;;;;; +1D8EA;SIGNWRITING HAND-FIST INDEX THUMB CURVE THUMB UNDER;So;0;L;;;;;N;;;;; +1D8EB;SIGNWRITING HAND-FIST INDEX THUMB CIRCLE;So;0;L;;;;;N;;;;; +1D8EC;SIGNWRITING HAND-CUP INDEX THUMB;So;0;L;;;;;N;;;;; +1D8ED;SIGNWRITING HAND-CUP INDEX THUMB OPEN;So;0;L;;;;;N;;;;; +1D8EE;SIGNWRITING HAND-HINGE INDEX THUMB OPEN;So;0;L;;;;;N;;;;; +1D8EF;SIGNWRITING HAND-HINGE INDEX THUMB LARGE;So;0;L;;;;;N;;;;; +1D8F0;SIGNWRITING HAND-HINGE INDEX THUMB;So;0;L;;;;;N;;;;; +1D8F1;SIGNWRITING HAND-HINGE INDEX THUMB SMALL;So;0;L;;;;;N;;;;; +1D8F2;SIGNWRITING HAND-ANGLE INDEX THUMB OUT;So;0;L;;;;;N;;;;; +1D8F3;SIGNWRITING HAND-ANGLE INDEX THUMB IN;So;0;L;;;;;N;;;;; +1D8F4;SIGNWRITING HAND-ANGLE INDEX THUMB;So;0;L;;;;;N;;;;; +1D8F5;SIGNWRITING HAND-FIST THUMB;So;0;L;;;;;N;;;;; +1D8F6;SIGNWRITING HAND-FIST THUMB HEEL;So;0;L;;;;;N;;;;; +1D8F7;SIGNWRITING HAND-FIST THUMB SIDE DIAGONAL;So;0;L;;;;;N;;;;; +1D8F8;SIGNWRITING HAND-FIST THUMB SIDE CONJOINED;So;0;L;;;;;N;;;;; +1D8F9;SIGNWRITING HAND-FIST THUMB SIDE BENT;So;0;L;;;;;N;;;;; +1D8FA;SIGNWRITING HAND-FIST THUMB FORWARD;So;0;L;;;;;N;;;;; +1D8FB;SIGNWRITING HAND-FIST THUMB BETWEEN INDEX MIDDLE;So;0;L;;;;;N;;;;; +1D8FC;SIGNWRITING HAND-FIST THUMB BETWEEN MIDDLE RING;So;0;L;;;;;N;;;;; +1D8FD;SIGNWRITING HAND-FIST THUMB BETWEEN RING LITTLE;So;0;L;;;;;N;;;;; +1D8FE;SIGNWRITING HAND-FIST THUMB UNDER TWO FINGERS;So;0;L;;;;;N;;;;; +1D8FF;SIGNWRITING HAND-FIST THUMB OVER TWO FINGERS;So;0;L;;;;;N;;;;; +1D900;SIGNWRITING HAND-FIST THUMB UNDER THREE FINGERS;So;0;L;;;;;N;;;;; +1D901;SIGNWRITING HAND-FIST THUMB UNDER FOUR FINGERS;So;0;L;;;;;N;;;;; +1D902;SIGNWRITING HAND-FIST THUMB OVER FOUR RAISED KNUCKLES;So;0;L;;;;;N;;;;; +1D903;SIGNWRITING HAND-FIST;So;0;L;;;;;N;;;;; +1D904;SIGNWRITING HAND-FIST HEEL;So;0;L;;;;;N;;;;; +1D905;SIGNWRITING 
TOUCH SINGLE;So;0;L;;;;;N;;;;; +1D906;SIGNWRITING TOUCH MULTIPLE;So;0;L;;;;;N;;;;; +1D907;SIGNWRITING TOUCH BETWEEN;So;0;L;;;;;N;;;;; +1D908;SIGNWRITING GRASP SINGLE;So;0;L;;;;;N;;;;; +1D909;SIGNWRITING GRASP MULTIPLE;So;0;L;;;;;N;;;;; +1D90A;SIGNWRITING GRASP BETWEEN;So;0;L;;;;;N;;;;; +1D90B;SIGNWRITING STRIKE SINGLE;So;0;L;;;;;N;;;;; +1D90C;SIGNWRITING STRIKE MULTIPLE;So;0;L;;;;;N;;;;; +1D90D;SIGNWRITING STRIKE BETWEEN;So;0;L;;;;;N;;;;; +1D90E;SIGNWRITING BRUSH SINGLE;So;0;L;;;;;N;;;;; +1D90F;SIGNWRITING BRUSH MULTIPLE;So;0;L;;;;;N;;;;; +1D910;SIGNWRITING BRUSH BETWEEN;So;0;L;;;;;N;;;;; +1D911;SIGNWRITING RUB SINGLE;So;0;L;;;;;N;;;;; +1D912;SIGNWRITING RUB MULTIPLE;So;0;L;;;;;N;;;;; +1D913;SIGNWRITING RUB BETWEEN;So;0;L;;;;;N;;;;; +1D914;SIGNWRITING SURFACE SYMBOLS;So;0;L;;;;;N;;;;; +1D915;SIGNWRITING SURFACE BETWEEN;So;0;L;;;;;N;;;;; +1D916;SIGNWRITING SQUEEZE LARGE SINGLE;So;0;L;;;;;N;;;;; +1D917;SIGNWRITING SQUEEZE SMALL SINGLE;So;0;L;;;;;N;;;;; +1D918;SIGNWRITING SQUEEZE LARGE MULTIPLE;So;0;L;;;;;N;;;;; +1D919;SIGNWRITING SQUEEZE SMALL MULTIPLE;So;0;L;;;;;N;;;;; +1D91A;SIGNWRITING SQUEEZE SEQUENTIAL;So;0;L;;;;;N;;;;; +1D91B;SIGNWRITING FLICK LARGE SINGLE;So;0;L;;;;;N;;;;; +1D91C;SIGNWRITING FLICK SMALL SINGLE;So;0;L;;;;;N;;;;; +1D91D;SIGNWRITING FLICK LARGE MULTIPLE;So;0;L;;;;;N;;;;; +1D91E;SIGNWRITING FLICK SMALL MULTIPLE;So;0;L;;;;;N;;;;; +1D91F;SIGNWRITING FLICK SEQUENTIAL;So;0;L;;;;;N;;;;; +1D920;SIGNWRITING SQUEEZE FLICK ALTERNATING;So;0;L;;;;;N;;;;; +1D921;SIGNWRITING MOVEMENT-HINGE UP DOWN LARGE;So;0;L;;;;;N;;;;; +1D922;SIGNWRITING MOVEMENT-HINGE UP DOWN SMALL;So;0;L;;;;;N;;;;; +1D923;SIGNWRITING MOVEMENT-HINGE UP SEQUENTIAL;So;0;L;;;;;N;;;;; +1D924;SIGNWRITING MOVEMENT-HINGE DOWN SEQUENTIAL;So;0;L;;;;;N;;;;; +1D925;SIGNWRITING MOVEMENT-HINGE UP DOWN ALTERNATING LARGE;So;0;L;;;;;N;;;;; +1D926;SIGNWRITING MOVEMENT-HINGE UP DOWN ALTERNATING SMALL;So;0;L;;;;;N;;;;; +1D927;SIGNWRITING MOVEMENT-HINGE SIDE TO SIDE SCISSORS;So;0;L;;;;;N;;;;; +1D928;SIGNWRITING MOVEMENT-WALLPLANE FINGER CONTACT;So;0;L;;;;;N;;;;; +1D929;SIGNWRITING MOVEMENT-FLOORPLANE FINGER CONTACT;So;0;L;;;;;N;;;;; +1D92A;SIGNWRITING MOVEMENT-WALLPLANE SINGLE STRAIGHT SMALL;So;0;L;;;;;N;;;;; +1D92B;SIGNWRITING MOVEMENT-WALLPLANE SINGLE STRAIGHT MEDIUM;So;0;L;;;;;N;;;;; +1D92C;SIGNWRITING MOVEMENT-WALLPLANE SINGLE STRAIGHT LARGE;So;0;L;;;;;N;;;;; +1D92D;SIGNWRITING MOVEMENT-WALLPLANE SINGLE STRAIGHT LARGEST;So;0;L;;;;;N;;;;; +1D92E;SIGNWRITING MOVEMENT-WALLPLANE SINGLE WRIST FLEX;So;0;L;;;;;N;;;;; +1D92F;SIGNWRITING MOVEMENT-WALLPLANE DOUBLE STRAIGHT;So;0;L;;;;;N;;;;; +1D930;SIGNWRITING MOVEMENT-WALLPLANE DOUBLE WRIST FLEX;So;0;L;;;;;N;;;;; +1D931;SIGNWRITING MOVEMENT-WALLPLANE DOUBLE ALTERNATING;So;0;L;;;;;N;;;;; +1D932;SIGNWRITING MOVEMENT-WALLPLANE DOUBLE ALTERNATING WRIST FLEX;So;0;L;;;;;N;;;;; +1D933;SIGNWRITING MOVEMENT-WALLPLANE CROSS;So;0;L;;;;;N;;;;; +1D934;SIGNWRITING MOVEMENT-WALLPLANE TRIPLE STRAIGHT MOVEMENT;So;0;L;;;;;N;;;;; +1D935;SIGNWRITING MOVEMENT-WALLPLANE TRIPLE WRIST FLEX;So;0;L;;;;;N;;;;; +1D936;SIGNWRITING MOVEMENT-WALLPLANE TRIPLE ALTERNATING;So;0;L;;;;;N;;;;; +1D937;SIGNWRITING MOVEMENT-WALLPLANE TRIPLE ALTERNATING WRIST FLEX;So;0;L;;;;;N;;;;; +1D938;SIGNWRITING MOVEMENT-WALLPLANE BEND SMALL;So;0;L;;;;;N;;;;; +1D939;SIGNWRITING MOVEMENT-WALLPLANE BEND MEDIUM;So;0;L;;;;;N;;;;; +1D93A;SIGNWRITING MOVEMENT-WALLPLANE BEND LARGE;So;0;L;;;;;N;;;;; +1D93B;SIGNWRITING MOVEMENT-WALLPLANE CORNER SMALL;So;0;L;;;;;N;;;;; +1D93C;SIGNWRITING MOVEMENT-WALLPLANE CORNER MEDIUM;So;0;L;;;;;N;;;;; 
+1D93D;SIGNWRITING MOVEMENT-WALLPLANE CORNER LARGE;So;0;L;;;;;N;;;;; +1D93E;SIGNWRITING MOVEMENT-WALLPLANE CORNER ROTATION;So;0;L;;;;;N;;;;; +1D93F;SIGNWRITING MOVEMENT-WALLPLANE CHECK SMALL;So;0;L;;;;;N;;;;; +1D940;SIGNWRITING MOVEMENT-WALLPLANE CHECK MEDIUM;So;0;L;;;;;N;;;;; +1D941;SIGNWRITING MOVEMENT-WALLPLANE CHECK LARGE;So;0;L;;;;;N;;;;; +1D942;SIGNWRITING MOVEMENT-WALLPLANE BOX SMALL;So;0;L;;;;;N;;;;; +1D943;SIGNWRITING MOVEMENT-WALLPLANE BOX MEDIUM;So;0;L;;;;;N;;;;; +1D944;SIGNWRITING MOVEMENT-WALLPLANE BOX LARGE;So;0;L;;;;;N;;;;; +1D945;SIGNWRITING MOVEMENT-WALLPLANE ZIGZAG SMALL;So;0;L;;;;;N;;;;; +1D946;SIGNWRITING MOVEMENT-WALLPLANE ZIGZAG MEDIUM;So;0;L;;;;;N;;;;; +1D947;SIGNWRITING MOVEMENT-WALLPLANE ZIGZAG LARGE;So;0;L;;;;;N;;;;; +1D948;SIGNWRITING MOVEMENT-WALLPLANE PEAKS SMALL;So;0;L;;;;;N;;;;; +1D949;SIGNWRITING MOVEMENT-WALLPLANE PEAKS MEDIUM;So;0;L;;;;;N;;;;; +1D94A;SIGNWRITING MOVEMENT-WALLPLANE PEAKS LARGE;So;0;L;;;;;N;;;;; +1D94B;SIGNWRITING TRAVEL-WALLPLANE ROTATION-WALLPLANE SINGLE;So;0;L;;;;;N;;;;; +1D94C;SIGNWRITING TRAVEL-WALLPLANE ROTATION-WALLPLANE DOUBLE;So;0;L;;;;;N;;;;; +1D94D;SIGNWRITING TRAVEL-WALLPLANE ROTATION-WALLPLANE ALTERNATING;So;0;L;;;;;N;;;;; +1D94E;SIGNWRITING TRAVEL-WALLPLANE ROTATION-FLOORPLANE SINGLE;So;0;L;;;;;N;;;;; +1D94F;SIGNWRITING TRAVEL-WALLPLANE ROTATION-FLOORPLANE DOUBLE;So;0;L;;;;;N;;;;; +1D950;SIGNWRITING TRAVEL-WALLPLANE ROTATION-FLOORPLANE ALTERNATING;So;0;L;;;;;N;;;;; +1D951;SIGNWRITING TRAVEL-WALLPLANE SHAKING;So;0;L;;;;;N;;;;; +1D952;SIGNWRITING TRAVEL-WALLPLANE ARM SPIRAL SINGLE;So;0;L;;;;;N;;;;; +1D953;SIGNWRITING TRAVEL-WALLPLANE ARM SPIRAL DOUBLE;So;0;L;;;;;N;;;;; +1D954;SIGNWRITING TRAVEL-WALLPLANE ARM SPIRAL TRIPLE;So;0;L;;;;;N;;;;; +1D955;SIGNWRITING MOVEMENT-DIAGONAL AWAY SMALL;So;0;L;;;;;N;;;;; +1D956;SIGNWRITING MOVEMENT-DIAGONAL AWAY MEDIUM;So;0;L;;;;;N;;;;; +1D957;SIGNWRITING MOVEMENT-DIAGONAL AWAY LARGE;So;0;L;;;;;N;;;;; +1D958;SIGNWRITING MOVEMENT-DIAGONAL AWAY LARGEST;So;0;L;;;;;N;;;;; +1D959;SIGNWRITING MOVEMENT-DIAGONAL TOWARDS SMALL;So;0;L;;;;;N;;;;; +1D95A;SIGNWRITING MOVEMENT-DIAGONAL TOWARDS MEDIUM;So;0;L;;;;;N;;;;; +1D95B;SIGNWRITING MOVEMENT-DIAGONAL TOWARDS LARGE;So;0;L;;;;;N;;;;; +1D95C;SIGNWRITING MOVEMENT-DIAGONAL TOWARDS LARGEST;So;0;L;;;;;N;;;;; +1D95D;SIGNWRITING MOVEMENT-DIAGONAL BETWEEN AWAY SMALL;So;0;L;;;;;N;;;;; +1D95E;SIGNWRITING MOVEMENT-DIAGONAL BETWEEN AWAY MEDIUM;So;0;L;;;;;N;;;;; +1D95F;SIGNWRITING MOVEMENT-DIAGONAL BETWEEN AWAY LARGE;So;0;L;;;;;N;;;;; +1D960;SIGNWRITING MOVEMENT-DIAGONAL BETWEEN AWAY LARGEST;So;0;L;;;;;N;;;;; +1D961;SIGNWRITING MOVEMENT-DIAGONAL BETWEEN TOWARDS SMALL;So;0;L;;;;;N;;;;; +1D962;SIGNWRITING MOVEMENT-DIAGONAL BETWEEN TOWARDS MEDIUM;So;0;L;;;;;N;;;;; +1D963;SIGNWRITING MOVEMENT-DIAGONAL BETWEEN TOWARDS LARGE;So;0;L;;;;;N;;;;; +1D964;SIGNWRITING MOVEMENT-DIAGONAL BETWEEN TOWARDS LARGEST;So;0;L;;;;;N;;;;; +1D965;SIGNWRITING MOVEMENT-FLOORPLANE SINGLE STRAIGHT SMALL;So;0;L;;;;;N;;;;; +1D966;SIGNWRITING MOVEMENT-FLOORPLANE SINGLE STRAIGHT MEDIUM;So;0;L;;;;;N;;;;; +1D967;SIGNWRITING MOVEMENT-FLOORPLANE SINGLE STRAIGHT LARGE;So;0;L;;;;;N;;;;; +1D968;SIGNWRITING MOVEMENT-FLOORPLANE SINGLE STRAIGHT LARGEST;So;0;L;;;;;N;;;;; +1D969;SIGNWRITING MOVEMENT-FLOORPLANE SINGLE WRIST FLEX;So;0;L;;;;;N;;;;; +1D96A;SIGNWRITING MOVEMENT-FLOORPLANE DOUBLE STRAIGHT;So;0;L;;;;;N;;;;; +1D96B;SIGNWRITING MOVEMENT-FLOORPLANE DOUBLE WRIST FLEX;So;0;L;;;;;N;;;;; +1D96C;SIGNWRITING MOVEMENT-FLOORPLANE DOUBLE ALTERNATING;So;0;L;;;;;N;;;;; +1D96D;SIGNWRITING MOVEMENT-FLOORPLANE DOUBLE 
ALTERNATING WRIST FLEX;So;0;L;;;;;N;;;;; +1D96E;SIGNWRITING MOVEMENT-FLOORPLANE CROSS;So;0;L;;;;;N;;;;; +1D96F;SIGNWRITING MOVEMENT-FLOORPLANE TRIPLE STRAIGHT MOVEMENT;So;0;L;;;;;N;;;;; +1D970;SIGNWRITING MOVEMENT-FLOORPLANE TRIPLE WRIST FLEX;So;0;L;;;;;N;;;;; +1D971;SIGNWRITING MOVEMENT-FLOORPLANE TRIPLE ALTERNATING MOVEMENT;So;0;L;;;;;N;;;;; +1D972;SIGNWRITING MOVEMENT-FLOORPLANE TRIPLE ALTERNATING WRIST FLEX;So;0;L;;;;;N;;;;; +1D973;SIGNWRITING MOVEMENT-FLOORPLANE BEND;So;0;L;;;;;N;;;;; +1D974;SIGNWRITING MOVEMENT-FLOORPLANE CORNER SMALL;So;0;L;;;;;N;;;;; +1D975;SIGNWRITING MOVEMENT-FLOORPLANE CORNER MEDIUM;So;0;L;;;;;N;;;;; +1D976;SIGNWRITING MOVEMENT-FLOORPLANE CORNER LARGE;So;0;L;;;;;N;;;;; +1D977;SIGNWRITING MOVEMENT-FLOORPLANE CHECK;So;0;L;;;;;N;;;;; +1D978;SIGNWRITING MOVEMENT-FLOORPLANE BOX SMALL;So;0;L;;;;;N;;;;; +1D979;SIGNWRITING MOVEMENT-FLOORPLANE BOX MEDIUM;So;0;L;;;;;N;;;;; +1D97A;SIGNWRITING MOVEMENT-FLOORPLANE BOX LARGE;So;0;L;;;;;N;;;;; +1D97B;SIGNWRITING MOVEMENT-FLOORPLANE ZIGZAG SMALL;So;0;L;;;;;N;;;;; +1D97C;SIGNWRITING MOVEMENT-FLOORPLANE ZIGZAG MEDIUM;So;0;L;;;;;N;;;;; +1D97D;SIGNWRITING MOVEMENT-FLOORPLANE ZIGZAG LARGE;So;0;L;;;;;N;;;;; +1D97E;SIGNWRITING MOVEMENT-FLOORPLANE PEAKS SMALL;So;0;L;;;;;N;;;;; +1D97F;SIGNWRITING MOVEMENT-FLOORPLANE PEAKS MEDIUM;So;0;L;;;;;N;;;;; +1D980;SIGNWRITING MOVEMENT-FLOORPLANE PEAKS LARGE;So;0;L;;;;;N;;;;; +1D981;SIGNWRITING TRAVEL-FLOORPLANE ROTATION-FLOORPLANE SINGLE;So;0;L;;;;;N;;;;; +1D982;SIGNWRITING TRAVEL-FLOORPLANE ROTATION-FLOORPLANE DOUBLE;So;0;L;;;;;N;;;;; +1D983;SIGNWRITING TRAVEL-FLOORPLANE ROTATION-FLOORPLANE ALTERNATING;So;0;L;;;;;N;;;;; +1D984;SIGNWRITING TRAVEL-FLOORPLANE ROTATION-WALLPLANE SINGLE;So;0;L;;;;;N;;;;; +1D985;SIGNWRITING TRAVEL-FLOORPLANE ROTATION-WALLPLANE DOUBLE;So;0;L;;;;;N;;;;; +1D986;SIGNWRITING TRAVEL-FLOORPLANE ROTATION-WALLPLANE ALTERNATING;So;0;L;;;;;N;;;;; +1D987;SIGNWRITING TRAVEL-FLOORPLANE SHAKING;So;0;L;;;;;N;;;;; +1D988;SIGNWRITING MOVEMENT-WALLPLANE CURVE QUARTER SMALL;So;0;L;;;;;N;;;;; +1D989;SIGNWRITING MOVEMENT-WALLPLANE CURVE QUARTER MEDIUM;So;0;L;;;;;N;;;;; +1D98A;SIGNWRITING MOVEMENT-WALLPLANE CURVE QUARTER LARGE;So;0;L;;;;;N;;;;; +1D98B;SIGNWRITING MOVEMENT-WALLPLANE CURVE QUARTER LARGEST;So;0;L;;;;;N;;;;; +1D98C;SIGNWRITING MOVEMENT-WALLPLANE CURVE HALF-CIRCLE SMALL;So;0;L;;;;;N;;;;; +1D98D;SIGNWRITING MOVEMENT-WALLPLANE CURVE HALF-CIRCLE MEDIUM;So;0;L;;;;;N;;;;; +1D98E;SIGNWRITING MOVEMENT-WALLPLANE CURVE HALF-CIRCLE LARGE;So;0;L;;;;;N;;;;; +1D98F;SIGNWRITING MOVEMENT-WALLPLANE CURVE HALF-CIRCLE LARGEST;So;0;L;;;;;N;;;;; +1D990;SIGNWRITING MOVEMENT-WALLPLANE CURVE THREE-QUARTER CIRCLE SMALL;So;0;L;;;;;N;;;;; +1D991;SIGNWRITING MOVEMENT-WALLPLANE CURVE THREE-QUARTER CIRCLE MEDIUM;So;0;L;;;;;N;;;;; +1D992;SIGNWRITING MOVEMENT-WALLPLANE HUMP SMALL;So;0;L;;;;;N;;;;; +1D993;SIGNWRITING MOVEMENT-WALLPLANE HUMP MEDIUM;So;0;L;;;;;N;;;;; +1D994;SIGNWRITING MOVEMENT-WALLPLANE HUMP LARGE;So;0;L;;;;;N;;;;; +1D995;SIGNWRITING MOVEMENT-WALLPLANE LOOP SMALL;So;0;L;;;;;N;;;;; +1D996;SIGNWRITING MOVEMENT-WALLPLANE LOOP MEDIUM;So;0;L;;;;;N;;;;; +1D997;SIGNWRITING MOVEMENT-WALLPLANE LOOP LARGE;So;0;L;;;;;N;;;;; +1D998;SIGNWRITING MOVEMENT-WALLPLANE LOOP SMALL DOUBLE;So;0;L;;;;;N;;;;; +1D999;SIGNWRITING MOVEMENT-WALLPLANE WAVE CURVE DOUBLE SMALL;So;0;L;;;;;N;;;;; +1D99A;SIGNWRITING MOVEMENT-WALLPLANE WAVE CURVE DOUBLE MEDIUM;So;0;L;;;;;N;;;;; +1D99B;SIGNWRITING MOVEMENT-WALLPLANE WAVE CURVE DOUBLE LARGE;So;0;L;;;;;N;;;;; +1D99C;SIGNWRITING MOVEMENT-WALLPLANE WAVE CURVE TRIPLE 
SMALL;So;0;L;;;;;N;;;;; +1D99D;SIGNWRITING MOVEMENT-WALLPLANE WAVE CURVE TRIPLE MEDIUM;So;0;L;;;;;N;;;;; +1D99E;SIGNWRITING MOVEMENT-WALLPLANE WAVE CURVE TRIPLE LARGE;So;0;L;;;;;N;;;;; +1D99F;SIGNWRITING MOVEMENT-WALLPLANE CURVE THEN STRAIGHT;So;0;L;;;;;N;;;;; +1D9A0;SIGNWRITING MOVEMENT-WALLPLANE CURVED CROSS SMALL;So;0;L;;;;;N;;;;; +1D9A1;SIGNWRITING MOVEMENT-WALLPLANE CURVED CROSS MEDIUM;So;0;L;;;;;N;;;;; +1D9A2;SIGNWRITING ROTATION-WALLPLANE SINGLE;So;0;L;;;;;N;;;;; +1D9A3;SIGNWRITING ROTATION-WALLPLANE DOUBLE;So;0;L;;;;;N;;;;; +1D9A4;SIGNWRITING ROTATION-WALLPLANE ALTERNATE;So;0;L;;;;;N;;;;; +1D9A5;SIGNWRITING MOVEMENT-WALLPLANE SHAKING;So;0;L;;;;;N;;;;; +1D9A6;SIGNWRITING MOVEMENT-WALLPLANE CURVE HITTING FRONT WALL;So;0;L;;;;;N;;;;; +1D9A7;SIGNWRITING MOVEMENT-WALLPLANE HUMP HITTING FRONT WALL;So;0;L;;;;;N;;;;; +1D9A8;SIGNWRITING MOVEMENT-WALLPLANE LOOP HITTING FRONT WALL;So;0;L;;;;;N;;;;; +1D9A9;SIGNWRITING MOVEMENT-WALLPLANE WAVE HITTING FRONT WALL;So;0;L;;;;;N;;;;; +1D9AA;SIGNWRITING ROTATION-WALLPLANE SINGLE HITTING FRONT WALL;So;0;L;;;;;N;;;;; +1D9AB;SIGNWRITING ROTATION-WALLPLANE DOUBLE HITTING FRONT WALL;So;0;L;;;;;N;;;;; +1D9AC;SIGNWRITING ROTATION-WALLPLANE ALTERNATING HITTING FRONT WALL;So;0;L;;;;;N;;;;; +1D9AD;SIGNWRITING MOVEMENT-WALLPLANE CURVE HITTING CHEST;So;0;L;;;;;N;;;;; +1D9AE;SIGNWRITING MOVEMENT-WALLPLANE HUMP HITTING CHEST;So;0;L;;;;;N;;;;; +1D9AF;SIGNWRITING MOVEMENT-WALLPLANE LOOP HITTING CHEST;So;0;L;;;;;N;;;;; +1D9B0;SIGNWRITING MOVEMENT-WALLPLANE WAVE HITTING CHEST;So;0;L;;;;;N;;;;; +1D9B1;SIGNWRITING ROTATION-WALLPLANE SINGLE HITTING CHEST;So;0;L;;;;;N;;;;; +1D9B2;SIGNWRITING ROTATION-WALLPLANE DOUBLE HITTING CHEST;So;0;L;;;;;N;;;;; +1D9B3;SIGNWRITING ROTATION-WALLPLANE ALTERNATING HITTING CHEST;So;0;L;;;;;N;;;;; +1D9B4;SIGNWRITING MOVEMENT-WALLPLANE WAVE DIAGONAL PATH SMALL;So;0;L;;;;;N;;;;; +1D9B5;SIGNWRITING MOVEMENT-WALLPLANE WAVE DIAGONAL PATH MEDIUM;So;0;L;;;;;N;;;;; +1D9B6;SIGNWRITING MOVEMENT-WALLPLANE WAVE DIAGONAL PATH LARGE;So;0;L;;;;;N;;;;; +1D9B7;SIGNWRITING MOVEMENT-FLOORPLANE CURVE HITTING CEILING SMALL;So;0;L;;;;;N;;;;; +1D9B8;SIGNWRITING MOVEMENT-FLOORPLANE CURVE HITTING CEILING LARGE;So;0;L;;;;;N;;;;; +1D9B9;SIGNWRITING MOVEMENT-FLOORPLANE HUMP HITTING CEILING SMALL DOUBLE;So;0;L;;;;;N;;;;; +1D9BA;SIGNWRITING MOVEMENT-FLOORPLANE HUMP HITTING CEILING LARGE DOUBLE;So;0;L;;;;;N;;;;; +1D9BB;SIGNWRITING MOVEMENT-FLOORPLANE HUMP HITTING CEILING SMALL TRIPLE;So;0;L;;;;;N;;;;; +1D9BC;SIGNWRITING MOVEMENT-FLOORPLANE HUMP HITTING CEILING LARGE TRIPLE;So;0;L;;;;;N;;;;; +1D9BD;SIGNWRITING MOVEMENT-FLOORPLANE LOOP HITTING CEILING SMALL SINGLE;So;0;L;;;;;N;;;;; +1D9BE;SIGNWRITING MOVEMENT-FLOORPLANE LOOP HITTING CEILING LARGE SINGLE;So;0;L;;;;;N;;;;; +1D9BF;SIGNWRITING MOVEMENT-FLOORPLANE LOOP HITTING CEILING SMALL DOUBLE;So;0;L;;;;;N;;;;; +1D9C0;SIGNWRITING MOVEMENT-FLOORPLANE LOOP HITTING CEILING LARGE DOUBLE;So;0;L;;;;;N;;;;; +1D9C1;SIGNWRITING MOVEMENT-FLOORPLANE WAVE HITTING CEILING SMALL;So;0;L;;;;;N;;;;; +1D9C2;SIGNWRITING MOVEMENT-FLOORPLANE WAVE HITTING CEILING LARGE;So;0;L;;;;;N;;;;; +1D9C3;SIGNWRITING ROTATION-FLOORPLANE SINGLE HITTING CEILING;So;0;L;;;;;N;;;;; +1D9C4;SIGNWRITING ROTATION-FLOORPLANE DOUBLE HITTING CEILING;So;0;L;;;;;N;;;;; +1D9C5;SIGNWRITING ROTATION-FLOORPLANE ALTERNATING HITTING CEILING;So;0;L;;;;;N;;;;; +1D9C6;SIGNWRITING MOVEMENT-FLOORPLANE CURVE HITTING FLOOR SMALL;So;0;L;;;;;N;;;;; +1D9C7;SIGNWRITING MOVEMENT-FLOORPLANE CURVE HITTING FLOOR LARGE;So;0;L;;;;;N;;;;; +1D9C8;SIGNWRITING MOVEMENT-FLOORPLANE HUMP HITTING 
FLOOR SMALL DOUBLE;So;0;L;;;;;N;;;;; +1D9C9;SIGNWRITING MOVEMENT-FLOORPLANE HUMP HITTING FLOOR LARGE DOUBLE;So;0;L;;;;;N;;;;; +1D9CA;SIGNWRITING MOVEMENT-FLOORPLANE HUMP HITTING FLOOR TRIPLE SMALL TRIPLE;So;0;L;;;;;N;;;;; +1D9CB;SIGNWRITING MOVEMENT-FLOORPLANE HUMP HITTING FLOOR TRIPLE LARGE TRIPLE;So;0;L;;;;;N;;;;; +1D9CC;SIGNWRITING MOVEMENT-FLOORPLANE LOOP HITTING FLOOR SMALL SINGLE;So;0;L;;;;;N;;;;; +1D9CD;SIGNWRITING MOVEMENT-FLOORPLANE LOOP HITTING FLOOR LARGE SINGLE;So;0;L;;;;;N;;;;; +1D9CE;SIGNWRITING MOVEMENT-FLOORPLANE LOOP HITTING FLOOR SMALL DOUBLE;So;0;L;;;;;N;;;;; +1D9CF;SIGNWRITING MOVEMENT-FLOORPLANE LOOP HITTING FLOOR LARGE DOUBLE;So;0;L;;;;;N;;;;; +1D9D0;SIGNWRITING MOVEMENT-FLOORPLANE WAVE HITTING FLOOR SMALL;So;0;L;;;;;N;;;;; +1D9D1;SIGNWRITING MOVEMENT-FLOORPLANE WAVE HITTING FLOOR LARGE;So;0;L;;;;;N;;;;; +1D9D2;SIGNWRITING ROTATION-FLOORPLANE SINGLE HITTING FLOOR;So;0;L;;;;;N;;;;; +1D9D3;SIGNWRITING ROTATION-FLOORPLANE DOUBLE HITTING FLOOR;So;0;L;;;;;N;;;;; +1D9D4;SIGNWRITING ROTATION-FLOORPLANE ALTERNATING HITTING FLOOR;So;0;L;;;;;N;;;;; +1D9D5;SIGNWRITING MOVEMENT-FLOORPLANE CURVE SMALL;So;0;L;;;;;N;;;;; +1D9D6;SIGNWRITING MOVEMENT-FLOORPLANE CURVE MEDIUM;So;0;L;;;;;N;;;;; +1D9D7;SIGNWRITING MOVEMENT-FLOORPLANE CURVE LARGE;So;0;L;;;;;N;;;;; +1D9D8;SIGNWRITING MOVEMENT-FLOORPLANE CURVE LARGEST;So;0;L;;;;;N;;;;; +1D9D9;SIGNWRITING MOVEMENT-FLOORPLANE CURVE COMBINED;So;0;L;;;;;N;;;;; +1D9DA;SIGNWRITING MOVEMENT-FLOORPLANE HUMP SMALL;So;0;L;;;;;N;;;;; +1D9DB;SIGNWRITING MOVEMENT-FLOORPLANE LOOP SMALL;So;0;L;;;;;N;;;;; +1D9DC;SIGNWRITING MOVEMENT-FLOORPLANE WAVE SNAKE;So;0;L;;;;;N;;;;; +1D9DD;SIGNWRITING MOVEMENT-FLOORPLANE WAVE SMALL;So;0;L;;;;;N;;;;; +1D9DE;SIGNWRITING MOVEMENT-FLOORPLANE WAVE LARGE;So;0;L;;;;;N;;;;; +1D9DF;SIGNWRITING ROTATION-FLOORPLANE SINGLE;So;0;L;;;;;N;;;;; +1D9E0;SIGNWRITING ROTATION-FLOORPLANE DOUBLE;So;0;L;;;;;N;;;;; +1D9E1;SIGNWRITING ROTATION-FLOORPLANE ALTERNATING;So;0;L;;;;;N;;;;; +1D9E2;SIGNWRITING MOVEMENT-FLOORPLANE SHAKING PARALLEL;So;0;L;;;;;N;;;;; +1D9E3;SIGNWRITING MOVEMENT-WALLPLANE ARM CIRCLE SMALL SINGLE;So;0;L;;;;;N;;;;; +1D9E4;SIGNWRITING MOVEMENT-WALLPLANE ARM CIRCLE MEDIUM SINGLE;So;0;L;;;;;N;;;;; +1D9E5;SIGNWRITING MOVEMENT-WALLPLANE ARM CIRCLE SMALL DOUBLE;So;0;L;;;;;N;;;;; +1D9E6;SIGNWRITING MOVEMENT-WALLPLANE ARM CIRCLE MEDIUM DOUBLE;So;0;L;;;;;N;;;;; +1D9E7;SIGNWRITING MOVEMENT-FLOORPLANE ARM CIRCLE HITTING WALL SMALL SINGLE;So;0;L;;;;;N;;;;; +1D9E8;SIGNWRITING MOVEMENT-FLOORPLANE ARM CIRCLE HITTING WALL MEDIUM SINGLE;So;0;L;;;;;N;;;;; +1D9E9;SIGNWRITING MOVEMENT-FLOORPLANE ARM CIRCLE HITTING WALL LARGE SINGLE;So;0;L;;;;;N;;;;; +1D9EA;SIGNWRITING MOVEMENT-FLOORPLANE ARM CIRCLE HITTING WALL SMALL DOUBLE;So;0;L;;;;;N;;;;; +1D9EB;SIGNWRITING MOVEMENT-FLOORPLANE ARM CIRCLE HITTING WALL MEDIUM DOUBLE;So;0;L;;;;;N;;;;; +1D9EC;SIGNWRITING MOVEMENT-FLOORPLANE ARM CIRCLE HITTING WALL LARGE DOUBLE;So;0;L;;;;;N;;;;; +1D9ED;SIGNWRITING MOVEMENT-WALLPLANE WRIST CIRCLE FRONT SINGLE;So;0;L;;;;;N;;;;; +1D9EE;SIGNWRITING MOVEMENT-WALLPLANE WRIST CIRCLE FRONT DOUBLE;So;0;L;;;;;N;;;;; +1D9EF;SIGNWRITING MOVEMENT-FLOORPLANE WRIST CIRCLE HITTING WALL SINGLE;So;0;L;;;;;N;;;;; +1D9F0;SIGNWRITING MOVEMENT-FLOORPLANE WRIST CIRCLE HITTING WALL DOUBLE;So;0;L;;;;;N;;;;; +1D9F1;SIGNWRITING MOVEMENT-WALLPLANE FINGER CIRCLES SINGLE;So;0;L;;;;;N;;;;; +1D9F2;SIGNWRITING MOVEMENT-WALLPLANE FINGER CIRCLES DOUBLE;So;0;L;;;;;N;;;;; +1D9F3;SIGNWRITING MOVEMENT-FLOORPLANE FINGER CIRCLES HITTING WALL SINGLE;So;0;L;;;;;N;;;;; +1D9F4;SIGNWRITING 
MOVEMENT-FLOORPLANE FINGER CIRCLES HITTING WALL DOUBLE;So;0;L;;;;;N;;;;; +1D9F5;SIGNWRITING DYNAMIC ARROWHEAD SMALL;So;0;L;;;;;N;;;;; +1D9F6;SIGNWRITING DYNAMIC ARROWHEAD LARGE;So;0;L;;;;;N;;;;; +1D9F7;SIGNWRITING DYNAMIC FAST;So;0;L;;;;;N;;;;; +1D9F8;SIGNWRITING DYNAMIC SLOW;So;0;L;;;;;N;;;;; +1D9F9;SIGNWRITING DYNAMIC TENSE;So;0;L;;;;;N;;;;; +1D9FA;SIGNWRITING DYNAMIC RELAXED;So;0;L;;;;;N;;;;; +1D9FB;SIGNWRITING DYNAMIC SIMULTANEOUS;So;0;L;;;;;N;;;;; +1D9FC;SIGNWRITING DYNAMIC SIMULTANEOUS ALTERNATING;So;0;L;;;;;N;;;;; +1D9FD;SIGNWRITING DYNAMIC EVERY OTHER TIME;So;0;L;;;;;N;;;;; +1D9FE;SIGNWRITING DYNAMIC GRADUAL;So;0;L;;;;;N;;;;; +1D9FF;SIGNWRITING HEAD;So;0;L;;;;;N;;;;; +1DA00;SIGNWRITING HEAD RIM;Mn;0;NSM;;;;;N;;;;; +1DA01;SIGNWRITING HEAD MOVEMENT-WALLPLANE STRAIGHT;Mn;0;NSM;;;;;N;;;;; +1DA02;SIGNWRITING HEAD MOVEMENT-WALLPLANE TILT;Mn;0;NSM;;;;;N;;;;; +1DA03;SIGNWRITING HEAD MOVEMENT-FLOORPLANE STRAIGHT;Mn;0;NSM;;;;;N;;;;; +1DA04;SIGNWRITING HEAD MOVEMENT-WALLPLANE CURVE;Mn;0;NSM;;;;;N;;;;; +1DA05;SIGNWRITING HEAD MOVEMENT-FLOORPLANE CURVE;Mn;0;NSM;;;;;N;;;;; +1DA06;SIGNWRITING HEAD MOVEMENT CIRCLE;Mn;0;NSM;;;;;N;;;;; +1DA07;SIGNWRITING FACE DIRECTION POSITION NOSE FORWARD TILTING;Mn;0;NSM;;;;;N;;;;; +1DA08;SIGNWRITING FACE DIRECTION POSITION NOSE UP OR DOWN;Mn;0;NSM;;;;;N;;;;; +1DA09;SIGNWRITING FACE DIRECTION POSITION NOSE UP OR DOWN TILTING;Mn;0;NSM;;;;;N;;;;; +1DA0A;SIGNWRITING EYEBROWS STRAIGHT UP;Mn;0;NSM;;;;;N;;;;; +1DA0B;SIGNWRITING EYEBROWS STRAIGHT NEUTRAL;Mn;0;NSM;;;;;N;;;;; +1DA0C;SIGNWRITING EYEBROWS STRAIGHT DOWN;Mn;0;NSM;;;;;N;;;;; +1DA0D;SIGNWRITING DREAMY EYEBROWS NEUTRAL DOWN;Mn;0;NSM;;;;;N;;;;; +1DA0E;SIGNWRITING DREAMY EYEBROWS DOWN NEUTRAL;Mn;0;NSM;;;;;N;;;;; +1DA0F;SIGNWRITING DREAMY EYEBROWS UP NEUTRAL;Mn;0;NSM;;;;;N;;;;; +1DA10;SIGNWRITING DREAMY EYEBROWS NEUTRAL UP;Mn;0;NSM;;;;;N;;;;; +1DA11;SIGNWRITING FOREHEAD NEUTRAL;Mn;0;NSM;;;;;N;;;;; +1DA12;SIGNWRITING FOREHEAD CONTACT;Mn;0;NSM;;;;;N;;;;; +1DA13;SIGNWRITING FOREHEAD WRINKLED;Mn;0;NSM;;;;;N;;;;; +1DA14;SIGNWRITING EYES OPEN;Mn;0;NSM;;;;;N;;;;; +1DA15;SIGNWRITING EYES SQUEEZED;Mn;0;NSM;;;;;N;;;;; +1DA16;SIGNWRITING EYES CLOSED;Mn;0;NSM;;;;;N;;;;; +1DA17;SIGNWRITING EYE BLINK SINGLE;Mn;0;NSM;;;;;N;;;;; +1DA18;SIGNWRITING EYE BLINK MULTIPLE;Mn;0;NSM;;;;;N;;;;; +1DA19;SIGNWRITING EYES HALF OPEN;Mn;0;NSM;;;;;N;;;;; +1DA1A;SIGNWRITING EYES WIDE OPEN;Mn;0;NSM;;;;;N;;;;; +1DA1B;SIGNWRITING EYES HALF CLOSED;Mn;0;NSM;;;;;N;;;;; +1DA1C;SIGNWRITING EYES WIDENING MOVEMENT;Mn;0;NSM;;;;;N;;;;; +1DA1D;SIGNWRITING EYE WINK;Mn;0;NSM;;;;;N;;;;; +1DA1E;SIGNWRITING EYELASHES UP;Mn;0;NSM;;;;;N;;;;; +1DA1F;SIGNWRITING EYELASHES DOWN;Mn;0;NSM;;;;;N;;;;; +1DA20;SIGNWRITING EYELASHES FLUTTERING;Mn;0;NSM;;;;;N;;;;; +1DA21;SIGNWRITING EYEGAZE-WALLPLANE STRAIGHT;Mn;0;NSM;;;;;N;;;;; +1DA22;SIGNWRITING EYEGAZE-WALLPLANE STRAIGHT DOUBLE;Mn;0;NSM;;;;;N;;;;; +1DA23;SIGNWRITING EYEGAZE-WALLPLANE STRAIGHT ALTERNATING;Mn;0;NSM;;;;;N;;;;; +1DA24;SIGNWRITING EYEGAZE-FLOORPLANE STRAIGHT;Mn;0;NSM;;;;;N;;;;; +1DA25;SIGNWRITING EYEGAZE-FLOORPLANE STRAIGHT DOUBLE;Mn;0;NSM;;;;;N;;;;; +1DA26;SIGNWRITING EYEGAZE-FLOORPLANE STRAIGHT ALTERNATING;Mn;0;NSM;;;;;N;;;;; +1DA27;SIGNWRITING EYEGAZE-WALLPLANE CURVED;Mn;0;NSM;;;;;N;;;;; +1DA28;SIGNWRITING EYEGAZE-FLOORPLANE CURVED;Mn;0;NSM;;;;;N;;;;; +1DA29;SIGNWRITING EYEGAZE-WALLPLANE CIRCLING;Mn;0;NSM;;;;;N;;;;; +1DA2A;SIGNWRITING CHEEKS PUFFED;Mn;0;NSM;;;;;N;;;;; +1DA2B;SIGNWRITING CHEEKS NEUTRAL;Mn;0;NSM;;;;;N;;;;; +1DA2C;SIGNWRITING CHEEKS SUCKED;Mn;0;NSM;;;;;N;;;;; +1DA2D;SIGNWRITING TENSE CHEEKS 
HIGH;Mn;0;NSM;;;;;N;;;;; +1DA2E;SIGNWRITING TENSE CHEEKS MIDDLE;Mn;0;NSM;;;;;N;;;;; +1DA2F;SIGNWRITING TENSE CHEEKS LOW;Mn;0;NSM;;;;;N;;;;; +1DA30;SIGNWRITING EARS;Mn;0;NSM;;;;;N;;;;; +1DA31;SIGNWRITING NOSE NEUTRAL;Mn;0;NSM;;;;;N;;;;; +1DA32;SIGNWRITING NOSE CONTACT;Mn;0;NSM;;;;;N;;;;; +1DA33;SIGNWRITING NOSE WRINKLES;Mn;0;NSM;;;;;N;;;;; +1DA34;SIGNWRITING NOSE WIGGLES;Mn;0;NSM;;;;;N;;;;; +1DA35;SIGNWRITING AIR BLOWING OUT;Mn;0;NSM;;;;;N;;;;; +1DA36;SIGNWRITING AIR SUCKING IN;Mn;0;NSM;;;;;N;;;;; +1DA37;SIGNWRITING AIR BLOW SMALL ROTATIONS;So;0;L;;;;;N;;;;; +1DA38;SIGNWRITING AIR SUCK SMALL ROTATIONS;So;0;L;;;;;N;;;;; +1DA39;SIGNWRITING BREATH INHALE;So;0;L;;;;;N;;;;; +1DA3A;SIGNWRITING BREATH EXHALE;So;0;L;;;;;N;;;;; +1DA3B;SIGNWRITING MOUTH CLOSED NEUTRAL;Mn;0;NSM;;;;;N;;;;; +1DA3C;SIGNWRITING MOUTH CLOSED FORWARD;Mn;0;NSM;;;;;N;;;;; +1DA3D;SIGNWRITING MOUTH CLOSED CONTACT;Mn;0;NSM;;;;;N;;;;; +1DA3E;SIGNWRITING MOUTH SMILE;Mn;0;NSM;;;;;N;;;;; +1DA3F;SIGNWRITING MOUTH SMILE WRINKLED;Mn;0;NSM;;;;;N;;;;; +1DA40;SIGNWRITING MOUTH SMILE OPEN;Mn;0;NSM;;;;;N;;;;; +1DA41;SIGNWRITING MOUTH FROWN;Mn;0;NSM;;;;;N;;;;; +1DA42;SIGNWRITING MOUTH FROWN WRINKLED;Mn;0;NSM;;;;;N;;;;; +1DA43;SIGNWRITING MOUTH FROWN OPEN;Mn;0;NSM;;;;;N;;;;; +1DA44;SIGNWRITING MOUTH OPEN CIRCLE;Mn;0;NSM;;;;;N;;;;; +1DA45;SIGNWRITING MOUTH OPEN FORWARD;Mn;0;NSM;;;;;N;;;;; +1DA46;SIGNWRITING MOUTH OPEN WRINKLED;Mn;0;NSM;;;;;N;;;;; +1DA47;SIGNWRITING MOUTH OPEN OVAL;Mn;0;NSM;;;;;N;;;;; +1DA48;SIGNWRITING MOUTH OPEN OVAL WRINKLED;Mn;0;NSM;;;;;N;;;;; +1DA49;SIGNWRITING MOUTH OPEN OVAL YAWN;Mn;0;NSM;;;;;N;;;;; +1DA4A;SIGNWRITING MOUTH OPEN RECTANGLE;Mn;0;NSM;;;;;N;;;;; +1DA4B;SIGNWRITING MOUTH OPEN RECTANGLE WRINKLED;Mn;0;NSM;;;;;N;;;;; +1DA4C;SIGNWRITING MOUTH OPEN RECTANGLE YAWN;Mn;0;NSM;;;;;N;;;;; +1DA4D;SIGNWRITING MOUTH KISS;Mn;0;NSM;;;;;N;;;;; +1DA4E;SIGNWRITING MOUTH KISS FORWARD;Mn;0;NSM;;;;;N;;;;; +1DA4F;SIGNWRITING MOUTH KISS WRINKLED;Mn;0;NSM;;;;;N;;;;; +1DA50;SIGNWRITING MOUTH TENSE;Mn;0;NSM;;;;;N;;;;; +1DA51;SIGNWRITING MOUTH TENSE FORWARD;Mn;0;NSM;;;;;N;;;;; +1DA52;SIGNWRITING MOUTH TENSE SUCKED;Mn;0;NSM;;;;;N;;;;; +1DA53;SIGNWRITING LIPS PRESSED TOGETHER;Mn;0;NSM;;;;;N;;;;; +1DA54;SIGNWRITING LIP LOWER OVER UPPER;Mn;0;NSM;;;;;N;;;;; +1DA55;SIGNWRITING LIP UPPER OVER LOWER;Mn;0;NSM;;;;;N;;;;; +1DA56;SIGNWRITING MOUTH CORNERS;Mn;0;NSM;;;;;N;;;;; +1DA57;SIGNWRITING MOUTH WRINKLES SINGLE;Mn;0;NSM;;;;;N;;;;; +1DA58;SIGNWRITING MOUTH WRINKLES DOUBLE;Mn;0;NSM;;;;;N;;;;; +1DA59;SIGNWRITING TONGUE STICKING OUT FAR;Mn;0;NSM;;;;;N;;;;; +1DA5A;SIGNWRITING TONGUE LICKING LIPS;Mn;0;NSM;;;;;N;;;;; +1DA5B;SIGNWRITING TONGUE TIP BETWEEN LIPS;Mn;0;NSM;;;;;N;;;;; +1DA5C;SIGNWRITING TONGUE TIP TOUCHING INSIDE MOUTH;Mn;0;NSM;;;;;N;;;;; +1DA5D;SIGNWRITING TONGUE INSIDE MOUTH RELAXED;Mn;0;NSM;;;;;N;;;;; +1DA5E;SIGNWRITING TONGUE MOVES AGAINST CHEEK;Mn;0;NSM;;;;;N;;;;; +1DA5F;SIGNWRITING TONGUE CENTRE STICKING OUT;Mn;0;NSM;;;;;N;;;;; +1DA60;SIGNWRITING TONGUE CENTRE INSIDE MOUTH;Mn;0;NSM;;;;;N;;;;; +1DA61;SIGNWRITING TEETH;Mn;0;NSM;;;;;N;;;;; +1DA62;SIGNWRITING TEETH MOVEMENT;Mn;0;NSM;;;;;N;;;;; +1DA63;SIGNWRITING TEETH ON TONGUE;Mn;0;NSM;;;;;N;;;;; +1DA64;SIGNWRITING TEETH ON TONGUE MOVEMENT;Mn;0;NSM;;;;;N;;;;; +1DA65;SIGNWRITING TEETH ON LIPS;Mn;0;NSM;;;;;N;;;;; +1DA66;SIGNWRITING TEETH ON LIPS MOVEMENT;Mn;0;NSM;;;;;N;;;;; +1DA67;SIGNWRITING TEETH BITE LIPS;Mn;0;NSM;;;;;N;;;;; +1DA68;SIGNWRITING MOVEMENT-WALLPLANE JAW;Mn;0;NSM;;;;;N;;;;; +1DA69;SIGNWRITING MOVEMENT-FLOORPLANE JAW;Mn;0;NSM;;;;;N;;;;; +1DA6A;SIGNWRITING 
NECK;Mn;0;NSM;;;;;N;;;;; +1DA6B;SIGNWRITING HAIR;Mn;0;NSM;;;;;N;;;;; +1DA6C;SIGNWRITING EXCITEMENT;Mn;0;NSM;;;;;N;;;;; +1DA6D;SIGNWRITING SHOULDER HIP SPINE;So;0;L;;;;;N;;;;; +1DA6E;SIGNWRITING SHOULDER HIP POSITIONS;So;0;L;;;;;N;;;;; +1DA6F;SIGNWRITING WALLPLANE SHOULDER HIP MOVE;So;0;L;;;;;N;;;;; +1DA70;SIGNWRITING FLOORPLANE SHOULDER HIP MOVE;So;0;L;;;;;N;;;;; +1DA71;SIGNWRITING SHOULDER TILTING FROM WAIST;So;0;L;;;;;N;;;;; +1DA72;SIGNWRITING TORSO-WALLPLANE STRAIGHT STRETCH;So;0;L;;;;;N;;;;; +1DA73;SIGNWRITING TORSO-WALLPLANE CURVED BEND;So;0;L;;;;;N;;;;; +1DA74;SIGNWRITING TORSO-FLOORPLANE TWISTING;So;0;L;;;;;N;;;;; +1DA75;SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS;Mn;0;NSM;;;;;N;;;;; +1DA76;SIGNWRITING LIMB COMBINATION;So;0;L;;;;;N;;;;; +1DA77;SIGNWRITING LIMB LENGTH-1;So;0;L;;;;;N;;;;; +1DA78;SIGNWRITING LIMB LENGTH-2;So;0;L;;;;;N;;;;; +1DA79;SIGNWRITING LIMB LENGTH-3;So;0;L;;;;;N;;;;; +1DA7A;SIGNWRITING LIMB LENGTH-4;So;0;L;;;;;N;;;;; +1DA7B;SIGNWRITING LIMB LENGTH-5;So;0;L;;;;;N;;;;; +1DA7C;SIGNWRITING LIMB LENGTH-6;So;0;L;;;;;N;;;;; +1DA7D;SIGNWRITING LIMB LENGTH-7;So;0;L;;;;;N;;;;; +1DA7E;SIGNWRITING FINGER;So;0;L;;;;;N;;;;; +1DA7F;SIGNWRITING LOCATION-WALLPLANE SPACE;So;0;L;;;;;N;;;;; +1DA80;SIGNWRITING LOCATION-FLOORPLANE SPACE;So;0;L;;;;;N;;;;; +1DA81;SIGNWRITING LOCATION HEIGHT;So;0;L;;;;;N;;;;; +1DA82;SIGNWRITING LOCATION WIDTH;So;0;L;;;;;N;;;;; +1DA83;SIGNWRITING LOCATION DEPTH;So;0;L;;;;;N;;;;; +1DA84;SIGNWRITING LOCATION HEAD NECK;Mn;0;NSM;;;;;N;;;;; +1DA85;SIGNWRITING LOCATION TORSO;So;0;L;;;;;N;;;;; +1DA86;SIGNWRITING LOCATION LIMBS DIGITS;So;0;L;;;;;N;;;;; +1DA87;SIGNWRITING COMMA;Po;0;L;;;;;N;;;;; +1DA88;SIGNWRITING FULL STOP;Po;0;L;;;;;N;;;;; +1DA89;SIGNWRITING SEMICOLON;Po;0;L;;;;;N;;;;; +1DA8A;SIGNWRITING COLON;Po;0;L;;;;;N;;;;; +1DA8B;SIGNWRITING PARENTHESIS;Po;0;L;;;;;N;;;;; +1DA9B;SIGNWRITING FILL MODIFIER-2;Mn;0;NSM;;;;;N;;;;; +1DA9C;SIGNWRITING FILL MODIFIER-3;Mn;0;NSM;;;;;N;;;;; +1DA9D;SIGNWRITING FILL MODIFIER-4;Mn;0;NSM;;;;;N;;;;; +1DA9E;SIGNWRITING FILL MODIFIER-5;Mn;0;NSM;;;;;N;;;;; +1DA9F;SIGNWRITING FILL MODIFIER-6;Mn;0;NSM;;;;;N;;;;; +1DAA1;SIGNWRITING ROTATION MODIFIER-2;Mn;0;NSM;;;;;N;;;;; +1DAA2;SIGNWRITING ROTATION MODIFIER-3;Mn;0;NSM;;;;;N;;;;; +1DAA3;SIGNWRITING ROTATION MODIFIER-4;Mn;0;NSM;;;;;N;;;;; +1DAA4;SIGNWRITING ROTATION MODIFIER-5;Mn;0;NSM;;;;;N;;;;; +1DAA5;SIGNWRITING ROTATION MODIFIER-6;Mn;0;NSM;;;;;N;;;;; +1DAA6;SIGNWRITING ROTATION MODIFIER-7;Mn;0;NSM;;;;;N;;;;; +1DAA7;SIGNWRITING ROTATION MODIFIER-8;Mn;0;NSM;;;;;N;;;;; +1DAA8;SIGNWRITING ROTATION MODIFIER-9;Mn;0;NSM;;;;;N;;;;; +1DAA9;SIGNWRITING ROTATION MODIFIER-10;Mn;0;NSM;;;;;N;;;;; +1DAAA;SIGNWRITING ROTATION MODIFIER-11;Mn;0;NSM;;;;;N;;;;; +1DAAB;SIGNWRITING ROTATION MODIFIER-12;Mn;0;NSM;;;;;N;;;;; +1DAAC;SIGNWRITING ROTATION MODIFIER-13;Mn;0;NSM;;;;;N;;;;; +1DAAD;SIGNWRITING ROTATION MODIFIER-14;Mn;0;NSM;;;;;N;;;;; +1DAAE;SIGNWRITING ROTATION MODIFIER-15;Mn;0;NSM;;;;;N;;;;; +1DAAF;SIGNWRITING ROTATION MODIFIER-16;Mn;0;NSM;;;;;N;;;;; +1E000;COMBINING GLAGOLITIC LETTER AZU;Mn;230;NSM;;;;;N;;;;; +1E001;COMBINING GLAGOLITIC LETTER BUKY;Mn;230;NSM;;;;;N;;;;; +1E002;COMBINING GLAGOLITIC LETTER VEDE;Mn;230;NSM;;;;;N;;;;; +1E003;COMBINING GLAGOLITIC LETTER GLAGOLI;Mn;230;NSM;;;;;N;;;;; +1E004;COMBINING GLAGOLITIC LETTER DOBRO;Mn;230;NSM;;;;;N;;;;; +1E005;COMBINING GLAGOLITIC LETTER YESTU;Mn;230;NSM;;;;;N;;;;; +1E006;COMBINING GLAGOLITIC LETTER ZHIVETE;Mn;230;NSM;;;;;N;;;;; +1E008;COMBINING GLAGOLITIC LETTER ZEMLJA;Mn;230;NSM;;;;;N;;;;; +1E009;COMBINING GLAGOLITIC LETTER 
IZHE;Mn;230;NSM;;;;;N;;;;; +1E00A;COMBINING GLAGOLITIC LETTER INITIAL IZHE;Mn;230;NSM;;;;;N;;;;; +1E00B;COMBINING GLAGOLITIC LETTER I;Mn;230;NSM;;;;;N;;;;; +1E00C;COMBINING GLAGOLITIC LETTER DJERVI;Mn;230;NSM;;;;;N;;;;; +1E00D;COMBINING GLAGOLITIC LETTER KAKO;Mn;230;NSM;;;;;N;;;;; +1E00E;COMBINING GLAGOLITIC LETTER LJUDIJE;Mn;230;NSM;;;;;N;;;;; +1E00F;COMBINING GLAGOLITIC LETTER MYSLITE;Mn;230;NSM;;;;;N;;;;; +1E010;COMBINING GLAGOLITIC LETTER NASHI;Mn;230;NSM;;;;;N;;;;; +1E011;COMBINING GLAGOLITIC LETTER ONU;Mn;230;NSM;;;;;N;;;;; +1E012;COMBINING GLAGOLITIC LETTER POKOJI;Mn;230;NSM;;;;;N;;;;; +1E013;COMBINING GLAGOLITIC LETTER RITSI;Mn;230;NSM;;;;;N;;;;; +1E014;COMBINING GLAGOLITIC LETTER SLOVO;Mn;230;NSM;;;;;N;;;;; +1E015;COMBINING GLAGOLITIC LETTER TVRIDO;Mn;230;NSM;;;;;N;;;;; +1E016;COMBINING GLAGOLITIC LETTER UKU;Mn;230;NSM;;;;;N;;;;; +1E017;COMBINING GLAGOLITIC LETTER FRITU;Mn;230;NSM;;;;;N;;;;; +1E018;COMBINING GLAGOLITIC LETTER HERU;Mn;230;NSM;;;;;N;;;;; +1E01B;COMBINING GLAGOLITIC LETTER SHTA;Mn;230;NSM;;;;;N;;;;; +1E01C;COMBINING GLAGOLITIC LETTER TSI;Mn;230;NSM;;;;;N;;;;; +1E01D;COMBINING GLAGOLITIC LETTER CHRIVI;Mn;230;NSM;;;;;N;;;;; +1E01E;COMBINING GLAGOLITIC LETTER SHA;Mn;230;NSM;;;;;N;;;;; +1E01F;COMBINING GLAGOLITIC LETTER YERU;Mn;230;NSM;;;;;N;;;;; +1E020;COMBINING GLAGOLITIC LETTER YERI;Mn;230;NSM;;;;;N;;;;; +1E021;COMBINING GLAGOLITIC LETTER YATI;Mn;230;NSM;;;;;N;;;;; +1E023;COMBINING GLAGOLITIC LETTER YU;Mn;230;NSM;;;;;N;;;;; +1E024;COMBINING GLAGOLITIC LETTER SMALL YUS;Mn;230;NSM;;;;;N;;;;; +1E026;COMBINING GLAGOLITIC LETTER YO;Mn;230;NSM;;;;;N;;;;; +1E027;COMBINING GLAGOLITIC LETTER IOTATED SMALL YUS;Mn;230;NSM;;;;;N;;;;; +1E028;COMBINING GLAGOLITIC LETTER BIG YUS;Mn;230;NSM;;;;;N;;;;; +1E029;COMBINING GLAGOLITIC LETTER IOTATED BIG YUS;Mn;230;NSM;;;;;N;;;;; +1E02A;COMBINING GLAGOLITIC LETTER FITA;Mn;230;NSM;;;;;N;;;;; 1E800;MENDE KIKAKUI SYLLABLE M001 KI;Lo;0;R;;;;;N;;;;; 1E801;MENDE KIKAKUI SYLLABLE M002 KA;Lo;0;R;;;;;N;;;;; 1E802;MENDE KIKAKUI SYLLABLE M003 KU;Lo;0;R;;;;;N;;;;; @@ -24464,6 +27564,93 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1E8D4;MENDE KIKAKUI COMBINING NUMBER TEN THOUSANDS;Mn;220;NSM;;;;;N;;;;; 1E8D5;MENDE KIKAKUI COMBINING NUMBER HUNDRED THOUSANDS;Mn;220;NSM;;;;;N;;;;; 1E8D6;MENDE KIKAKUI COMBINING NUMBER MILLIONS;Mn;220;NSM;;;;;N;;;;; +1E900;ADLAM CAPITAL LETTER ALIF;Lu;0;R;;;;;N;;;;1E922; +1E901;ADLAM CAPITAL LETTER DAALI;Lu;0;R;;;;;N;;;;1E923; +1E902;ADLAM CAPITAL LETTER LAAM;Lu;0;R;;;;;N;;;;1E924; +1E903;ADLAM CAPITAL LETTER MIIM;Lu;0;R;;;;;N;;;;1E925; +1E904;ADLAM CAPITAL LETTER BA;Lu;0;R;;;;;N;;;;1E926; +1E905;ADLAM CAPITAL LETTER SINNYIIYHE;Lu;0;R;;;;;N;;;;1E927; +1E906;ADLAM CAPITAL LETTER PE;Lu;0;R;;;;;N;;;;1E928; +1E907;ADLAM CAPITAL LETTER BHE;Lu;0;R;;;;;N;;;;1E929; +1E908;ADLAM CAPITAL LETTER RA;Lu;0;R;;;;;N;;;;1E92A; +1E909;ADLAM CAPITAL LETTER E;Lu;0;R;;;;;N;;;;1E92B; +1E90A;ADLAM CAPITAL LETTER FA;Lu;0;R;;;;;N;;;;1E92C; +1E90B;ADLAM CAPITAL LETTER I;Lu;0;R;;;;;N;;;;1E92D; +1E90C;ADLAM CAPITAL LETTER O;Lu;0;R;;;;;N;;;;1E92E; +1E90D;ADLAM CAPITAL LETTER DHA;Lu;0;R;;;;;N;;;;1E92F; +1E90E;ADLAM CAPITAL LETTER YHE;Lu;0;R;;;;;N;;;;1E930; +1E90F;ADLAM CAPITAL LETTER WAW;Lu;0;R;;;;;N;;;;1E931; +1E910;ADLAM CAPITAL LETTER NUN;Lu;0;R;;;;;N;;;;1E932; +1E911;ADLAM CAPITAL LETTER KAF;Lu;0;R;;;;;N;;;;1E933; +1E912;ADLAM CAPITAL LETTER YA;Lu;0;R;;;;;N;;;;1E934; +1E913;ADLAM CAPITAL LETTER U;Lu;0;R;;;;;N;;;;1E935; +1E914;ADLAM CAPITAL LETTER JIIM;Lu;0;R;;;;;N;;;;1E936; +1E915;ADLAM CAPITAL LETTER CHI;Lu;0;R;;;;;N;;;;1E937; +1E916;ADLAM 
CAPITAL LETTER HA;Lu;0;R;;;;;N;;;;1E938; +1E917;ADLAM CAPITAL LETTER QAAF;Lu;0;R;;;;;N;;;;1E939; +1E918;ADLAM CAPITAL LETTER GA;Lu;0;R;;;;;N;;;;1E93A; +1E919;ADLAM CAPITAL LETTER NYA;Lu;0;R;;;;;N;;;;1E93B; +1E91A;ADLAM CAPITAL LETTER TU;Lu;0;R;;;;;N;;;;1E93C; +1E91B;ADLAM CAPITAL LETTER NHA;Lu;0;R;;;;;N;;;;1E93D; +1E91C;ADLAM CAPITAL LETTER VA;Lu;0;R;;;;;N;;;;1E93E; +1E91D;ADLAM CAPITAL LETTER KHA;Lu;0;R;;;;;N;;;;1E93F; +1E91E;ADLAM CAPITAL LETTER GBE;Lu;0;R;;;;;N;;;;1E940; +1E91F;ADLAM CAPITAL LETTER ZAL;Lu;0;R;;;;;N;;;;1E941; +1E920;ADLAM CAPITAL LETTER KPO;Lu;0;R;;;;;N;;;;1E942; +1E921;ADLAM CAPITAL LETTER SHA;Lu;0;R;;;;;N;;;;1E943; +1E922;ADLAM SMALL LETTER ALIF;Ll;0;R;;;;;N;;;1E900;;1E900 +1E923;ADLAM SMALL LETTER DAALI;Ll;0;R;;;;;N;;;1E901;;1E901 +1E924;ADLAM SMALL LETTER LAAM;Ll;0;R;;;;;N;;;1E902;;1E902 +1E925;ADLAM SMALL LETTER MIIM;Ll;0;R;;;;;N;;;1E903;;1E903 +1E926;ADLAM SMALL LETTER BA;Ll;0;R;;;;;N;;;1E904;;1E904 +1E927;ADLAM SMALL LETTER SINNYIIYHE;Ll;0;R;;;;;N;;;1E905;;1E905 +1E928;ADLAM SMALL LETTER PE;Ll;0;R;;;;;N;;;1E906;;1E906 +1E929;ADLAM SMALL LETTER BHE;Ll;0;R;;;;;N;;;1E907;;1E907 +1E92A;ADLAM SMALL LETTER RA;Ll;0;R;;;;;N;;;1E908;;1E908 +1E92B;ADLAM SMALL LETTER E;Ll;0;R;;;;;N;;;1E909;;1E909 +1E92C;ADLAM SMALL LETTER FA;Ll;0;R;;;;;N;;;1E90A;;1E90A +1E92D;ADLAM SMALL LETTER I;Ll;0;R;;;;;N;;;1E90B;;1E90B +1E92E;ADLAM SMALL LETTER O;Ll;0;R;;;;;N;;;1E90C;;1E90C +1E92F;ADLAM SMALL LETTER DHA;Ll;0;R;;;;;N;;;1E90D;;1E90D +1E930;ADLAM SMALL LETTER YHE;Ll;0;R;;;;;N;;;1E90E;;1E90E +1E931;ADLAM SMALL LETTER WAW;Ll;0;R;;;;;N;;;1E90F;;1E90F +1E932;ADLAM SMALL LETTER NUN;Ll;0;R;;;;;N;;;1E910;;1E910 +1E933;ADLAM SMALL LETTER KAF;Ll;0;R;;;;;N;;;1E911;;1E911 +1E934;ADLAM SMALL LETTER YA;Ll;0;R;;;;;N;;;1E912;;1E912 +1E935;ADLAM SMALL LETTER U;Ll;0;R;;;;;N;;;1E913;;1E913 +1E936;ADLAM SMALL LETTER JIIM;Ll;0;R;;;;;N;;;1E914;;1E914 +1E937;ADLAM SMALL LETTER CHI;Ll;0;R;;;;;N;;;1E915;;1E915 +1E938;ADLAM SMALL LETTER HA;Ll;0;R;;;;;N;;;1E916;;1E916 +1E939;ADLAM SMALL LETTER QAAF;Ll;0;R;;;;;N;;;1E917;;1E917 +1E93A;ADLAM SMALL LETTER GA;Ll;0;R;;;;;N;;;1E918;;1E918 +1E93B;ADLAM SMALL LETTER NYA;Ll;0;R;;;;;N;;;1E919;;1E919 +1E93C;ADLAM SMALL LETTER TU;Ll;0;R;;;;;N;;;1E91A;;1E91A +1E93D;ADLAM SMALL LETTER NHA;Ll;0;R;;;;;N;;;1E91B;;1E91B +1E93E;ADLAM SMALL LETTER VA;Ll;0;R;;;;;N;;;1E91C;;1E91C +1E93F;ADLAM SMALL LETTER KHA;Ll;0;R;;;;;N;;;1E91D;;1E91D +1E940;ADLAM SMALL LETTER GBE;Ll;0;R;;;;;N;;;1E91E;;1E91E +1E941;ADLAM SMALL LETTER ZAL;Ll;0;R;;;;;N;;;1E91F;;1E91F +1E942;ADLAM SMALL LETTER KPO;Ll;0;R;;;;;N;;;1E920;;1E920 +1E943;ADLAM SMALL LETTER SHA;Ll;0;R;;;;;N;;;1E921;;1E921 +1E944;ADLAM ALIF LENGTHENER;Mn;230;NSM;;;;;N;;;;; +1E945;ADLAM VOWEL LENGTHENER;Mn;230;NSM;;;;;N;;;;; +1E946;ADLAM GEMINATION MARK;Mn;230;NSM;;;;;N;;;;; +1E947;ADLAM HAMZA;Mn;230;NSM;;;;;N;;;;; +1E948;ADLAM CONSONANT MODIFIER;Mn;230;NSM;;;;;N;;;;; +1E949;ADLAM GEMINATE CONSONANT MODIFIER;Mn;230;NSM;;;;;N;;;;; +1E94A;ADLAM NUKTA;Mn;7;NSM;;;;;N;;;;; +1E950;ADLAM DIGIT ZERO;Nd;0;R;;0;0;0;N;;;;; +1E951;ADLAM DIGIT ONE;Nd;0;R;;1;1;1;N;;;;; +1E952;ADLAM DIGIT TWO;Nd;0;R;;2;2;2;N;;;;; +1E953;ADLAM DIGIT THREE;Nd;0;R;;3;3;3;N;;;;; +1E954;ADLAM DIGIT FOUR;Nd;0;R;;4;4;4;N;;;;; +1E955;ADLAM DIGIT FIVE;Nd;0;R;;5;5;5;N;;;;; +1E956;ADLAM DIGIT SIX;Nd;0;R;;6;6;6;N;;;;; +1E957;ADLAM DIGIT SEVEN;Nd;0;R;;7;7;7;N;;;;; +1E958;ADLAM DIGIT EIGHT;Nd;0;R;;8;8;8;N;;;;; +1E959;ADLAM DIGIT NINE;Nd;0;R;;9;9;9;N;;;;; +1E95E;ADLAM INITIAL EXCLAMATION MARK;Po;0;R;;;;;N;;;;; +1E95F;ADLAM INITIAL QUESTION MARK;Po;0;R;;;;;N;;;;; 1EE00;ARABIC MATHEMATICAL 
ALEF;Lo;0;AL; 0627;;;;N;;;;; 1EE01;ARABIC MATHEMATICAL BEH;Lo;0;AL; 0628;;;;N;;;;; 1EE02;ARABIC MATHEMATICAL JEEM;Lo;0;AL; 062C;;;;N;;;;; @@ -24980,6 +28167,24 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F198;SQUARED SOS;So;0;L;;;;;N;;;;; 1F199;SQUARED UP WITH EXCLAMATION MARK;So;0;L;;;;;N;;;;; 1F19A;SQUARED VS;So;0;L;;;;;N;;;;; +1F19B;SQUARED THREE D;So;0;L;;;;;N;;;;; +1F19C;SQUARED SECOND SCREEN;So;0;L;;;;;N;;;;; +1F19D;SQUARED TWO K;So;0;L;;;;;N;;;;; +1F19E;SQUARED FOUR K;So;0;L;;;;;N;;;;; +1F19F;SQUARED EIGHT K;So;0;L;;;;;N;;;;; +1F1A0;SQUARED FIVE POINT ONE;So;0;L;;;;;N;;;;; +1F1A1;SQUARED SEVEN POINT ONE;So;0;L;;;;;N;;;;; +1F1A2;SQUARED TWENTY-TWO POINT TWO;So;0;L;;;;;N;;;;; +1F1A3;SQUARED SIXTY P;So;0;L;;;;;N;;;;; +1F1A4;SQUARED ONE HUNDRED TWENTY P;So;0;L;;;;;N;;;;; +1F1A5;SQUARED LATIN SMALL LETTER D;So;0;L;;;;;N;;;;; +1F1A6;SQUARED HC;So;0;L;;;;;N;;;;; +1F1A7;SQUARED HDR;So;0;L;;;;;N;;;;; +1F1A8;SQUARED HI-RES;So;0;L;;;;;N;;;;; +1F1A9;SQUARED LOSSLESS;So;0;L;;;;;N;;;;; +1F1AA;SQUARED SHV;So;0;L;;;;;N;;;;; +1F1AB;SQUARED UHD;So;0;L;;;;;N;;;;; +1F1AC;SQUARED VOD;So;0;L;;;;;N;;;;; 1F1E6;REGIONAL INDICATOR SYMBOL LETTER A;So;0;L;;;;;N;;;;; 1F1E7;REGIONAL INDICATOR SYMBOL LETTER B;So;0;L;;;;;N;;;;; 1F1E8;REGIONAL INDICATOR SYMBOL LETTER C;So;0;L;;;;;N;;;;; @@ -25052,6 +28257,7 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F238;SQUARED CJK UNIFIED IDEOGRAPH-7533;So;0;L; 7533;;;;N;;;;; 1F239;SQUARED CJK UNIFIED IDEOGRAPH-5272;So;0;L; 5272;;;;N;;;;; 1F23A;SQUARED CJK UNIFIED IDEOGRAPH-55B6;So;0;L; 55B6;;;;N;;;;; +1F23B;SQUARED CJK UNIFIED IDEOGRAPH-914D;So;0;L; 914D;;;;N;;;;; 1F240;TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-672C;So;0;L; 3014 672C 3015;;;;N;;;;; 1F241;TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-4E09;So;0;L; 3014 4E09 3015;;;;N;;;;; 1F242;TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-4E8C;So;0;L; 3014 4E8C 3015;;;;N;;;;; @@ -25108,6 +28314,9 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F32A;CLOUD WITH TORNADO;So;0;ON;;;;;N;;;;; 1F32B;FOG;So;0;ON;;;;;N;;;;; 1F32C;WIND BLOWING FACE;So;0;ON;;;;;N;;;;; +1F32D;HOT DOG;So;0;ON;;;;;N;;;;; +1F32E;TACO;So;0;ON;;;;;N;;;;; +1F32F;BURRITO;So;0;ON;;;;;N;;;;; 1F330;CHESTNUT;So;0;ON;;;;;N;;;;; 1F331;SEEDLING;So;0;ON;;;;;N;;;;; 1F332;EVERGREEN TREE;So;0;ON;;;;;N;;;;; @@ -25186,6 +28395,8 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F37B;CLINKING BEER MUGS;So;0;ON;;;;;N;;;;; 1F37C;BABY BOTTLE;So;0;ON;;;;;N;;;;; 1F37D;FORK AND KNIFE WITH PLATE;So;0;ON;;;;;N;;;;; +1F37E;BOTTLE WITH POPPING CORK;So;0;ON;;;;;N;;;;; +1F37F;POPCORN;So;0;ON;;;;;N;;;;; 1F380;RIBBON;So;0;ON;;;;;N;;;;; 1F381;WRAPPED PRESENT;So;0;ON;;;;;N;;;;; 1F382;BIRTHDAY CAKE;So;0;ON;;;;;N;;;;; @@ -25265,6 +28476,11 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F3CC;GOLFER;So;0;ON;;;;;N;;;;; 1F3CD;RACING MOTORCYCLE;So;0;ON;;;;;N;;;;; 1F3CE;RACING CAR;So;0;ON;;;;;N;;;;; +1F3CF;CRICKET BAT AND BALL;So;0;ON;;;;;N;;;;; +1F3D0;VOLLEYBALL;So;0;ON;;;;;N;;;;; +1F3D1;FIELD HOCKEY STICK AND BALL;So;0;ON;;;;;N;;;;; +1F3D2;ICE HOCKEY STICK AND PUCK;So;0;ON;;;;;N;;;;; +1F3D3;TABLE TENNIS PADDLE AND BALL;So;0;ON;;;;;N;;;;; 1F3D4;SNOW CAPPED MOUNTAIN;So;0;ON;;;;;N;;;;; 1F3D5;CAMPING;So;0;ON;;;;;N;;;;; 1F3D6;BEACH WITH UMBRELLA;So;0;ON;;;;;N;;;;; @@ -25301,6 +28517,14 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F3F5;ROSETTE;So;0;ON;;;;;N;;;;; 1F3F6;BLACK ROSETTE;So;0;ON;;;;;N;;;;; 1F3F7;LABEL;So;0;ON;;;;;N;;;;; +1F3F8;BADMINTON RACQUET AND SHUTTLECOCK;So;0;ON;;;;;N;;;;; +1F3F9;BOW AND ARROW;So;0;ON;;;;;N;;;;; +1F3FA;AMPHORA;So;0;ON;;;;;N;;;;; +1F3FB;EMOJI 
MODIFIER FITZPATRICK TYPE-1-2;Sk;0;ON;;;;;N;;;;; +1F3FC;EMOJI MODIFIER FITZPATRICK TYPE-3;Sk;0;ON;;;;;N;;;;; +1F3FD;EMOJI MODIFIER FITZPATRICK TYPE-4;Sk;0;ON;;;;;N;;;;; +1F3FE;EMOJI MODIFIER FITZPATRICK TYPE-5;Sk;0;ON;;;;;N;;;;; +1F3FF;EMOJI MODIFIER FITZPATRICK TYPE-6;Sk;0;ON;;;;;N;;;;; 1F400;RAT;So;0;ON;;;;;N;;;;; 1F401;MOUSE;So;0;ON;;;;;N;;;;; 1F402;OX;So;0;ON;;;;;N;;;;; @@ -25556,6 +28780,7 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F4FC;VIDEOCASSETTE;So;0;ON;;;;;N;;;;; 1F4FD;FILM PROJECTOR;So;0;ON;;;;;N;;;;; 1F4FE;PORTABLE STEREO;So;0;ON;;;;;N;;;;; +1F4FF;PRAYER BEADS;So;0;ON;;;;;N;;;;; 1F500;TWISTED RIGHTWARDS ARROWS;So;0;ON;;;;;N;;;;; 1F501;CLOCKWISE RIGHTWARDS AND LEFTWARDS OPEN CIRCLE ARROWS;So;0;ON;;;;;N;;;;; 1F502;CLOCKWISE RIGHTWARDS AND LEFTWARDS OPEN CIRCLE ARROWS WITH CIRCLED ONE OVERLAY;So;0;ON;;;;;N;;;;; @@ -25631,6 +28856,11 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F548;CELTIC CROSS;So;0;ON;;;;;N;;;;; 1F549;OM SYMBOL;So;0;ON;;;;;N;;;;; 1F54A;DOVE OF PEACE;So;0;ON;;;;;N;;;;; +1F54B;KAABA;So;0;ON;;;;;N;;;;; +1F54C;MOSQUE;So;0;ON;;;;;N;;;;; +1F54D;SYNAGOGUE;So;0;ON;;;;;N;;;;; +1F54E;MENORAH WITH NINE BRANCHES;So;0;ON;;;;;N;;;;; +1F54F;BOWL OF HYGIEIA;So;0;ON;;;;;N;;;;; 1F550;CLOCK FACE ONE OCLOCK;So;0;ON;;;;;N;;;;; 1F551;CLOCK FACE TWO OCLOCK;So;0;ON;;;;;N;;;;; 1F552;CLOCK FACE THREE OCLOCK;So;0;ON;;;;;N;;;;; @@ -25673,6 +28903,7 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F577;SPIDER;So;0;ON;;;;;N;;;;; 1F578;SPIDER WEB;So;0;ON;;;;;N;;;;; 1F579;JOYSTICK;So;0;ON;;;;;N;;;;; +1F57A;MAN DANCING;So;0;ON;;;;;N;;;;; 1F57B;LEFT HAND TELEPHONE RECEIVER;So;0;ON;;;;;N;;;;; 1F57C;TELEPHONE RECEIVER WITH PAGE;So;0;ON;;;;;N;;;;; 1F57D;RIGHT HAND TELEPHONE RECEIVER;So;0;ON;;;;;N;;;;; @@ -25714,6 +28945,7 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F5A1;SIDEWAYS BLACK DOWN POINTING INDEX;So;0;ON;;;;;N;;;;; 1F5A2;BLACK UP POINTING BACKHAND INDEX;So;0;ON;;;;;N;;;;; 1F5A3;BLACK DOWN POINTING BACKHAND INDEX;So;0;ON;;;;;N;;;;; +1F5A4;BLACK HEART;So;0;ON;;;;;N;;;;; 1F5A5;DESKTOP COMPUTER;So;0;ON;;;;;N;;;;; 1F5A6;KEYBOARD AND MOUSE;So;0;ON;;;;;N;;;;; 1F5A7;THREE NETWORKED COMPUTERS;So;0;ON;;;;;N;;;;; @@ -25872,6 +29104,8 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F640;WEARY CAT FACE;So;0;ON;;;;;N;;;;; 1F641;SLIGHTLY FROWNING FACE;So;0;ON;;;;;N;;;;; 1F642;SLIGHTLY SMILING FACE;So;0;ON;;;;;N;;;;; +1F643;UPSIDE-DOWN FACE;So;0;ON;;;;;N;;;;; +1F644;FACE WITH ROLLING EYES;So;0;ON;;;;;N;;;;; 1F645;FACE WITH NO GOOD GESTURE;So;0;ON;;;;;N;;;;; 1F646;FACE WITH OK GESTURE;So;0;ON;;;;;N;;;;; 1F647;PERSON BOWING DEEPLY;So;0;ON;;;;;N;;;;; @@ -26011,6 +29245,9 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F6CD;SHOPPING BAGS;So;0;ON;;;;;N;;;;; 1F6CE;BELLHOP BELL;So;0;ON;;;;;N;;;;; 1F6CF;BED;So;0;ON;;;;;N;;;;; +1F6D0;PLACE OF WORSHIP;So;0;ON;;;;;N;;;;; +1F6D1;OCTAGONAL SIGN;So;0;ON;;;;;N;;;;; +1F6D2;SHOPPING TROLLEY;So;0;ON;;;;;N;;;;; 1F6E0;HAMMER AND WRENCH;So;0;ON;;;;;N;;;;; 1F6E1;SHIELD;So;0;ON;;;;;N;;;;; 1F6E2;OIL DRUM;So;0;ON;;;;;N;;;;; @@ -26028,6 +29265,9 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 1F6F1;ONCOMING FIRE ENGINE;So;0;ON;;;;;N;;;;; 1F6F2;DIESEL LOCOMOTIVE;So;0;ON;;;;;N;;;;; 1F6F3;PASSENGER SHIP;So;0;ON;;;;;N;;;;; +1F6F4;SCOOTER;So;0;ON;;;;;N;;;;; +1F6F5;MOTOR SCOOTER;So;0;ON;;;;;N;;;;; +1F6F6;CANOE;So;0;ON;;;;;N;;;;; 1F700;ALCHEMICAL SYMBOL FOR QUINTESSENCE;So;0;ON;;;;;N;;;;; 1F701;ALCHEMICAL SYMBOL FOR AIR;So;0;ON;;;;;N;;;;; 1F702;ALCHEMICAL SYMBOL FOR FIRE;So;0;ON;;;;;N;;;;; @@ -26377,12 +29617,96 @@ FFFD;REPLACEMENT CHARACTER;So;0;ON;;;;;N;;;;; 
1F8AB;RIGHTWARDS FRONT-TILTED SHADOWED WHITE ARROW;So;0;ON;;;;;N;;;;; 1F8AC;WHITE ARROW SHAFT WIDTH ONE;So;0;ON;;;;;N;;;;; 1F8AD;WHITE ARROW SHAFT WIDTH TWO THIRDS;So;0;ON;;;;;N;;;;; +1F910;ZIPPER-MOUTH FACE;So;0;ON;;;;;N;;;;; +1F911;MONEY-MOUTH FACE;So;0;ON;;;;;N;;;;; +1F912;FACE WITH THERMOMETER;So;0;ON;;;;;N;;;;; +1F913;NERD FACE;So;0;ON;;;;;N;;;;; +1F914;THINKING FACE;So;0;ON;;;;;N;;;;; +1F915;FACE WITH HEAD-BANDAGE;So;0;ON;;;;;N;;;;; +1F916;ROBOT FACE;So;0;ON;;;;;N;;;;; +1F917;HUGGING FACE;So;0;ON;;;;;N;;;;; +1F918;SIGN OF THE HORNS;So;0;ON;;;;;N;;;;; +1F919;CALL ME HAND;So;0;ON;;;;;N;;;;; +1F91A;RAISED BACK OF HAND;So;0;ON;;;;;N;;;;; +1F91B;LEFT-FACING FIST;So;0;ON;;;;;N;;;;; +1F91C;RIGHT-FACING FIST;So;0;ON;;;;;N;;;;; +1F91D;HANDSHAKE;So;0;ON;;;;;N;;;;; +1F91E;HAND WITH INDEX AND MIDDLE FINGERS CROSSED;So;0;ON;;;;;N;;;;; +1F920;FACE WITH COWBOY HAT;So;0;ON;;;;;N;;;;; +1F921;CLOWN FACE;So;0;ON;;;;;N;;;;; +1F922;NAUSEATED FACE;So;0;ON;;;;;N;;;;; +1F923;ROLLING ON THE FLOOR LAUGHING;So;0;ON;;;;;N;;;;; +1F924;DROOLING FACE;So;0;ON;;;;;N;;;;; +1F925;LYING FACE;So;0;ON;;;;;N;;;;; +1F926;FACE PALM;So;0;ON;;;;;N;;;;; +1F927;SNEEZING FACE;So;0;ON;;;;;N;;;;; +1F930;PREGNANT WOMAN;So;0;ON;;;;;N;;;;; +1F933;SELFIE;So;0;ON;;;;;N;;;;; +1F934;PRINCE;So;0;ON;;;;;N;;;;; +1F935;MAN IN TUXEDO;So;0;ON;;;;;N;;;;; +1F936;MOTHER CHRISTMAS;So;0;ON;;;;;N;;;;; +1F937;SHRUG;So;0;ON;;;;;N;;;;; +1F938;PERSON DOING CARTWHEEL;So;0;ON;;;;;N;;;;; +1F939;JUGGLING;So;0;ON;;;;;N;;;;; +1F93A;FENCER;So;0;ON;;;;;N;;;;; +1F93B;MODERN PENTATHLON;So;0;ON;;;;;N;;;;; +1F93C;WRESTLERS;So;0;ON;;;;;N;;;;; +1F93D;WATER POLO;So;0;ON;;;;;N;;;;; +1F93E;HANDBALL;So;0;ON;;;;;N;;;;; +1F940;WILTED FLOWER;So;0;ON;;;;;N;;;;; +1F941;DRUM WITH DRUMSTICKS;So;0;ON;;;;;N;;;;; +1F942;CLINKING GLASSES;So;0;ON;;;;;N;;;;; +1F943;TUMBLER GLASS;So;0;ON;;;;;N;;;;; +1F944;SPOON;So;0;ON;;;;;N;;;;; +1F945;GOAL NET;So;0;ON;;;;;N;;;;; +1F946;RIFLE;So;0;ON;;;;;N;;;;; +1F947;FIRST PLACE MEDAL;So;0;ON;;;;;N;;;;; +1F948;SECOND PLACE MEDAL;So;0;ON;;;;;N;;;;; +1F949;THIRD PLACE MEDAL;So;0;ON;;;;;N;;;;; +1F94A;BOXING GLOVE;So;0;ON;;;;;N;;;;; +1F94B;MARTIAL ARTS UNIFORM;So;0;ON;;;;;N;;;;; +1F950;CROISSANT;So;0;ON;;;;;N;;;;; +1F951;AVOCADO;So;0;ON;;;;;N;;;;; +1F952;CUCUMBER;So;0;ON;;;;;N;;;;; +1F953;BACON;So;0;ON;;;;;N;;;;; +1F954;POTATO;So;0;ON;;;;;N;;;;; +1F955;CARROT;So;0;ON;;;;;N;;;;; +1F956;BAGUETTE BREAD;So;0;ON;;;;;N;;;;; +1F957;GREEN SALAD;So;0;ON;;;;;N;;;;; +1F958;SHALLOW PAN OF FOOD;So;0;ON;;;;;N;;;;; +1F959;STUFFED FLATBREAD;So;0;ON;;;;;N;;;;; +1F95A;EGG;So;0;ON;;;;;N;;;;; +1F95B;GLASS OF MILK;So;0;ON;;;;;N;;;;; +1F95C;PEANUTS;So;0;ON;;;;;N;;;;; +1F95D;KIWIFRUIT;So;0;ON;;;;;N;;;;; +1F95E;PANCAKES;So;0;ON;;;;;N;;;;; +1F980;CRAB;So;0;ON;;;;;N;;;;; +1F981;LION FACE;So;0;ON;;;;;N;;;;; +1F982;SCORPION;So;0;ON;;;;;N;;;;; +1F983;TURKEY;So;0;ON;;;;;N;;;;; +1F984;UNICORN FACE;So;0;ON;;;;;N;;;;; +1F985;EAGLE;So;0;ON;;;;;N;;;;; +1F986;DUCK;So;0;ON;;;;;N;;;;; +1F987;BAT;So;0;ON;;;;;N;;;;; +1F988;SHARK;So;0;ON;;;;;N;;;;; +1F989;OWL;So;0;ON;;;;;N;;;;; +1F98A;FOX FACE;So;0;ON;;;;;N;;;;; +1F98B;BUTTERFLY;So;0;ON;;;;;N;;;;; +1F98C;DEER;So;0;ON;;;;;N;;;;; +1F98D;GORILLA;So;0;ON;;;;;N;;;;; +1F98E;LIZARD;So;0;ON;;;;;N;;;;; +1F98F;RHINOCEROS;So;0;ON;;;;;N;;;;; +1F990;SHRIMP;So;0;ON;;;;;N;;;;; +1F991;SQUID;So;0;ON;;;;;N;;;;; +1F9C0;CHEESE WEDGE;So;0;ON;;;;;N;;;;; 20000;;Lo;0;L;;;;;N;;;;; 2A6D6;;Lo;0;L;;;;;N;;;;; 2A700;;Lo;0;L;;;;;N;;;;; 2B734;;Lo;0;L;;;;;N;;;;; 2B740;;Lo;0;L;;;;;N;;;;; 2B81D;;Lo;0;L;;;;;N;;;;; +2B820;;Lo;0;L;;;;;N;;;;; +2CEA1;;Lo;0;L;;;;;N;;;;; 2F800;CJK 
COMPATIBILITY IDEOGRAPH-2F800;Lo;0;L;4E3D;;;;N;;;;; 2F801;CJK COMPATIBILITY IDEOGRAPH-2F801;Lo;0;L;4E38;;;;N;;;;; 2F802;CJK COMPATIBILITY IDEOGRAPH-2F802;Lo;0;L;4E41;;;;N;;;;; diff --git a/lib/elixir/unicode/graphemes_test.exs b/lib/elixir/unicode/graphemes_test.exs new file mode 100644 index 00000000000..3960e60e9db --- /dev/null +++ b/lib/elixir/unicode/graphemes_test.exs @@ -0,0 +1,68 @@ +defmodule GraphemesTest do + def run do + IO.puts "Running GraphemeBreakTest.txt" + count = run_grapheme_break() + IO.puts "Got #{count} failures" + end + + defp run_grapheme_break do + Path.join(__DIR__, "GraphemeBreakTest.txt") + |> File.stream!() + |> Stream.filter(&match?("÷" <> _, &1)) + |> Stream.reject(& &1 =~ "D800") + |> Enum.reduce(0, fn line, acc -> + [string | _] = String.split(line, "#", parts: 2) + {string, graphemes} = parse_grapheme_break(string) + if String.graphemes(string) == graphemes do + acc + else + acc = acc + 1 + IO.puts """ + ============== Failure ##{acc} ============== + + String.graphemes(#{inspect string}) + + must be: + + #{inspect graphemes} + + got: + + #{inspect String.graphemes(string)} + + On line: + + #{line} + """ + acc + end + end) + end + + defp parse_grapheme_break(string) do + string + |> String.trim() + |> String.trim_leading("÷ ") + |> String.trim_trailing(" ÷") + |> parse_grapheme_break("", []) + end + + defp parse_grapheme_break(string, acc_string, acc_list) do + case String.split(string, " ÷ ", parts: 2) do + [left, right] -> + grapheme = breaks_to_grapheme(left) + parse_grapheme_break(right, acc_string <> grapheme, [grapheme | acc_list]) + [left] -> + grapheme = breaks_to_grapheme(left) + {acc_string <> grapheme, Enum.reverse([grapheme | acc_list])} + end + end + + defp breaks_to_grapheme(string) do + for codepoint <- String.split(string, " × "), + do: <>, + into: "" + end +end + +GraphemesTest.run diff --git a/lib/elixir/unicode/properties.ex b/lib/elixir/unicode/properties.ex new file mode 100644 index 00000000000..0c52844800a --- /dev/null +++ b/lib/elixir/unicode/properties.ex @@ -0,0 +1,346 @@ +data_path = Path.join(__DIR__, "UnicodeData.txt") + +to_binary = fn + "" -> + nil + codepoints -> + codepoints + |> :binary.split(" ", [:global]) + |> Enum.map(&<>) + |> IO.iodata_to_binary +end + +{codes, non_breakable, decompositions, combining_classes} = + Enum.reduce File.stream!(data_path), {[], [], %{}, %{}}, fn line, {cacc, wacc, dacc, kacc} -> + [codepoint, _name, _category, + class, _bidi, decomposition, + _numeric_1, _numeric_2, _numeric_3, + _bidi_mirror, _unicode_1, _iso, + upper, lower, title] = :binary.split(line, ";", [:global]) + + title = :binary.part(title, 0, byte_size(title) - 1) + + cacc = + if upper != "" or lower != "" or title != "" do + [{to_binary.(codepoint), to_binary.(upper), to_binary.(lower), to_binary.(title)} | cacc] + else + cacc + end + + wacc = + case decomposition do + "" <> _ -> [to_binary.(codepoint) | wacc] + _ -> wacc + end + + dacc = + case decomposition do + <> when h != ?< -> # Decomposition + decomposition = + decomposition + |> :binary.split(" ", [:global]) + |> Enum.map(&String.to_integer(&1, 16)) + Map.put(dacc, String.to_integer(codepoint, 16), decomposition) + _ -> + dacc + end + + kacc = + case Integer.parse(class) do + {0, ""} -> kacc + {n, ""} -> Map.put(kacc, String.to_integer(codepoint, 16), n) + end + + {cacc, wacc, dacc, kacc} + end + +defmodule String.Casing do + @moduledoc false + + special_path = Path.join(__DIR__, "SpecialCasing.txt") + + codes = Enum.reduce File.stream!(special_path), codes, fn 
line, acc -> + [codepoint, lower, title, upper, _] = :binary.split(line, "; ", [:global]) + key = to_binary.(codepoint) + :lists.keystore(key, 1, acc, {key, + to_binary.(upper), + to_binary.(lower), + to_binary.(title)}) + end + + # Downcase + + def downcase(string), do: downcase(string, "") + + for {codepoint, _upper, lower, _title} <- codes, lower && lower != codepoint do + defp downcase(unquote(codepoint) <> rest, acc) do + downcase(rest, acc <> unquote(lower)) + end + end + + defp downcase(<>, acc) do + downcase(rest, <>) + end + + defp downcase("", acc), do: acc + + # Upcase + + def upcase(string), do: upcase(string, "") + + for {codepoint, upper, _lower, _title} <- codes, upper && upper != codepoint do + defp upcase(unquote(codepoint) <> rest, acc) do + upcase(rest, acc <> unquote(upper)) + end + end + + defp upcase(<>, acc) do + upcase(rest, <>) + end + + defp upcase("", acc), do: acc + + # Titlecase once + + def titlecase_once(""), do: {"", ""} + + for {codepoint, _upper, _lower, title} <- codes, title && title != codepoint do + def titlecase_once(unquote(codepoint) <> rest) do + {unquote(title), rest} + end + end + + def titlecase_once(<>) do + {<>, rest} + end + + def titlecase_once(<>) do + {<>, rest} + end +end + +defmodule String.Break do + @moduledoc false + @whitespace_max_size 3 + + prop_path = Path.join(__DIR__, "PropList.txt") + + whitespace = Enum.reduce File.stream!(prop_path), [], fn line, acc -> + case :binary.split(line, ";") do + [<>, <<" White_Space", _::binary>>] -> + first = String.to_integer(first, 16) + last = String.to_integer(last, 16) + Enum.map(first..last, fn int -> <> end) ++ acc + [<>, <<" White_Space", _::binary>>] -> + [<> | acc] + _ -> + acc + end + end + + # trim_leading + + for codepoint <- whitespace do + def trim_leading(unquote(codepoint) <> rest), do: trim_leading(rest) + end + def trim_leading(""), do: "" + def trim_leading(string) when is_binary(string), do: string + + # trim_trailing + + for codepoint <- whitespace do + # We need to increment @whitespace_max_size as well + # as the small table (_s) if we add a new entry here. 
+ case byte_size(codepoint) do + 3 -> + defp do_trim_trailing_l(unquote(codepoint)), do: -3 + 2 -> + defp do_trim_trailing_l(<<_, unquote(codepoint)>>), do: -2 + + defp do_trim_trailing_s(unquote(codepoint)), do: <<>> + 1 -> + defp do_trim_trailing_l(<>), do: -3 + defp do_trim_trailing_l(<<_, unquote(codepoint), unquote(codepoint)>>), do: -2 + defp do_trim_trailing_l(<<_, _, unquote(codepoint)>>), do: -1 + + defp do_trim_trailing_s(<>), do: do_trim_trailing_s(<>) + defp do_trim_trailing_s(unquote(codepoint)), do: <<>> + end + end + + defp do_trim_trailing_l(_), do: 0 + defp do_trim_trailing_s(o), do: o + + def trim_trailing(string) when is_binary(string) do + trim_trailing(string, byte_size(string)) + end + + defp trim_trailing(string, size) when size < @whitespace_max_size do + do_trim_trailing_s(string) + end + + defp trim_trailing(string, size) do + trail = binary_part(string, size, -@whitespace_max_size) + case do_trim_trailing_l(trail) do + 0 -> string + x -> trim_trailing(binary_part(string, 0, size + x), size + x) + end + end + + # Split + + def split(string) do + for piece <- :binary.split(string, unquote(whitespace -- non_breakable), [:global]), + piece != "", + do: piece + end + + # Decompose + + def decompose(entries, map) do + for entry <- entries do + case map do + %{^entry => match} -> decompose(match, map) + %{} -> <> + end + end + end +end + +defmodule String.Normalizer do + @moduledoc false + + exclusions_path = Path.join(__DIR__, "CompositionExclusions.txt") + + compositions = Enum.reduce File.stream!(exclusions_path), decompositions, fn + <> = line, acc when h in ?0..?9 or h in ?A..?F -> + [codepoint, _] = :binary.split(line, " ") + Map.delete(acc, String.to_integer(codepoint, 16)) + _, acc -> + acc + end + + # Normalize + + def normalize(string, :nfd) when is_binary(string) do + normalize_nfd(string, "") + end + + def normalize(string, :nfc) when is_binary(string) do + normalize_nfc(string, "") + end + + defp normalize_nfd("", acc), do: acc + + defp normalize_nfd(<>, acc) when cp in 0xAC00..0xD7A3 do + {syllable_index, t_count, n_count} = {cp - 0xAC00, 28, 588} + lead = 0x1100 + div(syllable_index, n_count) + vowel = 0x1161 + div(rem(syllable_index, n_count), t_count) + trail = 0x11A7 + rem(syllable_index, t_count) + binary = + if trail == 0x11A7 do + <> + else + <> + end + normalize_nfd(rest, acc <> binary) + end + + defp normalize_nfd(binary, acc) do + {n, rest} = String.Unicode.next_grapheme_size(binary) + part = :binary.part(binary, 0, n) + case n do + 1 -> normalize_nfd(rest, acc <> part) + _ -> normalize_nfd(rest, acc <> canonical_order(part, [])) + end + end + + defp normalize_nfc("", acc), do: acc + + defp normalize_nfc(<>, acc) when cp in 0xAC00..0xD7A3 do + normalize_nfc(rest, acc <> <>) + end + + defp normalize_nfc(binary, acc) do + {n, rest} = String.Unicode.next_grapheme_size(binary) + part = :binary.part(binary, 0, n) + case n do + 1 -> normalize_nfc(rest, acc <> part) + _ -> normalize_nfc(rest, acc <> compose(normalize_nfd(part, ""))) + end + end + + for {cp, decomposition} <- decompositions do + decomposition = + decomposition + |> String.Break.decompose(decompositions) + |> IO.iodata_to_binary() + + defp canonical_order(unquote(<>) <> rest, acc) do + canonical_order(unquote(decomposition) <> rest, acc) + end + end + defp canonical_order(<>, acc) do + case combining_class(h) do + 0 -> canonical_order(acc) <> canonical_order(t, [{h, 0}]) + n -> canonical_order(t, [{h, n} | acc]) + end + end + defp canonical_order(<<>>, acc) do + canonical_order(acc) + end 
+ + defp canonical_order([{x, _}]) do + <> + end + defp canonical_order(acc) do + :lists.keysort(2, Enum.reverse(acc)) + |> Enum.map(&<>) + |> IO.iodata_to_binary + end + + for {codepoint, class} <- combining_classes do + defp combining_class(unquote(codepoint)), do: unquote(class) + end + + defp combining_class(_), do: 0 + + defp compose(<>) when lead in 0x1100..0x1112 and vowel in 0x1161..0x1175 do + codepoint = 0xAC00 + ((lead - 0x1100) * 588) + ((vowel - 0x1161) * 28) + case rest do + <> when trail in 0x11A7..0x11C2 -> + <> + _ -> + <> + end + end + + defp compose(binary) do + compose_one(binary) || ( + <> = binary + compose_many(rest, <>, "", combining_class(cp) - 1) + ) + end + + defp compose_many("", base, accents, _), do: base <> accents + + defp compose_many(<>, base, accents, last_class) do + part_class = combining_class(cp) + combined = <> + if composed = (last_class < part_class && compose_one(combined)) do + compose_many(rest, composed, accents, last_class) + else + compose_many(rest, base, <>, part_class) + end + end + + # Compositions: + # 1. We must exclude compositions with a single codepoint + # 2. We must exclude compositions that do not start with 0 combining class + for {cp, [fst, snd]} <- compositions, + Map.get(combining_classes, fst, 0) == 0 do + defp compose_one(unquote(<>)), do: unquote(<>) + end + + defp compose_one(_), do: nil +end diff --git a/lib/elixir/unicode/tokenizer.ex b/lib/elixir/unicode/tokenizer.ex new file mode 100644 index 00000000000..ee66ad6fad3 --- /dev/null +++ b/lib/elixir/unicode/tokenizer.ex @@ -0,0 +1,186 @@ +defmodule String.Tokenizer do + @moduledoc false + + data_path = Path.join(__DIR__, "UnicodeData.txt") + + {letter_uptitlecase, start, continue, _} = + Enum.reduce File.stream!(data_path), {[], [], [], nil}, fn + line, {letter_uptitlecase, start, continue, first} -> + [codepoint, line] = :binary.split(line, ";") + [name, line] = :binary.split(line, ";") + [category, _] = :binary.split(line, ";") + + {codepoints, first} = + case name do + "<" <> _ when is_integer(first) -> + last = String.to_integer(codepoint, 16) + {Enum.to_list(last..first), nil} + "<" <> _ -> + first = String.to_integer(codepoint, 16) + {[first], first + 1} + _ -> + {[String.to_integer(codepoint, 16)], nil} + end + + cond do + category in ~w(Lu Lt) -> + {codepoints ++ letter_uptitlecase, start, continue, first} + category in ~w(Ll Lm Lo Nl) -> + {letter_uptitlecase, codepoints ++ start, continue, first} + category in ~w(Mn Mc Nd Pc) -> + {letter_uptitlecase, start, codepoints ++ continue, first} + true -> + {letter_uptitlecase, start, continue, first} + end + end + + prop_path = Path.join(__DIR__, "PropList.txt") + + {start, continue, patterns} = + Enum.reduce File.stream!(prop_path), {start, continue, []}, fn line, acc -> + [codepoints | category] = :binary.split(line, ";") + + pos = + case category do + [" Other_ID_Start" <> _] -> 0 + [" Other_ID_Continue" <> _] -> 1 + [" Pattern_White_Space" <> _] -> 2 + [" Pattern_Syntax" <> _] -> 2 + _ -> -1 + end + + if pos >= 0 do + entries = + case :binary.split(codepoints, "..") do + [<>] -> + [String.to_integer(codepoint, 16)] + [first, <>] -> + Enum.to_list(String.to_integer(last, 16)..String.to_integer(first, 16)) + end + put_elem(acc, pos, entries ++ elem(acc, pos)) + else + acc + end + end + + id_upper = letter_uptitlecase -- patterns + id_start = start -- patterns + id_continue = continue -- patterns + + unicode_upper = Enum.filter(id_upper, & &1 > 127) + unicode_start = Enum.filter(id_start, & &1 > 127) + 
unicode_continue = Enum.filter(id_continue, & &1 > 127) + + rangify = fn [head | tail] -> + {first, last, acc} = + Enum.reduce(tail, {head, head, []}, fn + number, {first, last, acc} when number == first - 1 -> + {number, last, acc} + number, {first, last, acc} -> + {number, number, [{first, last} | acc]} + end) + [{first, last} | acc] + end + + @compile {:inline, ascii_upper?: 1, ascii_start?: 1, ascii_continue?: 1} + defp ascii_upper?(entry), do: entry in ?A..?Z + + defp ascii_start?(?_), do: true + defp ascii_start?(entry), do: entry in ?a..?z + + defp ascii_continue?(entry), do: entry in ?0..?9 + + range = rangify.(unicode_upper) + for {first, last} <- range do + if first == last do + defp unicode_upper?(unquote(first)), do: true + else + defp unicode_upper?(entry) when entry in unquote(first)..unquote(last), do: true + end + end + defp unicode_upper?(_), do: false + + range = rangify.(unicode_start) + for {first, last} <- range do + if first == last do + defp unicode_start?(unquote(first)), do: true + else + defp unicode_start?(entry) when entry in unquote(first)..unquote(last), do: true + end + end + defp unicode_start?(_), do: false + + unless {13312, 19893} in range do + raise "CHECK: CJK Ideograph not in range" + end + + for {first, last} <- rangify.(unicode_continue) do + if first == last do + defp unicode_continue?(unquote(first)), do: true + else + defp unicode_continue?(entry) when entry in unquote(first)..unquote(last), do: true + end + end + defp unicode_continue?(_), do: false + + # Pattern is used as a performance check since most + # atoms and variables end with an atom character. + for {first, last} <- rangify.(patterns), last <= 127 do + if first == last do + defp ascii_pattern?(unquote(first)), do: true + else + defp ascii_pattern?(entry) when entry in unquote(first)..unquote(last), do: true + end + end + defp ascii_pattern?(_), do: false + + def tokenize([head | tail]) do + cond do + ascii_upper?(head) -> + validate(continue(tail, [head], 1, true, []), :alias) + ascii_start?(head) -> + validate(continue(tail, [head], 1, true, []), :identifier) + unicode_upper?(head) -> + validate(continue(tail, [head], 1, false, []), :atom) + unicode_start?(head) -> + validate(continue(tail, [head], 1, false, []), :identifier) + true -> + {:error, :empty} + end + end + def tokenize([]) do + {:error, :empty} + end + + defp continue([?! | tail], acc, length, ascii_letters?, special) do + {[?! | acc], tail, length + 1, ascii_letters?, [?! | special]} + end + defp continue([?? | tail], acc, length, ascii_letters?, special) do + {[?? | acc], tail, length + 1, ascii_letters?, [?? | special]} + end + defp continue([?@ | tail], acc, length, ascii_letters?, special) do + continue(tail, [?@ | acc], length + 1, ascii_letters?, [?@ | List.delete(special, ?@)]) + end + defp continue([head | tail] = list, acc, length, ascii_letters?, special) do + cond do + ascii_start?(head) or ascii_upper?(head) or ascii_continue?(head) -> + continue(tail, [head | acc], length + 1, ascii_letters?, special) + not ascii_pattern?(head) and (unicode_start?(head) or unicode_upper?(head) or unicode_continue?(head)) -> + continue(tail, [head | acc], length + 1, false, special) + true -> + {acc, list, length, ascii_letters?, special} + end + end + defp continue([], acc, length, ascii_letters?, special) do + {acc, [], length, ascii_letters?, special} + end + + defp validate({acc, rest, length, ascii_letters?, special}, kind) do + acc = :lists.reverse(acc) + if ascii_letters? 
or :unicode.characters_to_nfc_list(acc) == acc do + {kind, acc, rest, length, ascii_letters?, special} + else + {:error, {:not_nfc, acc}} + end + end +end diff --git a/lib/elixir/unicode/unicode.ex b/lib/elixir/unicode/unicode.ex index 3ab64897304..3b854458f74 100644 --- a/lib/elixir/unicode/unicode.ex +++ b/lib/elixir/unicode/unicode.ex @@ -1,352 +1,320 @@ +# How to update the Unicode files +# +# 1. Update CompositionExclusions.txt by copying original as is +# 2. Update GraphemeBreakProperty.txt by copying original as is +# 3. Update PropList.txt by copying original as is +# 4. Update GraphemeBreakTest.txt by copying original as is +# 5. Update SpecialCasing.txt by removing comments and conditional mappings from original +# 6. Update String.Unicode.version/0 and on String module docs +# 7. make unicode +# 8. elixir lib/elixir/unicode/graphemes_test.exs +# defmodule String.Unicode do @moduledoc false - def version, do: {7, 0, 0} - - to_binary = fn - "" -> - nil - codepoints -> - codepoints = :binary.split(codepoints, " ", [:global]) - Enum.reduce codepoints, "", fn(codepoint, acc) -> - acc <> << String.to_integer(codepoint, 16) :: utf8 >> - end - end - - data_path = Path.join(__DIR__, "UnicodeData.txt") - - {codes, whitespace} = Enum.reduce File.stream!(data_path), {[], []}, fn(line, {cacc, wacc}) -> - [ codepoint, _name, _category, - _class, bidi, _decomposition, - _numeric_1, _numeric_2, _numeric_3, - _bidi_mirror, _unicode_1, _iso, - upper, lower, title ] = :binary.split(line, ";", [:global]) - - title = :binary.part(title, 0, byte_size(title) - 1) - - cond do - upper != "" or lower != "" or title != "" -> - {[{to_binary.(codepoint), to_binary.(upper), to_binary.(lower), to_binary.(title)} | cacc], wacc} - bidi in ["B", "S", "WS"] -> - {cacc, [to_binary.(codepoint) | wacc]} - true -> - {cacc, wacc} - end - end + def version, do: {9, 0, 0} - special_path = Path.join(__DIR__, "SpecialCasing.txt") + cluster_path = Path.join(__DIR__, "GraphemeBreakProperty.txt") + regex = ~r/(?:^([0-9A-F]+)(?:\.\.([0-9A-F]+))?)\s+;\s(\w+)/m - codes = Enum.reduce File.stream!(special_path), codes, fn(line, acc) -> - [ codepoint, lower, title, upper, _comment ] = :binary.split(line, "; ", [:global]) - key = to_binary.(codepoint) - :lists.keystore(key, 1, acc, {key, to_binary.(upper), to_binary.(lower), to_binary.(title)}) - end + cluster = Enum.reduce File.stream!(cluster_path), %{}, fn line, acc -> + case Regex.run(regex, line, capture: :all_but_first) do + ["D800", "DFFF", _class] -> + acc - # Downcase + [first, "", class] -> + codepoint = <> + Map.update(acc, class, [codepoint], &[<> | &1]) - def downcase(string), do: do_downcase(string) |> IO.iodata_to_binary + [first, last, class] -> + range = String.to_integer(first, 16)..String.to_integer(last, 16) + codepoints = Enum.map(range, fn int -> <> end) + Map.update(acc, class, codepoints, &(codepoints ++ &1)) - for {codepoint, _upper, lower, _title} <- codes, lower && lower != codepoint do - defp do_downcase(unquote(codepoint) <> rest) do - unquote(:binary.bin_to_list(lower)) ++ downcase(rest) + nil -> + acc end end - defp do_downcase(<< char, rest :: binary >>) do - [char|do_downcase(rest)] + # Don't break CRLF + def next_grapheme_size(<>) do + {2, rest} end - defp do_downcase(""), do: [] - - # Upcase - - def upcase(string), do: do_upcase(string) |> IO.iodata_to_binary - - for {codepoint, upper, _lower, _title} <- codes, upper && upper != codepoint do - defp do_upcase(unquote(codepoint) <> rest) do - unquote(:binary.bin_to_list(upper)) ++ do_upcase(rest) + 
# Break on control + for codepoint <- cluster["CR"] ++ cluster["LF"] ++ cluster["Control"] do + def next_grapheme_size(<>) do + {unquote(byte_size(codepoint)), rest} end end - defp do_upcase(<< char, rest :: binary >>) do - [char|do_upcase(rest)] + # Break on Prepend* + for codepoint <- cluster["Prepend"] do + def next_grapheme_size(<>) do + next_prepend_size(rest, unquote(byte_size(codepoint))) + end end - defp do_upcase(""), do: [] - - # Titlecase once - - def titlecase_once(""), do: {"", ""} - - for {codepoint, _upper, _lower, title} <- codes, title && title != codepoint do - def titlecase_once(unquote(codepoint) <> rest) do - {unquote(title), rest} + # Handle Regional + for codepoint <- cluster["Regional_Indicator"] do + def next_grapheme_size(<>) do + next_regional_size(rest, unquote(byte_size(codepoint))) end end - def titlecase_once(<< char, rest :: binary >>) do - {<< char >>, rest} + # Handle Hangul L + for codepoint <- cluster["L"] do + def next_grapheme_size(<>) do + next_hangul_l_size(rest, unquote(byte_size(codepoint))) + end end - # Strip - - def lstrip(""), do: "" - - for codepoint <- whitespace do - def lstrip(unquote(codepoint) <> rest) do - lstrip(rest) + # Handle Hangul V + for codepoint <- cluster["LV"] ++ cluster["V"] do + def next_grapheme_size(<>) do + next_hangul_v_size(rest, unquote(byte_size(codepoint))) end end - def lstrip(other) when is_binary(other), do: other - - def rstrip(string) when is_binary(string) do - do_rstrip(string, [], []) + # Handle Hangul T + for codepoint <- cluster["LVT"] ++ cluster["T"] do + def next_grapheme_size(<>) do + next_hangul_t_size(rest, unquote(byte_size(codepoint))) + end end - for codepoint <- whitespace do - c = :binary.bin_to_list(codepoint) |> :lists.reverse - - defp do_rstrip(unquote(codepoint) <> rest, acc1, acc2) do - do_rstrip(rest, unquote(c) ++ (acc1 || acc2), acc2) + # Handle E_Base + for codepoint <- cluster["E_Base"] ++ cluster["E_Base_GAZ"] do + def next_grapheme_size(<>) do + next_extend_size(rest, unquote(byte_size(codepoint)), :e_base) end end - defp do_rstrip(<< char, rest :: binary >>, nil, acc2) do - do_rstrip(rest, nil, [char|acc2]) + # Handle ZWJ + for codepoint <- cluster["ZWJ"] do + def next_grapheme_size(<>) do + next_extend_size(rest, unquote(byte_size(codepoint)), :zwj) + end end - defp do_rstrip(<< char, rest :: binary >>, acc1, _acc2) do - do_rstrip(rest, nil, [char|acc1]) + # Handle extended entries + def next_grapheme_size(<>) do + case cp do + x when x <= 0x007F -> next_extend_size(rest, 1, :other) + x when x <= 0x07FF -> next_extend_size(rest, 2, :other) + x when x <= 0xFFFF -> next_extend_size(rest, 3, :other) + _ -> next_extend_size(rest, 4, :other) + end end - defp do_rstrip(<<>>, _acc1, acc2), do: acc2 |> :lists.reverse |> IO.iodata_to_binary - - # Split - - def split(""), do: [""] + def next_grapheme_size(<<_, rest::binary>>) do + {1, rest} + end - def split(string) when is_binary(string) do - :lists.reverse do_split(string, "", []) + def next_grapheme_size(<<>>) do + nil end - for codepoint <- whitespace do - defp do_split(unquote(codepoint) <> rest, buffer, acc) do - do_split(rest, "", add_buffer_to_acc(buffer, acc)) + # Handle hanguls + defp next_hangul_l_size(rest, size) do + case next_hangul(rest, size) do + {:l, rest, size} -> next_hangul_l_size(rest, size) + {:v, rest, size} -> next_hangul_v_size(rest, size) + {:lv, rest, size} -> next_hangul_v_size(rest, size) + {:lvt, rest, size} -> next_hangul_t_size(rest, size) + _ -> next_extend_size(rest, size, :other) end end - defp do_split(<< 
char, rest :: binary >>, buffer, acc) do - do_split(rest, << buffer :: binary, char >>, acc) + defp next_hangul_v_size(rest, size) do + case next_hangul(rest, size) do + {:v, rest, size} -> next_hangul_v_size(rest, size) + {:t, rest, size} -> next_hangul_t_size(rest, size) + _ -> next_extend_size(rest, size, :other) + end end - defp do_split(<<>>, buffer, acc) do - add_buffer_to_acc(buffer, acc) + defp next_hangul_t_size(rest, size) do + case next_hangul(rest, size) do + {:t, rest, size} -> next_hangul_t_size(rest, size) + _ -> next_extend_size(rest, size, :other) + end end - @compile {:inline, add_buffer_to_acc: 2} - - defp add_buffer_to_acc("", acc), do: acc - defp add_buffer_to_acc(buffer, acc), do: [buffer|acc] - - # Codepoints - - def next_codepoint(<< cp :: utf8, rest :: binary >>) do - {<>, rest} + for codepoint <- cluster["L"] do + defp next_hangul(<>, size) do + {:l, rest, size + unquote(byte_size(codepoint))} + end end - def next_codepoint(<< cp, rest :: binary >>) do - {<>, rest} + for codepoint <- cluster["V"] do + defp next_hangul(<>, size) do + {:v, rest, size + unquote(byte_size(codepoint))} + end end - def next_codepoint(<<>>) do - nil + for codepoint <- cluster["T"] do + defp next_hangul(<>, size) do + {:t, rest, size + unquote(byte_size(codepoint))} + end end - def codepoints(binary) when is_binary(binary) do - do_codepoints(next_codepoint(binary)) + for codepoint <- cluster["LV"] do + defp next_hangul(<>, size) do + {:lv, rest, size + unquote(byte_size(codepoint))} + end end - defp do_codepoints({c, rest}) do - [c|do_codepoints(next_codepoint(rest))] + for codepoint <- cluster["LVT"] do + defp next_hangul(<>, size) do + {:lvt, rest, size + unquote(byte_size(codepoint))} + end end - defp do_codepoints(nil) do - [] + defp next_hangul(_, _) do + false end -end - -defmodule String.Graphemes do - @moduledoc false - cluster_path = Path.join(__DIR__, "GraphemeBreakProperty.txt") - regex = ~r/(?:^([0-9A-F]+)(?:\.\.([0-9A-F]+))?)\s+;\s(\w+)/m - - to_range = fn - first, "" -> - [<< String.to_integer(first, 16) :: utf8 >>] - first, last -> - range = String.to_integer(first, 16)..String.to_integer(last, 16) - Enum.map(range, fn(int) -> << int :: utf8 >> end) - end - - cluster = Enum.reduce File.stream!(cluster_path), HashDict.new, fn(line, dict) -> - [ _full, first, last, class ] = Regex.run(regex, line) - - # Skip surrogates - if first == "D800" and last == "DFFF" do - dict - else - list = to_range.(first, last) - Dict.update(dict, class, list, &(&1 ++ list)) + # Handle regional + for codepoint <- cluster["Regional_Indicator"] do + defp next_regional_size(<>, size) do + next_extend_size(rest, size + unquote(byte_size(codepoint)), :other) end end - - # There is no codepoint marked as Prepend by Unicode 6.3.0 - if cluster["Prepend"] do - raise "It seems this new unicode version has added Prepend items. " <> - "Please remove this error and uncomment the code below." 
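A minimal sketch of how the new size-based grapheme API above is expected to behave once the Unicode 9.0 tables are generated (illustrative values only; the public `String.graphemes/1` exercised by graphemes_test.exs builds on this module):

    # "e" followed by U+0301 COMBINING ACUTE ACCENT forms a single grapheme
    # spanning 3 bytes (1 byte for "e", 2 bytes for the combining mark):
    String.Unicode.next_grapheme_size("e\u0301!")
    #=> {3, "!"}

    # The list-returning helper layered on top of it:
    String.graphemes("e\u0301!")
    #=> ["é", "!"]   # the "é" here is still the decomposed e + U+0301 pair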
+ defp next_regional_size(rest, size) do + next_extend_size(rest, size, :other) end - # Don't break CRLF - def next_grapheme(<< ?\n, ?\r, rest :: binary >>) do - {"\n\r", rest} + # Handle Extend+SpacingMark+ZWJ + for codepoint <- cluster["Extend"] do + defp next_extend_size(<>, size, marker) do + next_extend_size(rest, size + unquote(byte_size(codepoint)), keep_ebase(marker)) + end end - # Break on control - for codepoint <- cluster["CR"] ++ cluster["LF"] ++ cluster["Control"] do - def next_grapheme(<< unquote(codepoint), rest :: binary >> = string) do - {:binary.part(string, 0, unquote(byte_size(codepoint))), rest} + for codepoint <- cluster["SpacingMark"] do + defp next_extend_size(<>, size, _marker) do + next_extend_size(rest, size + unquote(byte_size(codepoint)), :other) end end - # Break on Prepend* - # for codepoint <- cluster["Prepend"] do - # def next_grapheme(<< unquote(codepoint), rest :: binary >> = string) do - # next_prepend(rest, string, unquote(byte_size(codepoint))) - # end - # end - - # Handle Hangul L - for codepoint <- cluster["L"] do - def next_grapheme(<< unquote(codepoint), rest :: binary >> = string) do - next_hangul_l(rest, string, unquote(byte_size(codepoint))) + for codepoint <- cluster["ZWJ"] do + defp next_extend_size(<>, size, _marker) do + next_extend_size(rest, size + unquote(byte_size(codepoint)), :zwj) end end - # Handle Hangul T - for codepoint <- cluster["T"] do - def next_grapheme(<< unquote(codepoint), rest :: binary >> = string) do - next_hangul_t(rest, string, unquote(byte_size(codepoint))) + for codepoint <- cluster["E_Modifier"] do + defp next_extend_size(<>, size, :e_base) do + next_extend_size(rest, size + unquote(byte_size(codepoint)), :other) end end - # Handle Regional - for codepoint <- cluster["Regional_Indicator"] do - def next_grapheme(<< unquote(codepoint), rest :: binary >> = string) do - next_regional(rest, string, unquote(byte_size(codepoint))) + for codepoint <- cluster["Glue_After_Zwj"] do + defp next_extend_size(<>, size, :zwj) do + next_extend_size(rest, size + unquote(byte_size(codepoint)), :other) end end - # Handle extended entries - def next_grapheme(<< cp :: utf8, rest :: binary >> = string) do - next_extend(rest, string, byte_size(<< cp :: utf8 >>)) + for codepoint <- cluster["E_Base_GAZ"] do + defp next_extend_size(<>, size, :zwj) do + next_extend_size(rest, size + unquote(byte_size(codepoint)), :e_base) + end end - def next_grapheme(<< cp, rest :: binary >>) do - {<>, rest} + defp next_extend_size(rest, size, _) do + {size, rest} end - def next_grapheme(<<>>) do - nil - end + defp keep_ebase(:e_base), do: :e_base + defp keep_ebase(_), do: :other - # Handle Hangul L - for codepoint <- cluster["L"] do - defp next_hangul_l(<< unquote(codepoint), rest :: binary >>, string, size) do - next_hangul_l(rest, string, size + unquote(byte_size(codepoint))) + # Handle Prepend + for codepoint <- cluster["Prepend"] do + defp next_prepend_size(<>, size) do + next_prepend_size(rest, size + unquote(byte_size(codepoint))) end end - for codepoint <- cluster["LV"] do - defp next_hangul_l(<< unquote(codepoint), rest :: binary >>, string, size) do - next_hangul_v(rest, string, size + unquote(byte_size(codepoint))) + # However, if we see a control character, we have to break it + for codepoint <- cluster["CR"] ++ cluster["LF"] ++ cluster["Control"] do + defp next_prepend_size(<> = rest, size) do + {size, rest} end end - for codepoint <- cluster["LVT"] do - defp next_hangul_l(<< unquote(codepoint), rest :: binary >>, string, size) do - 
next_hangul_t(rest, string, size + unquote(byte_size(codepoint))) + defp next_prepend_size(rest, size) do + case next_grapheme_size(rest) do + {more, rest} -> {more + size, rest} + nil -> {size, rest} end end - defp next_hangul_l(rest, string, size) do - next_hangul_v(rest, string, size) + # Graphemes + + def graphemes(binary) when is_binary(binary) do + do_graphemes(next_grapheme_size(binary), binary) end - # Handle Hangul V - for codepoint <- cluster["V"] do - defp next_hangul_v(<< unquote(codepoint), rest :: binary >>, string, size) do - next_hangul_v(rest, string, size + unquote(byte_size(codepoint))) - end + defp do_graphemes({size, rest}, binary) do + [:binary.part(binary, 0, size) | do_graphemes(next_grapheme_size(rest), rest)] end - defp next_hangul_v(rest, string, size) do - next_hangul_t(rest, string, size) + defp do_graphemes(nil, _) do + [] end - # Handle Hangul T - for codepoint <- cluster["T"] do - defp next_hangul_t(<< unquote(codepoint), rest :: binary >>, string, size) do - next_hangul_t(rest, string, size + unquote(byte_size(codepoint))) - end + # Length + + def length(string) do + do_length(next_grapheme_size(string), 0) end - defp next_hangul_t(rest, string, size) do - next_extend(rest, string, size) + defp do_length({_, rest}, acc) do + do_length(next_grapheme_size(rest), acc + 1) end - # Handle regional - for codepoint <- cluster["Regional_Indicator"] do - defp next_regional(<< unquote(codepoint), rest :: binary >>, string, size) do - next_regional(rest, string, size + unquote(byte_size(codepoint))) + defp do_length(nil, acc), do: acc + + # Split at + + def split_at(string, pos) do + do_split_at(string, 0, pos, 0) + end + + defp do_split_at(string, acc, desired_pos, current_pos) when desired_pos > current_pos do + case next_grapheme_size(string) do + {count, rest} -> do_split_at(rest, acc + count, desired_pos, current_pos + 1) + nil -> {acc, nil} end end - defp next_regional(rest, string, size) do - next_extend(rest, string, size) + defp do_split_at(string, acc, desired_pos, desired_pos) do + {acc, string} end - # Handle Extend+SpacingMark - for codepoint <- cluster["Extend"] ++ cluster["SpacingMark"] do - defp next_extend(<< unquote(codepoint), rest :: binary >>, string, size) do - next_extend(rest, string, size + unquote(byte_size(codepoint))) - end + # Codepoints + + def next_codepoint(<>) do + {<>, rest} end - defp next_extend(rest, string, size) do - {:binary.part(string, 0, size), rest} + def next_codepoint(<>) do + {<>, rest} end - # Handle Prepend - # for codepoint <- cluster["Prepend"] do - # defp next_prepend(<< unquote(codepoint), rest :: binary >>, string, size) do - # next_prepend(rest, string, size + unquote(byte_size(codepoint))) - # end - # end - # - # defp next_prepend(rest, string, size) do - # {:binary.part(string, 0, size), rest} - # end + def next_codepoint(<<>>) do + nil + end - def graphemes(binary) when is_binary(binary) do - do_graphemes(next_grapheme(binary)) + def codepoints(binary) when is_binary(binary) do + do_codepoints(next_codepoint(binary)) end - defp do_graphemes({c, rest}) do - [c|do_graphemes(next_grapheme(rest))] + defp do_codepoints({c, rest}) do + [c | do_codepoints(next_codepoint(rest))] end - defp do_graphemes(nil) do + defp do_codepoints(nil) do [] end end diff --git a/lib/ex_unit/examples/difference.exs b/lib/ex_unit/examples/difference.exs new file mode 100644 index 00000000000..31bdb08c2b3 --- /dev/null +++ b/lib/ex_unit/examples/difference.exs @@ -0,0 +1,117 @@ +ExUnit.start [seed: 0] + +defmodule Difference do + 
@moduledoc """ + This module contains failing tests to see + difference highlighting in action. + """ + use ExUnit.Case + + defmodule User do + defstruct [:age] + end + + test "integers" do + assert 491512235 == 490512035 + end + + test "floats" do + assert 42.0 == 43.0 + end + + test "strings" do + string1 = "fox hops over \"the dog" + string2 = "fox jumps over the lazy cat" + assert string1 == string2 + end + + test "whitespace" do + list1 = [%{a: "abc "}, %{a: "def"}, %{c: "gh"}] + list2 = [%{a: "abc"}, %{a: " def"}, %{c: "hi"}] + assert list1 == list2 + end + + test "large strings" do + string1 = "short" + string2 = "really long string that should not emit diff" + assert string1 == string2 + end + + test "large strings; inner" do + tuple1 = {"short"} + tuple2 = {"really long string that should not emit diff"} + assert tuple1 == tuple2 + end + + test "lists" do + list1 = ["Tvo", make_ref(), :ok, {}] + list2 = ["Two", :ok, self(), {true}] + assert list1 == list2 + end + + test "lists; missing entries" do + assert [] == [1, 2, 3] + end + + test "lists; surplus entries" do + assert [1, 2, 3] == [] + end + + test "improper lists" do + list1 = [1 | "b"] + list2 = [1, "a"] + assert list1 == list2 + end + + test "charlists" do + charlist1 = 'fox hops over \'the dog' + charlist2 = 'fox jumps over the lazy cat' + assert charlist1 == charlist2 + end + + test "keyword lists" do + assert [file: "nofile", line: 12] == [file: nil, lime: 10] + end + + test "keyword lists; reverse order" do + keyword1 = [port: 4000, max_connections: 1000] + keyword2 = [max_connections: 1000, port: 4000] + assert keyword1 == keyword2 + end + + test "tuples" do + tuple1 = {:hex, "0.1", [{:ex_doc}]} + tuple2 = {:hex, "1.1"} + assert tuple1 == tuple2 + end + + test "maps; mixed diff" do + map1 = Enum.into(1..15, %{}, &{&1, &1}) |> Map.delete(13) + map2 = Enum.reduce(5..10, map1, &Map.delete(&2, &1)) |> Map.put(13, 13) |> Map.put(12, 32) + assert map1 == map2 + end + + test "maps; missing pairs and match" do + map1 = %{baz: 12} + map2 = %{foo: 12, bar: 12, baz: 12} + assert map1 == map2 + end + + test "maps; surplus pairs and match" do + map1 = %{foo: 12, bar: 12, baz: 12} + map2 = %{baz: 12} + assert map1 == map2 + end + + test "maps; missing pair" do + assert %{} == %{baz: 12} + end + + test "maps; surplus pair" do + assert %{baz: 12} == %{} + end + + test "structs" do + assert %User{age: 16} == %User{age: 21} + end +end diff --git a/lib/ex_unit/examples/one_of_each.exs b/lib/ex_unit/examples/one_of_each.exs index d5af037c05f..ad1bac7b430 100644 --- a/lib/ex_unit/examples/one_of_each.exs +++ b/lib/ex_unit/examples/one_of_each.exs @@ -5,14 +5,17 @@ defmodule TestOneOfEach do This module contains one of each type of failing test. It is used simply to document the style of each. """ - - use ExUnit.Case, async: false + use ExUnit.Case @one 1 @two 2 - @long_data_1 [ field1: "one", field2: {:two1, :two2}, field3: 'three', field4: [1,2,3,4]] - @long_data_2 [ field1: "one", field2: {:two1, :two3}, field3: 'three', field4: [1,2,3,4]] + @long_data_1 [field1: "one", field2: {:two1, :two2}, field3: 'three', field4: [1, 2, 3, 4]] + @long_data_2 [field1: "one", field2: {:two1, :two3}, field3: 'three', field4: [1, 2, 3, 4]] + + setup do + {:ok, user_id: 1, post_id: 2, many_ids: Enum.to_list(1..50)} + end test "1. assert with a match" do assert [@one] = [@two] @@ -51,7 +54,7 @@ defmodule TestOneOfEach do end test "10. 
assert with explicit expected and actual values" do - assert @one > @two, @one, @two, "one should be greater than two" + assert @one > @two, left: @one, right: @two, message: "one should be greater than two" end test "11. assert that a message is ready to be received" do @@ -63,7 +66,7 @@ defmodule TestOneOfEach do end test "13. assert an exception with a given message is raised, but no exception" do - assert_raise(SomeException, "some message", fn -> end) + assert_raise(SomeException, "some message", fn -> nil end) end test "14. assert an exception with a given message is raised" do @@ -79,7 +82,7 @@ defmodule TestOneOfEach do end test "16. assert an exception is raised" do - assert_raise(SomeException, fn -> end) + assert_raise(SomeException, fn -> nil end) end test "17. assert two values are within some delta" do @@ -91,12 +94,12 @@ defmodule TestOneOfEach do end test "19. refute a message is received within a timeout" do - send self, {:hello, "Dave"} + send self(), {:hello, "Dave"} refute_receive {:hello, _}, 1000 end test "20. refute a message is ready to be received" do - send self, :hello_again + send self(), :hello_again refute_received :hello_again end @@ -113,7 +116,7 @@ defmodule TestOneOfEach do end test "24. exception raised while running test" do - assert blows_up + assert blows_up() end test "25. error due to exit" do @@ -122,6 +125,40 @@ defmodule TestOneOfEach do end end + test "26. multi error" do + error1 = + try do + assert [@one] = [@two] + rescue e in ExUnit.AssertionError -> + {:error, e, System.stacktrace} + end + + error2 = + try do + assert @one * 4 > @two *3 + rescue e in ExUnit.AssertionError -> + {:error, e, System.stacktrace} + end + + raise ExUnit.MultiError, errors: [error1, error2] + end + + @tag report: [:user_id, :post_id, :many_ids] + test "27. tag reporting" do + flunk "oops" + end + + @tag capture_log: true + test "28. log capturing" do + require Logger + Logger.debug "this will be logged" + flunk "oops" + end + + test "29. function clause error" do + Access.fetch(:foo, :bar) + end + defp blows_up do ignite(0) + 1 end diff --git a/lib/ex_unit/lib/ex_unit.ex b/lib/ex_unit/lib/ex_unit.ex index c004c6ef36c..5333d1aa43a 100644 --- a/lib/ex_unit/lib/ex_unit.ex +++ b/lib/ex_unit/lib/ex_unit.ex @@ -1,6 +1,6 @@ defmodule ExUnit do @moduledoc """ - Basic unit testing framework for Elixir. + Unit testing framework for Elixir. ## Example @@ -11,32 +11,32 @@ defmodule ExUnit do # 1) Start ExUnit. ExUnit.start - # 2) Create a new test module (test case) and use `ExUnit.Case`. + # 2) Create a new test module (test case) and use "ExUnit.Case". defmodule AssertionTest do - # 3) Notice we pass `async: true`, this runs the test case - # concurrently with other test cases + # 3) Notice we pass "async: true", this runs the test case + # concurrently with other test cases. The individual tests + # within each test case are still run serially. use ExUnit.Case, async: true - # 4) Use the `test` macro instead of `def` for clarity. + # 4) Use the "test" macro instead of "def" for clarity. test "the truth" do assert true end end - To run the tests above, run the file - using `elixir` from the command line. Assuming you named the file - `assertion_test.exs`, you can run it as: + To run the tests above, run the file using `elixir` from the + command line. 
Assuming you named the file `assertion_test.exs`, + you can run it as: - bin/elixir assertion_test.exs + elixir assertion_test.exs ## Case, Callbacks and Assertions - See `ExUnit.Case` and `ExUnit.Callbacks` - for more information about defining test cases. + See `ExUnit.Case` and `ExUnit.Callbacks` for more information + about defining test cases and setting up callbacks. - The `ExUnit.Assertions` module contains - a set of macros to easily generate assertions with appropriate - error messages. + The `ExUnit.Assertions` module contains a set of macros to + generate assertions with appropriate error messages. ## Integration with Mix @@ -57,9 +57,9 @@ defmodule ExUnit do files. See `Mix.Tasks.Test` for more information. """ - @typedoc "The state returned by ExUnit.Test and ExUnit.TestCase" + @typedoc "The error state returned by ExUnit.Test and ExUnit.TestCase" @type state :: nil | {:failed, failed} | {:skip, binary} | {:invalid, module} - @type failed :: {Exception.kind, reason :: term, stacktrace :: [tuple]} + @type failed :: [{Exception.kind, reason :: term, stacktrace :: [tuple]}] defmodule Test do @moduledoc """ @@ -69,16 +69,20 @@ defmodule ExUnit do * `:name` - the test name * `:case` - the test case - * `:state` - the test state (see ExUnit.state) + * `:state` - the test error state (see ExUnit.state) * `:time` - the time to run the test * `:tags` - the test tags + * `:logs` - the captured logs """ - defstruct name: nil :: atom, - case: nil :: module, - state: nil :: ExUnit.state, - time: 0 :: non_neg_integer, - tags: %{} :: map + defstruct [:name, :case, :state, time: 0, tags: %{}, logs: ""] + + @type t :: %__MODULE__{ + name: atom, + case: module, + state: ExUnit.state, + time: non_neg_integer, + tags: map} end defmodule TestCase do @@ -88,13 +92,34 @@ defmodule ExUnit do It is received by formatters and contains the following fields: * `:name` - the test case name - * `:state` - the test state (see ExUnit.state) + * `:state` - the test error state (see ExUnit.state) * `:tests` - all tests for this case """ - defstruct name: nil :: module, - state: nil :: ExUnit.state, - tests: [] :: [ExUnit.Test.t] + defstruct [:name, :state, tests: []] + + @type t :: %__MODULE__{ + name: module, + state: ExUnit.state, + tests: [ExUnit.Test.t]} + end + + defmodule TimeoutError do + defexception [:timeout, :type] + + def message(%{timeout: timeout, type: type}) do + """ + #{type} timed out after #{timeout}ms. You can change the timeout: + + 1. per test by setting "@tag timeout: x" + 2. per case by setting "@moduletag timeout: x" + 3. globally via "ExUnit.start(timeout: x)" configuration + 4. or set it to infinity per run by calling "mix test --trace" + (useful when using IEx.pry) + + Timeouts are given as integers in milliseconds. + """ + end end use Application @@ -105,6 +130,7 @@ defmodule ExUnit do children = [ worker(ExUnit.Server, []), + worker(ExUnit.CaptureServer, []), worker(ExUnit.OnExitHandler, []) ] @@ -120,19 +146,20 @@ defmodule ExUnit do If you want to run tests manually, you can set `:autorun` to `false`. 
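+
+ For instance, a minimal sketch that disables autorun and runs the suite by
+ hand (the `timeout` value below is only illustrative, not a default):
+
+     # test_helper.exs
+     ExUnit.start(autorun: false, timeout: 120_000)
+
+     # later, once all test files have been loaded
+     ExUnit.run()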
""" def start(options \\ []) do - Application.start(:elixir) - Application.start(:ex_unit) + {:ok, _} = Application.ensure_all_started(:ex_unit) configure(options) + config = put_defaults(configuration()) - if Application.get_env(:ex_unit, :autorun, true) do + if Application.fetch_env!(:ex_unit, :autorun) do Application.put_env(:ex_unit, :autorun, false) System.at_exit fn 0 -> - %{failures: failures} = ExUnit.run + time = ExUnit.Server.cases_loaded() + %{failures: failures} = ExUnit.Runner.run(config, time) System.at_exit fn _ -> - if failures > 0, do: System.halt(1) + if failures > 0, do: exit({:shutdown, 1}) end _ -> :ok @@ -147,27 +174,48 @@ defmodule ExUnit do ExUnit supports the following options: - * `:color` - when color should be used by specific formatters; - defaults to the result of `IO.ANSI.terminal?/1` + * `:assert_receive_timeout` - the timeout to be used on `assert_receive` + calls, defaults to `100` milliseconds; + + * `:autorun` - if ExUnit should run by default on exit. Defaults to `true`; + + * `:capture_log` - if ExUnit should default to keeping track of log messages + and print them on test failure. Can be overridden for individual tests via + `@tag capture_log: false`. Defaults to `false`; - * `:formatters` - the formatters that will print results; - defaults to `[ExUnit.CLIFormatter]` + * `:case_load_timeout` - the timeout to be used when loading a test case, + defaults to `60_000` milliseconds; - * `:max_cases` - maximum number of cases to run in parallel; - defaults to `:erlang.system_info(:schedulers_online)` + * `:colors` - a keyword list of colors to be used by some formatters. + The only option so far is `[enabled: boolean]` which defaults to `IO.ANSI.enabled?/0`; - * `:trace` - set ExUnit into trace mode, this sets `:max_cases` to `1` and - prints each test case and test while running + * `:exclude` - specifies which tests are run by skipping tests that match the + filter; - * `:autorun` - if ExUnit should run by default on exit; defaults to `true` + * `:formatters` - the formatters that will print results, + defaults to `[ExUnit.CLIFormatter]`; - * `:include` - specify which tests are run by skipping tests that do not - match the filter + * `:include` - specifies which tests are run by skipping tests that do not + match the filter. Keep in mind that all tests are included by default, so unless they are + excluded first, the `:include` option has no effect; - * `:exclude` - specify which tests are run by skipping tests that match the - filter + * `:max_cases` - maximum number of cases to run in parallel. + It defaults to `System.schedulers_online * 2` to + optimize both CPU-bound and IO-bound tests; + + * `:refute_receive_timeout` - the timeout to be used on `refute_receive` + calls, defaults to `100` milliseconds; + + * `:seed` - an integer seed value to randomize the test suite; + + * `:stacktrace_depth` - configures the stacktrace depth to be used + on formatting and reporters, defaults to `20`; + + * `:timeout` - sets the timeout for the tests, defaults to `60_000` milliseconds; + + * `:trace` - sets ExUnit into trace mode, this sets `:max_cases` to `1` and + prints each test case and test while running. - * `:seed` - an integer seed value to randomize the test suite """ def configure(options) do Enum.each options, fn {k, v} -> @@ -182,15 +230,67 @@ defmodule ExUnit do Application.get_all_env(:ex_unit) end + @doc """ + Returns the pluralization for `word`. + + If one is not registered, returns the word appended with an "s". 
+ """ + @spec plural_rule(binary) :: binary + def plural_rule(word) when is_binary(word) do + Application.get_env(:ex_unit, :plural_rules, %{}) + |> Map.get(word, "#{word}s") + end + + @doc """ + Registers a `pluralization` for `word`. + + If one is already registered, it is replaced. + """ + @spec plural_rule(binary, binary) :: :ok + def plural_rule(word, pluralization) when is_binary(word) and is_binary(pluralization) do + plural_rules = + Application.get_env(:ex_unit, :plural_rules, %{}) + |> Map.put(word, pluralization) + configure(plural_rules: plural_rules) + end + @doc """ API used to run the tests. It is invoked automatically if ExUnit is started via `ExUnit.start/1`. - Returns a map containing the number of tests and the number - of failures. + Returns a map containing the total number of tests, the number + of failures and the number of skipped tests. """ def run do - {async, sync, load_us} = ExUnit.Server.start_run - ExUnit.Runner.run async, sync, configuration, load_us + config = put_defaults(configuration()) + ExUnit.Runner.run(config, nil) + end + + defp put_defaults(opts) do + opts + |> put_seed() + |> put_max_cases() + end + + defp put_seed(opts) do + Keyword.put_new_lazy(opts, :seed, fn -> + seed = :os.timestamp |> elem(2) + Application.put_env(:ex_unit, :seed, seed) + seed + end) + end + + defp put_max_cases(opts) do + max_cases = max_cases(opts) + Application.put_env(:ex_unit, :max_cases, max_cases) + Keyword.put(opts, :max_cases, max_cases) + end + + defp max_cases(opts) do + cond do + opts[:trace] -> 1 + max = opts[:max_cases] -> max + true -> System.schedulers_online * 2 + end end end diff --git a/lib/ex_unit/lib/ex_unit/assertions.ex b/lib/ex_unit/lib/ex_unit/assertions.ex index b79b8e6e077..1de3b3c2c1d 100644 --- a/lib/ex_unit/lib/ex_unit/assertions.ex +++ b/lib/ex_unit/lib/ex_unit/assertions.ex @@ -1,10 +1,15 @@ defmodule ExUnit.AssertionError do + @moduledoc """ + Raised to signal an assertion error. + """ + @no_value :ex_unit_no_meaningful_value - defexception left: @no_value, - right: @no_value, + defexception left: @no_value, + right: @no_value, message: @no_value, - expr: @no_value + expr: @no_value, + binding: @no_value @doc """ Indicates no meaningful value for a field. @@ -12,6 +17,25 @@ defmodule ExUnit.AssertionError do def no_value do @no_value end + + def message(exception) do + "\n\n" <> ExUnit.Formatter.format_assertion_error(exception) + end +end + +defmodule ExUnit.MultiError do + @moduledoc """ + Raised to signal multiple errors happened in a test case. + """ + + defexception [errors: []] + + def message(exception) do + "got the following errors:\n\n" <> + Enum.map_join(exception, "\n\n", fn {kind, error, stack} -> + Exception.format_banner(kind, error, stack) + end) + end end defmodule ExUnit.Assertions do @@ -20,79 +44,138 @@ defmodule ExUnit.Assertions do imported by default into your test cases. In general, a developer will want to use the general - `assert` macro in tests. This macro tries to be smart - and provide good reporting whenever there is a failure. + `assert` macro in tests. This macro introspects your code + and provides good reporting whenever there is a failure. 
For example, `assert some_fun() == 10` will fail (assuming - `some_fun()` returns 13): + `some_fun()` returns `13`): - Comparison (using ==) failed in: - code: some_fun() == 10 - lhs: 13 - rhs: 10 + Comparison (using ==) failed in: + code: some_fun() == 10 + left: 13 + right: 10 This module also provides other convenience functions - like `assert_in_delta` and `assert_raise` to easily handle other - common cases such as checking a floating point number or handling exceptions. + like `assert_in_delta` and `assert_raise` to easily handle + other common cases such as checking a floating-point number + or handling exceptions. """ @doc """ - Asserts its argument is true. + Asserts its argument is a truthy value. - `assert` tries to be smart and provide good - reporting whenever there is a failure. In particular, if - given a match expression, it will report any failure in terms - of that match. Given + `assert` introspects the underlying expression and provides + good reporting whenever there is a failure. For example, + if the expression uses the comparison operator, the message + will show the values of the two sides. The assertion + + assert 1 + 2 + 3 + 4 > 15 + + will fail with the message: + + Assertion with > failed + code: 1 + 2 + 3 + 4 > 15 + left: 10 + right: 15 + + Similarly, if a match expression is given, it will report + any failure in terms of that match. Given assert [one] = [two] you'll see: match (=) failed - code: [one] = [two] - rhs: [2] + code: [one] = [two] + right: [2] - If the expression is a comparison operator, the message - will show the values of the two sides. The assertion - - assert 1+2+3+4 > 15 + Keep in mind that `assert` does not change its semantics + based on the expression. In other words, the expression + is still required to return a truthy value. For example, + the following will fail: - will fail with the message: + assert nil = some_function_that_returns_nil() - Assertion with > failed - code: 1+2+3+4 > 15 - lhs: 10 - rhs: 15 + Even though the match works, `assert` still expects a truth + value. In such cases, simply use `Kernel.==/2` or + `Kernel.match?/2`. """ defmacro assert({:=, _, [left, right]} = assertion) do - code = Macro.escape(assertion) - {:case, meta, args} = - quote do + code = escape_quoted(:assert, assertion) + + left = Macro.expand(left, __CALLER__) + vars = collect_vars_from_pattern(left) + pins = collect_pins_from_pattern(left, __CALLER__.vars) + rhs_binding = collect_vars_used_in_expression(right, __CALLER__.vars) + + # If the match works, we need to check if the value + # is not nil nor false. We need to rewrite the if + # to avoid silly warnings though. + return = + no_warning(quote do + case right do + x when x in [nil, false] -> + raise ExUnit.AssertionError, + expr: expr, + binding: unquote(rhs_binding), + message: "Expected truthy, got #{inspect right}" + _ -> + :ok + end + end) + + match_expr = + no_warning(quote do case right do unquote(left) -> - right + unquote(return) + unquote(vars) _ -> raise ExUnit.AssertionError, right: right, - expr: unquote(code), - message: "match (=) failed" + expr: expr, + binding: unquote(rhs_binding), + message: "match (=) failed" <> + ExUnit.Assertions.__pins__(unquote(pins)) end - end + end) quote do right = unquote(right) - unquote({:case, [{:export_head, true}|meta], args}) + expr = unquote(code) + unquote(vars) = unquote(match_expr) + right + end + end + + defmacro assert({:match?, meta, [left, right]} = assertion) do + code = escape_quoted(:assert, assertion) + match? 
= {:match?, meta, [left, Macro.var(:right, __MODULE__)]} + pins = collect_pins_from_pattern(left, __CALLER__.vars) + rhs_binding = collect_vars_used_in_expression(right, __CALLER__.vars) + + quote do + right = unquote(right) + assert unquote(match?), + right: right, + expr: unquote(code), + binding: unquote(rhs_binding), + message: "match (match?) failed" <> + ExUnit.Assertions.__pins__(unquote(pins)) end end defmacro assert(assertion) do - case translate_assertion(assertion) do + case translate_assertion(:assert, assertion, __CALLER__) do nil -> + binding = collect_vars_used_in_expression(assertion, __CALLER__.vars) + quote do value = unquote(assertion) unless value do raise ExUnit.AssertionError, - expr: unquote(Macro.escape(assertion)), + expr: unquote(escape_quoted(:assert, assertion)), + binding: unquote(binding), message: "Expected truthy, got #{inspect value}" end @@ -105,44 +188,55 @@ defmodule ExUnit.Assertions do end @doc """ - This is a negative assertion, failing if its parameter - is truthy. + A negative assertion, expects the expression to be `false` or `nil`. + + Keep in mind that `refute` does not change the semantics of + the given expression. In other words, the following will fail: + + refute {:ok, _} = some_function_that_returns_error_tuple() + + The code above will fail because the `=` operator always fails + when the sides do not match and `refute/2` does not change it. + + The correct way to write the refutation above is to use + `Kernel.match?/2`: + + refute match? {:ok, _}, some_function_that_returns_error_tuple() ## Examples refute age < 0 """ - defmacro refute({:=, _, [left, right]} = assertion) do - code = Macro.escape(assertion) - {:case, meta, args} = - quote do - case right do - unquote(left) -> - raise ExUnit.AssertionError, - right: right, - expr: unquote(code), - message: "match (=) succeeded, but should have failed" - _ -> - right - end - end + defmacro refute({:match?, meta, [left, right]} = assertion) do + code = escape_quoted(:refute, assertion) + match? = {:match?, meta, [left, Macro.var(:right, __MODULE__)]} + pins = collect_pins_from_pattern(left, __CALLER__.vars) + rhs_binding = collect_vars_used_in_expression(right, __CALLER__.vars) quote do right = unquote(right) - unquote({:case, [{:export_head, true}|meta], args}) + refute unquote(match?), + right: right, + expr: unquote(code), + binding: unquote(rhs_binding), + message: "match (match?) succeeded, but should have failed" <> + ExUnit.Assertions.__pins__(unquote(pins)) end end defmacro refute(assertion) do - case translate_assertion({:!, [], [assertion]}) do + case translate_assertion(:refute, assertion, __CALLER__) do nil -> + binding = collect_vars_used_in_expression(assertion, __CALLER__.vars) + quote do value = unquote(assertion) if value do raise ExUnit.AssertionError, - expr: unquote(Macro.escape(assertion)), + expr: unquote(escape_quoted(:refute, assertion)), + binding: unquote(binding), message: "Expected false or nil, got #{inspect value}" end @@ -158,40 +252,89 @@ defmodule ExUnit.Assertions do @operator [:==, :<, :>, :<=, :>=, :===, :=~, :!==, :!=, :in] - defp translate_assertion({operator, _, [left, right]} = expr) when operator in @operator do - expr = Macro.escape(expr) + defp translate_assertion(:assert, {operator, meta, [_, _]} = expr, caller) when operator in @operator do + left = Macro.var(:left, __MODULE__) + right = Macro.var(:right, __MODULE__) + call = {operator, meta, [left, right]} + equality_check? 
= operator in [:<, :>, :!==, :!=] + message = "Assertion with #{operator} failed" + translate_assertion(:assert, expr, call, message, equality_check?, caller) + end + + defp translate_assertion(:refute, {operator, meta, [_, _]} = expr, caller) when operator in @operator do + left = Macro.var(:left, __MODULE__) + right = Macro.var(:right, __MODULE__) + call = {:not, meta, [{operator, meta, [left, right]}]} + equality_check? = operator in [:<=, :>=, :===, :==, :=~] + message = "Refute with #{operator} failed" + translate_assertion(:refute, expr, call, message, equality_check?, caller) + end + + defp translate_assertion(_kind, _expected, _caller) do + nil + end + + defp translate_assertion(kind, {_, _, [left, right]} = expr, call, message, true, caller) do + expr = escape_quoted(kind, expr) + + # We collect the binding for LHS/RHS separately because we want top-level + # variables to not show up in the binding, but if we pass "expr" we're sure + # there won't be any top-level variables. + binding = Enum.uniq(collect_vars_used_in_expression(left, caller.vars) ++ collect_vars_used_in_expression(right, caller.vars)) + quote do - left = unquote(left) + left = unquote(left) right = unquote(right) - assert unquote(operator)(left, right), - left: left, - right: right, - expr: unquote(expr), - message: unquote("Assertion with #{operator} failed") + if ExUnit.Assertions.__equal__?(left, right) do + assert false, + left: left, + expr: unquote(expr), + binding: unquote(binding), + message: unquote(message <> ", both sides are exactly equal") + else + assert unquote(call), + left: left, + right: right, + expr: unquote(expr), + binding: unquote(binding), + message: unquote(message) + end end end - defp translate_assertion({:!, [], [{operator, _, [left, right]} = expr]}) when operator in @operator do - expr = Macro.escape(expr) + defp translate_assertion(kind, {_, _, [left, right]} = expr, call, message, false, caller) do + expr = escape_quoted(kind, expr) + + # We collect the binding for LHS/RHS separately because we want top-level + # variables to not show up in the binding, but if we pass "expr" we're sure + # there won't be any top-level variables. + binding = Enum.uniq(collect_vars_used_in_expression(left, caller.vars) ++ collect_vars_used_in_expression(right, caller.vars)) + quote do - left = unquote(left) + left = unquote(left) right = unquote(right) - assert not(unquote(operator)(left, right)), - left: left, - right: right, - expr: unquote(expr), - message: unquote("Refute with #{operator} failed") + assert unquote(call), + left: left, + right: right, + expr: unquote(expr), + binding: unquote(binding), + message: unquote(message) end end - defp translate_assertion(_expected) do - nil + @doc false + def __equal__?(left, right) do + left === right + end + + defp escape_quoted(kind, expr) do + Macro.escape({kind, [], [expr]}) end ## END HELPERS @doc """ - Asserts `value` is true, displaying the given `message` otherwise. + Asserts `value` is `true`, displaying the given `message` otherwise. ## Examples @@ -208,27 +351,14 @@ defmodule ExUnit.Assertions do end @doc """ - Asserts `value` is true. - If it fails, it raises an expectation error - using the given `left` and `right` values. - - You probably don't need to use this—the regular `assert` function - handles this for you. 
- - ## Examples - - assert this > that, this, that, "more than" - - """ - def assert(value, left, right, message) when is_binary(message) do - assert(value, left: left, right: right, message: message) - end + Asserts that a message matching `pattern` was or is going to be received + within the `timeout` period, specified in milliseconds. - @doc """ - Asserts a message was or is going to be received. Unlike - `assert_received`, it has a default timeout of 100 milliseconds. + Unlike `assert_received`, it has a default `timeout` + of 100 milliseconds. - The `expected` argument is a pattern. + The `pattern` argument must be a match pattern. Flunks with `failure_message` + if a message matching `pattern` is not received. ## Examples @@ -246,69 +376,233 @@ defmodule ExUnit.Assertions do assert_receive {:count, ^x} """ - defmacro assert_receive(expected, timeout \\ 100, message \\ nil) do - do_assert_receive(expected, timeout, message) + defmacro assert_receive(pattern, + timeout \\ Application.fetch_env!(:ex_unit, :assert_receive_timeout), + failure_message \\ nil) do + assert_receive(pattern, timeout, failure_message, __CALLER__) end @doc """ - Asserts a message was received and is in the current process' mailbox. - Timeout is set to 0, so there is no waiting time. + Asserts that a message matching `pattern` was received and is in the + current process' mailbox. - The `expected` argument is a pattern. + The `pattern` argument must be a match pattern. Flunks with `failure_message` + if a message matching `pattern` was not received. + + Timeout is set to 0, so there is no waiting time. ## Examples - send self, :hello + send self(), :hello assert_received :hello + send self(), :bye + assert_received :hello, "Oh No!" + ** (ExUnit.AssertionError) Oh No! + Process mailbox: + :bye + You can also match against specific patterns: - send self, {:hello, "world"} + send self(), {:hello, "world"} assert_received {:hello, _} """ - defmacro assert_received(expected, message \\ nil) do - do_assert_receive(expected, 0, message) + defmacro assert_received(pattern, failure_message \\ nil) do + assert_receive(pattern, 0, failure_message, __CALLER__) end - defp do_assert_receive(expected, timeout, message) do - binary = Macro.to_string(expected) - message = message || "No message matching #{binary}" + defp assert_receive(pattern, timeout, failure_message, caller) do + binary = Macro.to_string(pattern) + + # Expand before extracting metadata + pattern = Macro.expand(pattern, caller) + vars = collect_vars_from_pattern(pattern) + pins = collect_pins_from_pattern(pattern, caller.vars) + + pattern = + case pattern do + {:when, meta, [left, right]} -> + {:when, meta, [quote(do: unquote(left) = received), right]} + left -> + quote(do: unquote(left) = received) + end + + quote do + timeout = unquote(timeout) - {:receive, meta, args} = - quote do + {received, unquote(vars)} = receive do - unquote(expected) = received -> received + unquote(pattern) -> + {received, unquote(vars)} after - unquote(timeout) -> - flunk unquote(message) + timeout -> + {:messages, messages} = Process.info(self(), :messages) + + pattern_finder = fn message -> + case message do + unquote(pattern) -> + _ = unquote(vars) + true + _ -> + false + end + end + + if Enum.any?(messages, pattern_finder) do + flunk(unquote(failure_message) || """ + Found message matching #{unquote(binary)} after #{timeout}ms. + + This means the message was delivered too close to the timeout value, you may want to either: + + 1. 
Give an increased timeout to `assert_receive/2` + 2. Increase the default timeout to all `assert_receive` in your + test_helper.exs by setting ExUnit.configure(assert_receive_timeout: ...) + """) + else + failure_message = unquote(failure_message) || "No message matching #{unquote(binary)} after #{timeout}ms." + flunk(failure_message <> + ExUnit.Assertions.__pins__(unquote(pins)) <> + ExUnit.Assertions.__mailbox__(messages)) + end end - end - {:receive, [{:export_head, true}|meta], args} + received + end + end + + @indent "\n " + @max_mailbox_length 10 + + @doc false + def __mailbox__(messages) do + length = length(messages) + mailbox = + messages + |> Enum.take(@max_mailbox_length) + |> Enum.map_join(@indent, &inspect/1) + mailbox_message(length, @indent <> mailbox) + end + + @doc false + def __pins__([]), do: "" + def __pins__(pins) do + content = + pins + |> Enum.reverse() + |> Enum.map_join(@indent, fn {name, var} -> "#{name} = #{inspect(var)}" end) + "\nThe following variables were pinned:" <> @indent <> content + end + + defp mailbox_message(0, _mailbox), do: "\nThe process mailbox is empty." + defp mailbox_message(length, mailbox) when length > 10 do + "\nProcess mailbox:" <> mailbox <> + "\nShowing only #{@max_mailbox_length} of #{length} messages." + end + defp mailbox_message(_length, mailbox) do + "\nProcess mailbox:" <> mailbox + end + + defp collect_pins_from_pattern(expr, vars) do + {_, pins} = + Macro.prewalk(expr, [], fn + {:^, _, [{name, _, nil} = var]}, acc -> + if {name, nil} in vars do + {:ok, [{name, var} | acc]} + else + {:ok, acc} + end + form, acc -> + {form, acc} + end) + Enum.uniq_by(pins, &elem(&1, 0)) + end + + defp collect_vars_from_pattern({:when, _, [left, right]}) do + pattern = collect_vars_from_pattern(left) + for {name, _, context} = var <- collect_vars_from_pattern(right), + Enum.any?(pattern, &match?({^name, _, ^context}, &1)), + into: pattern, + do: var + end + + defp collect_vars_from_pattern(expr) do + Macro.prewalk(expr, [], fn + {:::, _, [left, _]}, acc -> + {[left], acc} + {skip, _, [_]}, acc when skip in [:^, :@] -> + {:ok, acc} + {:_, _, context}, acc when is_atom(context) -> + {:ok, acc} + {name, meta, context}, acc when is_atom(name) and is_atom(context) -> + {:ok, [{name, [generated: true] ++ meta, context} | acc]} + node, acc -> + {node, acc} + end) + |> elem(1) + end + + defp collect_vars_used_in_expression({name, _meta, context} = _var, _existing_vars) + when is_atom(name) and is_atom(context) do + [] + end + + defp collect_vars_used_in_expression(expr, existing_vars) do + {_ast, vars} = + Macro.prewalk(expr, [], fn + {name, _meta, nil} = var, acc when is_atom(name) -> + if {name, nil} in existing_vars do + {:ok, [{name, var} | acc]} + else + {:ok, acc} + end + other, acc -> + {other, acc} + end) + + vars + |> Enum.reverse() + |> Enum.uniq_by(&elem(&1, 0)) + end + + defp no_warning({name, meta, [expr, [do: clauses]]}) do + clauses = Enum.map clauses, fn {:->, meta, args} -> + {:->, [generated: true] ++ meta, args} + end + {name, meta, [expr, [do: clauses]]} end @doc """ Asserts the `exception` is raised during `function` execution with - the `expected_message`. Returns the rescued exception, fails otherwise. + the expected `message`, which can be a `Regex` or an exact `String`. + Returns the rescued exception, fails otherwise. 
## Examples assert_raise ArithmeticError, "bad argument in arithmetic expression", fn -> 1 + "test" end + + assert_raise RuntimeError, ~r/^today's lucky number is 0\.\d+!$/, fn -> + raise "today's lucky number is #{:rand.uniform}!" + end """ def assert_raise(exception, message, function) when is_function(function) do error = assert_raise(exception, function) - is_match = cond do + match? = cond do is_binary(message) -> Exception.message(error) == message Regex.regex?(message) -> Exception.message(error) =~ message end - msg = "Wrong message for #{inspect exception}. " <> - "Expected #{inspect message}, got #{inspect Exception.message(error)}" - assert is_match, message: msg + message = + "Wrong message for #{inspect exception}\n" <> + "expected:\n" <> + " #{inspect message}\n" <> + "actual:\n" <> + " #{inspect Exception.message(error)}" + + assert match?, message: message error end @@ -338,7 +632,7 @@ defmodule ExUnit.Assertions do name == ExUnit.AssertionError -> reraise(error, stacktrace) true -> - flunk "Expected exception #{inspect exception} but got #{inspect name} (#{Exception.message(error)})" + reraise ExUnit.AssertionError, [message: "Expected exception #{inspect exception} but got #{inspect name} (#{Exception.message(error)})"], stacktrace end else _ -> flunk "Expected exception #{inspect exception} but nothing was raised" @@ -346,7 +640,7 @@ defmodule ExUnit.Assertions do end @doc """ - Asserts that `val1` and `val2` differ by no more than `delta`. + Asserts that `value1` and `value2` differ by no more than `delta`. ## Examples @@ -355,11 +649,11 @@ defmodule ExUnit.Assertions do assert_in_delta 10, 15, 4 """ - def assert_in_delta(val1, val2, delta, message \\ nil) do - diff = abs(val1 - val2) + def assert_in_delta(value1, value2, delta, message \\ nil) do + diff = abs(value1 - value2) message = message || - "Expected the difference between #{inspect val1} and " <> - "#{inspect val2} (#{inspect diff}) to be less than #{inspect delta}" + "Expected the difference between #{inspect value1} and " <> + "#{inspect value2} (#{inspect diff}) to be less than #{inspect delta}" assert diff < delta, message end @@ -405,7 +699,7 @@ defmodule ExUnit.Assertions do defp do_catch(kind, expr) do quote do try do - unquote(expr) + _ = unquote(expr) flunk "Expected to catch #{unquote(kind)}, got nothing" rescue e in [ExUnit.AssertionError] -> @@ -429,44 +723,54 @@ defmodule ExUnit.Assertions do end @doc """ - refute_receive message, timeout \\ 100, message \\ nil + Asserts that a message matching `pattern` was not received (and won't be received) + within the `timeout` period, specified in milliseconds. - Asserts `message` was not received (and won't be received) within - the `timeout` period. - - The `not_expected` argument is a match pattern. + The `pattern` argument must be a match pattern. Flunks with `failure_message` + if a message matching `pattern` is received. ## Examples refute_receive :bye - Refute received with a explicit timeout: + Refute received with an explicit timeout: refute_receive :bye, 1000 """ - defmacro refute_receive(not_expected, timeout \\ 100, message \\ nil) do - do_refute_receive(not_expected, timeout, message) + defmacro refute_receive(pattern, + timeout \\ Application.fetch_env!(:ex_unit, :refute_receive_timeout), + failure_message \\ nil) do + do_refute_receive(pattern, timeout, failure_message) end @doc """ - Asserts a message was not received (i.e. it is not in the current process mailbox). - The `not_expected` argument must be a match pattern. 
+ Asserts a message matching `pattern` was not received (i.e. it is not in the + current process' mailbox). + + The `pattern` argument must be a match pattern. Flunks with `failure_message` + if a message matching `pattern` was received. Timeout is set to 0, so there is no waiting time. ## Examples - send self, :hello + send self(), :hello refute_received :bye + send self(), :hello + refute_received :hello, "Oh No!" + ** (ExUnit.AssertionError) Oh No! + Process mailbox: + :bye + """ - defmacro refute_received(not_expected, message \\ nil) do - do_refute_receive(not_expected, 0, message) + defmacro refute_received(pattern, failure_message \\ nil) do + do_refute_receive(pattern, 0, failure_message) end - defp do_refute_receive(not_expected, timeout, message) do - receive_clause = refute_receive_clause(not_expected, message) + defp do_refute_receive(pattern, timeout, failure_message) do + receive_clause = refute_receive_clause(pattern, failure_message) quote do receive do @@ -477,22 +781,22 @@ defmodule ExUnit.Assertions do end end - defp refute_receive_clause(not_expected, nil) do - binary = Macro.to_string(not_expected) + defp refute_receive_clause(pattern, nil) do + binary = Macro.to_string(pattern) quote do - unquote(not_expected) = actual -> + unquote(pattern) = actual -> flunk "Unexpectedly received message #{inspect actual} (which matched #{unquote binary})" end end - defp refute_receive_clause(not_expected, message) do + defp refute_receive_clause(pattern, failure_message) do quote do - unquote(not_expected) -> flunk unquote(message) + unquote(pattern) -> flunk unquote(failure_message) end end @doc """ - Asserts `val1` and `val2` are not within `delta`. + Asserts `value1` and `value2` are not within `delta`. If you supply `message`, information about the values will automatically be appended to it. @@ -503,14 +807,14 @@ defmodule ExUnit.Assertions do refute_in_delta 10, 11, 2 """ - def refute_in_delta(val1, val2, delta, message \\ nil) do - diff = abs(val1 - val2) + def refute_in_delta(value1, value2, delta, message \\ nil) do + diff = abs(value1 - value2) message = if message do - message <> " (difference between #{inspect val1} " <> - "and #{inspect val2} is less than #{inspect delta})" + message <> " (difference between #{inspect value1} " <> + "and #{inspect value2} is less than #{inspect delta})" else - "Expected the difference between #{inspect val1} and " <> - "#{inspect val2} (#{inspect diff}) to be more than #{inspect delta}" + "Expected the difference between #{inspect value1} and " <> + "#{inspect value2} (#{inspect diff}) to be more than #{inspect delta}" end refute diff < delta, message end @@ -525,7 +829,7 @@ defmodule ExUnit.Assertions do """ @spec flunk :: no_return @spec flunk(String.t) :: no_return - def flunk(message \\ "Flunked!") do + def flunk(message \\ "Flunked!") when is_binary(message) do assert false, message: message end end diff --git a/lib/ex_unit/lib/ex_unit/callbacks.ex b/lib/ex_unit/lib/ex_unit/callbacks.ex index f05b0c32bee..169831e3d82 100644 --- a/lib/ex_unit/lib/ex_unit/callbacks.ex +++ b/lib/ex_unit/lib/ex_unit/callbacks.ex @@ -1,43 +1,45 @@ defmodule ExUnit.Callbacks do @moduledoc ~S""" - Defines ExUnit Callbacks. + Defines ExUnit callbacks. This module defines both `setup_all` and `setup` callbacks, as well as - the `on_exit` facility. + the `on_exit/2` function. The setup callbacks are defined via macros and each one can optionally receive a map with metadata, usually referred to as `context`. 
The - callback may optionally put extra data into `context` to be used in + callback may optionally put extra data into the `context` to be used in the tests. - The `setup_all` callbacks are invoked once before the first test's `setup` - and all `setup` callbacks are run before each test. No callback runs if the - test case has no tests or all tests were filtered out. + The `setup_all` callbacks are invoked only once to setup the test case before any + test is run and all `setup` callbacks are run before each test. No callback + runs if the test case has no tests or all tests have been filtered out. - `on_exit` callbacks are registered on demand, usually to undo an action - performed by a setup callback. `on_exit` may also take a reference, - allowing callback to be overridden in the future. A registered `on_exit` + `on_exit/2` callbacks are registered on demand, usually to undo an action + performed by a setup callback. `on_exit/2` may also take a reference, + allowing callback to be overridden in the future. A registered `on_exit/2` callback always runs, while failures in `setup` and `setup_all` will stop all remaining setup callbacks from executing. Finally, `setup_all` callbacks run in the test case process, while all - `setup` callbacks run in the same process as the test itself. `on_exit` + `setup` callbacks run in the same process as the test itself. `on_exit/2` callbacks always run in a separate process than the test case or the test itself. Since the test process exits with reason `:shutdown`, most - of times `on_exit/1` can be avoided as processes are going to clean + of times `on_exit/2` can be avoided as processes are going to clean up on their own. ## Context - If you return `{:ok, }` from `setup_all`, the dictionary - will be merged into the current context and be available in all - subsequent `setup_all`, `setup` and the test itself. + If you return a keyword list, a map, or `{:ok, keywords | map}` from + `setup_all`, the keyword list/map will be merged into the current context and + be available in all subsequent `setup_all`, `setup`, and the `test` itself. - Similarly, returning `{:ok, }` from `setup`, the dict returned - will be merged into the current context and be available in all - subsequent `setup` and the `test` itself. + Similarly, returning a keyword list, map, or `{:ok, keywords | map}` from + `setup` means that the returned keyword list/map will be merged into the + current context and be available in all subsequent `setup` and the `test` + itself. - Returning `:ok` leaves the context unchanged in both cases. + Returning `:ok` leaves the context unchanged (both in `setup` and `setup_all` + callbacks). Returning anything else from `setup_all` will force all tests to fail, while a bad response from `setup` causes the current test to fail. 
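A concrete sketch of these merging rules (the module name and context keys are arbitrary, chosen only for illustration):

    defmodule ContextSharingTest do
      use ExUnit.Case, async: true

      # A map wrapped in {:ok, _} and a bare keyword list are both merged
      # into the context that each test receives.
      setup do
        {:ok, %{tmp_dir: "/tmp/example"}}
      end

      setup do
        [answer: 42]
      end

      test "receives the merged context", %{tmp_dir: dir, answer: answer} do
        assert dir == "/tmp/example"
        assert answer == 42
      end
    end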
@@ -47,15 +49,15 @@ defmodule ExUnit.Callbacks do defmodule AssertionTest do use ExUnit.Case, async: true - # `setup_all` is called once before every test + # "setup_all" is called once to setup the case before any test is run setup_all do IO.puts "Starting AssertionTest" - # No metadata + # No context is returned here :ok end - # `setup` is called before each test is run + # "setup" is called before each test is run setup do IO.puts "This is a setup callback" @@ -63,17 +65,20 @@ defmodule ExUnit.Callbacks do IO.puts "This is invoked once the test is done" end - # Returns extra metadata, it must be a dict - {:ok, hello: "world"} + # Returns extra metadata to be merged into context + [hello: "world"] end - # Same as `setup`, but receives the context + # Same as "setup", but receives the context # for the current test setup context do IO.puts "Setting up: #{context[:test]}" :ok end + # Setups can also invoke a local or imported function that can return a context + setup :invoke_local_or_imported_function + test "always pass" do assert true end @@ -81,6 +86,10 @@ defmodule ExUnit.Callbacks do test "another one", context do assert context[:hello] == "world" end + + defp invoke_local_or_imported_function(context) do + [from_named_setup: true] + end end """ @@ -88,6 +97,7 @@ defmodule ExUnit.Callbacks do @doc false defmacro __using__(_) do quote do + @ex_unit_describe nil @ex_unit_setup [] @ex_unit_setup_all [] @@ -104,60 +114,172 @@ defmodule ExUnit.Callbacks do @doc """ Defines a callback to be run before each test in a case. + + ## Examples + + setup :clean_up_tmp_directory + """ - defmacro setup(var \\ quote(do: _), block) do + defmacro setup(block) do + if Keyword.keyword?(block) do + do_setup(quote(do: _), block) + else + quote do + @ex_unit_setup ExUnit.Callbacks.__callback__(unquote(block), @ex_unit_describe) ++ + @ex_unit_setup + end + end + end + + @doc """ + Defines a callback to be run before each test in a case. + + ## Examples + + setup context do + [conn: Plug.Conn.build_conn()] + end + + """ + defmacro setup(var, block) do + do_setup(var, block) + end + + defp do_setup(var, block) do quote bind_quoted: [var: escape(var), block: escape(block)] do name = :"__ex_unit_setup_#{length(@ex_unit_setup)}" defp unquote(name)(unquote(var)), unquote(block) - @ex_unit_setup [name|@ex_unit_setup] + @ex_unit_setup [{name, @ex_unit_describe} | @ex_unit_setup] + end + end + + @doc """ + Defines a callback to be run before all tests in a case. + + ## Examples + + setup_all :clean_up_tmp_directory + + """ + defmacro setup_all(block) do + if Keyword.keyword?(block) do + do_setup_all(quote(do: _), block) + else + quote do + @ex_unit_describe && raise "cannot invoke setup_all/1 inside describe as setup_all/1 " <> + "always applies to all tests in a module" + @ex_unit_setup_all ExUnit.Callbacks.__callback__(unquote(block), nil) ++ + @ex_unit_setup_all + end end end @doc """ Defines a callback to be run before all tests in a case. 
+ + ## Examples + + setup_all context do + [conn: Plug.Conn.build_conn()] + end + """ - defmacro setup_all(var \\ quote(do: _), block) do + defmacro setup_all(var, block) do + do_setup_all(var, block) + end + + defp do_setup_all(var, block) do quote bind_quoted: [var: escape(var), block: escape(block)] do + @ex_unit_describe && raise "cannot invoke setup_all/2 inside describe" name = :"__ex_unit_setup_all_#{length(@ex_unit_setup_all)}" defp unquote(name)(unquote(var)), unquote(block) - @ex_unit_setup_all [name|@ex_unit_setup_all] + @ex_unit_setup_all [{name, nil} | @ex_unit_setup_all] end end @doc """ Defines a callback that runs on the test (or test case) exit. - An `on_exit` callback is a function that receives no arguments and + `callback` is a function that receives no arguments and runs in a separate process than the caller. `on_exit/2` is usually called from `setup` and `setup_all` callbacks, - often to undo the action performed during `setup`. However, `on_exit` + often to undo the action performed during `setup`. However, `on_exit/2` may also be called dynamically, where a reference can be used to guarantee the callback will be invoked only once. """ - @spec on_exit(term, (() -> term)) :: :ok - def on_exit(ref \\ make_ref, callback) do - case ExUnit.OnExitHandler.add(self, ref, callback) do + @spec on_exit(term, (() -> term)) :: :ok | no_return + def on_exit(name_or_ref \\ make_ref(), callback) when is_function(callback, 0) do + case ExUnit.OnExitHandler.add(self(), name_or_ref, callback) do :ok -> :ok :error -> - raise ArgumentError, "on_exit/1 callback can only be invoked from the test process" + raise ArgumentError, "on_exit/2 callback can only be invoked from the test process" end end ## Helpers + @reserved [:case, :file, :line, :test, :async, :registered, :describe] + @doc false - def __merge__(_mod, other, :ok) do - {:ok, other} + def __callback__(callback, describe) do + for k <- List.wrap(callback) do + if not is_atom(k) do + raise ArgumentError, "setup/setup_all expect a callback name as an atom or " <> + "a list of callback names, got: #{inspect k}" + end + + {k, describe} + end |> Enum.reverse() + end + + @doc false + def __merge__(mod, context, value) do + merge(mod, context, value, value) + end + + @doc false + defp merge(_mod, context, :ok, _original_value) do + context + end + + defp merge(mod, context, {:ok, value}, original_value) do + merge(mod, context, value, original_value) end - def __merge__(_mod, other, {:ok, data}) do - {:ok, Dict.merge(other, data)} + defp merge(mod, _context, %{__struct__: _}, original_value) do + raise_merge_failed!(mod, original_value) end - def __merge__(mod, _, failure) do - raise "expected ExUnit callback in #{inspect mod} to return :ok " <> - " or {:ok, dict}, got #{inspect failure} instead" + defp merge(mod, context, data, original_value) when is_list(data) do + merge(mod, context, Map.new(data), original_value) + end + + defp merge(mod, context, data, _original_value) when is_map(data) do + context_merge(mod, context, data) + end + + defp merge(mod, _, _return_value, original_value) do + raise_merge_failed!(mod, original_value) + end + + defp context_merge(mod, context, data) do + Map.merge(context, data, fn + k, v1, v2 when k in @reserved -> + if v1 == v2, do: v1, else: raise_merge_reserved!(mod, k, v2) + _, _, v -> + v + end) + end + + defp raise_merge_failed!(mod, return_value) do + raise "expected ExUnit callback in #{inspect mod} to return :ok | keyword | map, " <> + "got #{inspect return_value} instead" + end + + defp 
raise_merge_reserved!(mod, key, value) do + raise "ExUnit callback in #{inspect mod} is trying to set " <> + "reserved field #{inspect key} to #{inspect value}" end defp escape(contents) do @@ -170,28 +292,37 @@ defmodule ExUnit.Callbacks do acc = case callbacks do [] -> - quote do: {:ok, context} - [h|t] -> - Enum.reduce t, compile_merge(h), fn(callback, acc) -> + quote do: context + [h | t] -> + Enum.reduce t, compile_merge(h), fn callback_describe, acc -> quote do - case unquote(acc) do - {:ok, context} -> - unquote(compile_merge(callback)) - other -> - other - end + context = unquote(acc) + unquote(compile_merge(callback_describe)) end end end quote do - def __ex_unit__(unquote(kind), context), do: unquote(acc) + def __ex_unit__(unquote(kind), context) do + describe = Map.get(context, :describe, nil) + unquote(acc) + end end end - defp compile_merge(callback) do + defp compile_merge({callback, nil}) do quote do unquote(__MODULE__).__merge__(__MODULE__, context, unquote(callback)(context)) end end + + defp compile_merge({callback, describe}) do + quote do + if unquote(describe) == describe do + unquote(compile_merge({callback, nil})) + else + context + end + end + end end diff --git a/lib/ex_unit/lib/ex_unit/capture_io.ex b/lib/ex_unit/lib/ex_unit/capture_io.ex index c0767d786a2..020ef3ecb63 100644 --- a/lib/ex_unit/lib/ex_unit/capture_io.ex +++ b/lib/ex_unit/lib/ex_unit/capture_io.ex @@ -9,11 +9,19 @@ defmodule ExUnit.CaptureIO do import ExUnit.CaptureIO - test :example do + test "example" do assert capture_io(fn -> IO.puts "a" end) == "a\n" end + + test "checking the return value and the IO output" do + fun = fn -> + assert Enum.each(["some", "example"], &(IO.puts &1)) == :ok + end + assert capture_io(fun) == "some\nexample\n" + # tip: or use only: "capture_io(fun)" to silence the IO output (so only assert the return value) + end end """ @@ -35,15 +43,14 @@ defmodule ExUnit.CaptureIO do prompts (specified as arguments to `IO.get*` functions) are not captured. - A developer can set a string as an input. The default - input is `:eof`. + A developer can set a string as an input. The default input is `:eof`. ## Examples - iex> capture_io(fn -> IO.write "josé" end) == "josé" + iex> capture_io(fn -> IO.write "john" end) == "john" true - iex> capture_io(:stderr, fn -> IO.write(:stderr, "josé") end) == "josé" + iex> capture_io(:stderr, fn -> IO.write(:stderr, "john") end) == "john" true iex> capture_io("this is input", fn -> @@ -58,6 +65,20 @@ defmodule ExUnit.CaptureIO do ...> end) == "this is input" true + ## Returning values + + As seen in the examples above, `capture_io` returns the captured output. + If you want to also capture the result of the function executed inside + the `capture_io`, you can use `Kernel.send/2` to send yourself a message + and use `ExUnit.Assertions.assert_received/2` to match on the results: + + capture_io([input: "this is input", capture_prompt: false], fn -> + send self(), {:block_result, 42} + # ... 
+ end) + + assert_received {:block_result, 42} + """ def capture_io(fun) do do_capture_io(:standard_io, [], fun) @@ -91,44 +112,48 @@ defmodule ExUnit.CaptureIO do prompt_config = Keyword.get(options, :capture_prompt, true) input = Keyword.get(options, :input, "") - original_gl = :erlang.group_leader + original_gl = Process.group_leader() {:ok, capture_gl} = StringIO.open(input, capture_prompt: prompt_config) - :erlang.group_leader(capture_gl, self) - try do - fun.() - StringIO.close(capture_gl) |> elem(1) |> elem(1) + Process.group_leader(self(), capture_gl) + do_capture_io(capture_gl, fun) after - :erlang.group_leader(original_gl, self) + Process.group_leader(self(), original_gl) end end defp do_capture_io(device, options, fun) do - unless original_io = Process.whereis(device) do - raise "could not find IO device registered at #{inspect device}" - end - - unless ExUnit.Server.add_device(device) do - raise "IO device registered at #{inspect device} is already captured" - end - input = Keyword.get(options, :input, "") + {:ok, string_io} = StringIO.open(input) + case ExUnit.CaptureServer.device_capture_on(device, string_io) do + {:ok, ref} -> + try do + do_capture_io(string_io, fun) + after + ExUnit.CaptureServer.device_capture_off(ref) + end + {:error, :no_device} -> + _ = StringIO.close(string_io) + raise "could not find IO device registered at #{inspect device}" + {:error, :already_captured} -> + _ = StringIO.close(string_io) + raise "IO device registered at #{inspect device} is already captured" + end + end - Process.unregister(device) - {:ok, capture_io} = StringIO.open(input) - Process.register(capture_io, device) - + defp do_capture_io(string_io, fun) do try do - fun.() - StringIO.close(capture_io) |> elem(1) |> elem(1) - after - try do - Process.unregister(device) - rescue - ArgumentError -> nil - end - Process.register(original_io, device) - ExUnit.Server.remove_device(device) + _ = fun.() + :ok + catch + kind, reason -> + stack = System.stacktrace() + _ = StringIO.close(string_io) + :erlang.raise(kind, reason, stack) + else + :ok -> + {:ok, output} = StringIO.close(string_io) + elem(output, 1) end end end diff --git a/lib/ex_unit/lib/ex_unit/capture_log.ex b/lib/ex_unit/lib/ex_unit/capture_log.ex new file mode 100644 index 00000000000..dec9322d235 --- /dev/null +++ b/lib/ex_unit/lib/ex_unit/capture_log.ex @@ -0,0 +1,132 @@ +defmodule ExUnit.CaptureLog do + @moduledoc ~S""" + Functionality to capture logs for testing. + + ## Examples + + defmodule AssertionTest do + use ExUnit.Case + + import ExUnit.CaptureLog + require Logger + + test "example" do + assert capture_log(fn -> + Logger.error "log msg" + end) =~ "log msg" + end + + test "check multiple captures concurrently" do + fun = fn -> + for msg <- ["hello", "hi"] do + assert capture_log(fn -> Logger.error msg end) =~ msg + end + Logger.debug "testing" + end + assert capture_log(fun) =~ "hello" + assert capture_log(fun) =~ "testing" + end + end + + """ + + alias Logger.Backends.Console + + @doc """ + Captures Logger messages generated when evaluating `fun`. + + Returns the binary which is the captured output. + + This function mutes the `:console` backend and captures any log + messages sent to Logger from the calling processes. It is possible + to ensure explicit log messages from other processes are captured + by waiting for their exit or monitor signal. + + However, `capture_log` does not guarantee to capture log messages + originated from processes spawned using a low level `Kernel` spawn + function (e.g. 
`Kernel.spawn/1`) and such processes exit with an + exception or a throw. Therefore, prefer using a `Task`, or other OTP + process, will send explicit logs before its exit or monitor signals + and will not cause VM generated log messages. + + Note that when the `async` is set to `true`, the messages from another + test might be captured. This is OK as long you consider such cases in + your assertions. + + It is possible to configure the level to capture with `:level`, + which will set the capturing level for the duration of the + capture, for instance, if the log level is set to :error + any message with the lower level will be ignored. + The default level is `nil`, which will capture all messages. + The behaviour is undetermined if async tests change Logger level. + + The format, metadata and colors can be configured with `:format`, + `:metadata` and `:colors` respectively. These three options + defaults to the `:console` backend configuration parameters. + """ + @spec capture_log(Keyword.t, (() -> any)) :: String.t + def capture_log(opts \\ [], fun) do + opts = Keyword.put_new(opts, :level, nil) + {:ok, string_io} = StringIO.open("") + + try do + _ = :gen_event.which_handlers(:error_logger) + :ok = add_capture(string_io, opts) + ref = ExUnit.CaptureServer.log_capture_on(self()) + + try do + fun.() + after + :ok = Logger.flush() + :ok = ExUnit.CaptureServer.log_capture_off(ref) + :ok = remove_capture(string_io) + end + + :ok + catch + kind, reason -> + stack = System.stacktrace() + _ = StringIO.close(string_io) + :erlang.raise(kind, reason, stack) + else + :ok -> + {:ok, content} = StringIO.close(string_io) + elem(content, 1) + end + end + + defp add_capture(pid, opts) do + case :proc_lib.start(__MODULE__, :init_proxy, [pid, opts, self()]) do + :ok -> + :ok + other -> + mfa = {ExUnit.CaptureLog, :add_capture, [pid, opts]} + exit({other, mfa}) + end + end + + @doc false + def init_proxy(pid, opts, parent) do + case :gen_event.add_sup_handler(Logger, {Console, pid}, {Console, [device: pid] ++ opts}) do + :ok -> + ref = Process.monitor(parent) + :proc_lib.init_ack(:ok) + receive do + {:DOWN, ^ref, :process, ^parent, _reason} -> :ok + {:gen_event_EXIT, {Console, ^pid}, _reason} -> :ok + end + other -> + :proc_lib.init_ack(other) + end + end + + defp remove_capture(pid) do + case :gen_event.delete_handler(Logger, {Console, pid}, :ok) do + :ok -> + :ok + {:error, :module_not_found} = error -> + mfa = {ExUnit.CaptureLog, :remove_capture, [pid]} + exit({error, mfa}) + end + end +end diff --git a/lib/ex_unit/lib/ex_unit/capture_server.ex b/lib/ex_unit/lib/ex_unit/capture_server.ex new file mode 100644 index 00000000000..ff369574d34 --- /dev/null +++ b/lib/ex_unit/lib/ex_unit/capture_server.ex @@ -0,0 +1,119 @@ +defmodule ExUnit.CaptureServer do + @moduledoc false + @timeout 30_000 + + use GenServer + + def start_link() do + GenServer.start_link(__MODULE__, :ok, name: __MODULE__) + end + + def device_capture_on(device, pid) do + GenServer.call(__MODULE__, {:device_capture_on, device, pid}, @timeout) + end + + def device_capture_off(ref) do + GenServer.call(__MODULE__, {:device_capture_off, ref}, @timeout) + end + + def log_capture_on(pid) do + GenServer.call(__MODULE__, {:log_capture_on, pid}, @timeout) + end + + def log_capture_off(ref) do + GenServer.call(__MODULE__, {:log_capture_off, ref}, @timeout) + end + + ## Callbacks + + def init(:ok) do + {:ok, %{ + devices: {%{}, %{}}, + log_captures: %{}, + log_status: nil + }} + end + + def handle_call({:device_capture_on, name, pid}, _from, config) do 
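+    # Captures the IO device registered under `name`: the original process is
+    # swapped out for the capturing StringIO `pid`, which is monitored so the
+    # original device can be restored when capture is turned off or the
+    # StringIO goes down.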
+ {names, refs} = config.devices + if Map.has_key?(names, name) do + {:reply, {:error, :already_captured}, config} + else + orig_pid = Process.whereis(name) + Process.unregister(name) + Process.register(pid, name) + ref = Process.monitor(pid) + refs = Map.put(refs, ref, {name, orig_pid}) + names = Map.put(names, name, true) + {:reply, {:ok, ref}, %{config | devices: {names, refs}}} + end + end + + def handle_call({:device_capture_off, ref}, _from, config) do + config = release_device(ref, config) + {:reply, :ok, config} + end + + def handle_call({:log_capture_on, pid}, _from, config) do + ref = Process.monitor(pid) + refs = Map.put(config.log_captures, ref, true) + + if map_size(refs) == 1 do + status = Logger.remove_backend(:console) + {:reply, ref, %{config | log_captures: refs, log_status: status}} + else + {:reply, ref, %{config | log_captures: refs}} + end + end + + def handle_call({:log_capture_off, ref}, _from, config) do + Process.demonitor(ref, [:flush]) + config = remove_log_capture(ref, config) + {:reply, :ok, config} + end + + def handle_info({:DOWN, ref, _, _, _}, config) do + config = remove_log_capture(ref, config) + config = release_device(ref, config) + {:noreply, config} + end + + def handle_info(msg, state) do + super(msg, state) + end + + defp release_device(ref, %{devices: {names, refs}} = config) do + case Map.pop(refs, ref) do + {{name, pid}, refs} -> + names = Map.delete(names, name) + Process.demonitor(ref, [:flush]) + try do + try do + Process.unregister(name) + after + Process.register(pid, name) + end + rescue + ArgumentError -> nil + end + %{config | devices: {names, refs}} + {nil, _refs} -> config + end + end + + defp remove_log_capture(ref, %{log_captures: refs} = config) do + if Map.has_key?(refs, ref) do + refs = Map.delete(refs, ref) + maybe_add_console(refs, config.log_status) + %{config | log_captures: refs} + else + config + end + end + + defp maybe_add_console(refs, status) do + if status == :ok and map_size(refs) == 0 do + Logger.add_backend(:console, flush: true) + end + end +end diff --git a/lib/ex_unit/lib/ex_unit/case.ex b/lib/ex_unit/lib/ex_unit/case.ex index 0c701fa5cbb..0279797421f 100644 --- a/lib/ex_unit/lib/ex_unit/case.ex +++ b/lib/ex_unit/lib/ex_unit/case.ex @@ -1,3 +1,7 @@ +defmodule ExUnit.DuplicateTestError do + defexception [:message] +end + defmodule ExUnit.Case do @moduledoc """ Sets up an ExUnit test case. @@ -7,9 +11,9 @@ defmodule ExUnit.Case do When used, it accepts the following options: - * :async - configure Elixir to run that specific test case in parallel with - others. Must be used for performance when your test cases do not change - any global state. + * `:async` - configure this specific test case to run in parallel + with other test cases. May be used for performance when this test case + does not change any global state. Defaults to `false`. This module automatically includes all callbacks defined in `ExUnit.Callbacks`. 
See that module's documentation for more @@ -21,7 +25,7 @@ defmodule ExUnit.Case do # Use the module use ExUnit.Case, async: true - # The `test` macro is imported by ExUnit.Case + # The "test" macro is imported by ExUnit.Case test "always pass" do assert true end @@ -40,7 +44,7 @@ defmodule ExUnit.Case do {:ok, [pid: pid]} end - test "stores key-values", context do + test "stores key-value pairs", context do assert KV.put(context[:pid], :hello, :world) == :ok assert KV.get(context[:pid], :hello) == :world end @@ -49,7 +53,7 @@ defmodule ExUnit.Case do As the context is a map, it can be pattern matched on to extract information: - test "stores key-values", %{pid: pid} do + test "stores key-value pairs", %{pid: pid} do assert KV.put(pid, :hello, :world) == :ok assert KV.get(pid, :hello) == :world end @@ -80,7 +84,7 @@ defmodule ExUnit.Case do end @tag cd: "fixtures" - test "reads utf-8 fixtures" do + test "reads UTF-8 fixtures" do File.read("hello") end end @@ -100,24 +104,55 @@ defmodule ExUnit.Case do If a tag is given more than once, the last value wins. - ### Module tags + ### Module and describe tags - A tag can be set for all tests in a module by setting `@moduletag`: + A tag can be set for all tests in a module or describe block by + setting `@moduletag` or `@describetag` respectively: @moduletag :external If the same key is set via `@tag`, the `@tag` value has higher precedence. - ### Reserved tags + ### Known tags The following tags are set automatically by ExUnit and are therefore reserved: - * `:case` - the test case module - * `:test` - the test name - * `:line` - the line on which the test was defined - * `:file` - the file on which the test was defined + * `:case` - the test case module + * `:file` - the file on which the test was defined + * `:line` - the line on which the test was defined + * `:test` - the test name + * `:async` - if the test case is in async mode + * `:type` - the type of the test (`:test`, `:property`, etc) + * `:registered` - used for `ExUnit.Case.register_attribute/3` values + * `:describe` - the describe block the test belongs to + + The following tags customize how tests behaves: + + * `:capture_log` - see the "Log Capture" section below + * `:skip` - skips the test with the given reason + * `:timeout` - customizes the test timeout in milliseconds (defaults to 60000) + * `:report` - includes the given tags and context keys on error reports, + see the "Reporting tags" section + + ### Reporting tags + + ExUnit also allows tags or any other key in your context to be included + in error reports, making it easy for developers to see under which + circumstances a test was evaluated. To do so, you use the `:report` tag: + + @moduletag report: [:user_id, :server] + + Now when an error happens, there is a tags section containing the value + for each reported field: + + code: flunk "oops" + stacktrace: + lib/my_lib/source.exs:148 + tags: + user_id: 1 + server: #PID<0.63.0> ## Filters @@ -130,7 +165,7 @@ defmodule ExUnit.Case do ExUnit.configure(exclude: [external: true]) From now on, ExUnit will not run any test that has the `external` flag - set to true. This behaviour can be reversed with the `:include` option + set to `true`. This behaviour can be reversed with the `:include` option which is usually passed through the command line: mix test --include external:true @@ -145,41 +180,59 @@ defmodule ExUnit.Case do Keep in mind that all tests are included by default, so unless they are excluded first, the `include` option has no effect. 
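Editor's note: as a concrete illustration of the tagging and filtering workflow documented above (the module, tag, and file names here are hypothetical and not part of this diff), a project would typically exclude slow or external tests by default in `test/test_helper.exs` and opt back in from the command line:

    # test/test_helper.exs
    # Exclude anything tagged :external unless explicitly included.
    ExUnit.start(exclude: [external: true])

    # test/my_app/http_client_test.exs
    defmodule MyApp.HTTPClientTest do
      use ExUnit.Case, async: true

      # The :endpoint value is shown in error reports thanks to :report.
      @moduletag report: [:endpoint]

      @tag :external
      @tag endpoint: "https://example.com"
      test "talks to the real endpoint", %{endpoint: endpoint} do
        assert is_binary(endpoint)
      end

      test "always runs because it is not tagged :external" do
        assert 1 + 1 == 2
      end
    end

Running `mix test` skips the external test, while `mix test --include external:true` (or `mix test --only external:true`) runs it, following the `:include`/`:exclude` semantics described above.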
+ + ## Log Capture + + ExUnit can optionally suppress printing of log messages that are generated during a test. Log + messages generated while running a test are captured and only if the test fails are they printed + to aid with debugging. + + You can opt into this behaviour for individual tests by tagging them with `:capture_log` or enable + log capture for all tests in the ExUnit configuration: + + ExUnit.start(capture_log: true) + + This default can be overridden by `@tag capture_log: false` or `@moduletag capture_log: false`. + + Since `setup_all` blocks don't belong to a specific test, log messages generated in them (or + between tests) are never captured. If you want to suppress these messages as well, remove the + console backend globally: + + config :logger, backends: [] """ + @reserved [:case, :file, :line, :test, :async, :registered, :describe, :type] + @doc false defmacro __using__(opts) do - async = Keyword.get(opts, :async, false) - unless Process.whereis(ExUnit.Server) do raise "cannot use ExUnit.Case without starting the ExUnit application, " <> "please call ExUnit.start() or explicitly start the :ex_unit app" end quote do - unless Module.get_attribute(__MODULE__, :ex_unit_tests) do - if unquote(async) do - ExUnit.Server.add_async_case(__MODULE__) - else - ExUnit.Server.add_sync_case(__MODULE__) - end + async = !!unquote(opts)[:async] - Enum.each [:ex_unit_tests, :tag, :moduletag], + unless Module.get_attribute(__MODULE__, :ex_unit_tests) do + Enum.each [:ex_unit_tests, :tag, :describetag, :moduletag, :ex_unit_registered], &Module.register_attribute(__MODULE__, &1, accumulate: true) @before_compile ExUnit.Case + @after_compile ExUnit.Case + @ex_unit_async async + @ex_unit_describe nil use ExUnit.Callbacks end import ExUnit.Callbacks import ExUnit.Assertions - import ExUnit.Case + import ExUnit.Case, only: [describe: 2, test: 1, test: 2, test: 3] import ExUnit.DocTest end end @doc """ - Define a test with a string. + Defines a test with a string. Provides a convenient macro that allows a test to be defined with a string. This macro automatically inserts @@ -198,9 +251,9 @@ defmodule ExUnit.Case do defmacro test(message, var \\ quote(do: _), contents) do contents = case contents do - [do: _] -> + [do: block] -> quote do - unquote(contents) + unquote(block) :ok end _ -> @@ -213,10 +266,108 @@ defmodule ExUnit.Case do var = Macro.escape(var) contents = Macro.escape(contents, unquote: true) - quote bind_quoted: binding do - test = :"test #{message}" - ExUnit.Case.__on_definition__(__ENV__, test) - def unquote(test)(unquote(var)), do: unquote(contents) + quote bind_quoted: [var: var, contents: contents, message: message] do + name = ExUnit.Case.register_test(__ENV__, :test, message, []) + def unquote(name)(unquote(var)), do: unquote(contents) + end + end + + @doc """ + Defines a not implemented test with a string. + + Provides a convenient macro that allows a test to be defined + with a string, but not yet implemented. The resulting test will + always fail and print "Not implemented" error message. The + resulting test case is also tagged with `:not_implemented`. + + ## Examples + + test "this will be a test in future" + + """ + defmacro test(message) do + quote bind_quoted: binding() do + name = ExUnit.Case.register_test(__ENV__, :test, message, [:not_implemented]) + def unquote(name)(_), do: flunk("Not implemented") + end + end + + @doc """ + Describes tests together. + + Every describe block receives a name which is used as prefix for + upcoming tests. 
Inside a block, `ExUnit.Callbacks.setup/1` may be + invoked and it will define a setup callback to run only for the + current block. The describe name is also added as a tag, allowing + developers to run tests for specific blocks. + + ## Examples + + defmodule StringTest do + use ExUnit.Case, async: true + + describe "String.capitalize/1" do + test "first grapheme is in uppercase" do + assert String.capitalize("hello") == "Hello" + end + + test "converts remaining graphemes to lowercase" do + assert String.capitalize("HELLO") == "Hello" + end + end + end + + When using Mix, you can run all tests in a describe block as: + + mix test --only describe:"String.capitalize/1" + + Note describe blocks cannot be nested. Instead of relying on hierarchy + for composition, developers should build on top of named setups. For + example: + + defmodule UserManagementTest do + use ExUnit.Case, async: true + + describe "when user is logged in and is an admin" do + setup [:log_user_in, :set_type_to_admin] + + test ... + end + + describe "when user is logged in and is a manager" do + setup [:log_user_in, :set_type_to_manager] + + test ... + end + + defp log_user_in(context) do + # ... + end + end + + By forbidding hierarchies in favor of named setups, it is straightforward + for the developer to glance at each describe block and know exactly the + setup steps involved. + """ + defmacro describe(message, do: block) do + quote do + if @ex_unit_describe do + raise "cannot call describe/2 inside another describe. See the documentation " <> + "for describe/2 on named setups and how to handle hierarchies" + end + + @ex_unit_describe (case unquote(message) do + msg when is_binary(msg) -> msg + msg -> raise ArgumentError, "describe name must be a string, got: #{inspect msg}" + end) + Module.delete_attribute(__ENV__.module, :describetag) + + try do + unquote(block) + after + @ex_unit_describe nil + Module.delete_attribute(__ENV__.module, :describetag) + end end end @@ -230,21 +381,117 @@ defmodule ExUnit.Case do end @doc false - def __on_definition__(env, name) do - mod = env.module - tags = Module.get_attribute(mod, :tag) ++ Module.get_attribute(mod, :moduletag) - tags = tags |> normalize_tags |> Map.merge(%{line: env.line, file: env.file}) + def __after_compile__(%{module: module}, _) do + if Module.get_attribute(module, :ex_unit_async) do + ExUnit.Server.add_async_case(module) + else + ExUnit.Server.add_sync_case(module) + end + end + + @doc """ + Registers a function to run as part of this case. + + This is used by 3rd party projects, like QuickCheck, to + implement macros like `property/3` that works like `test` + but instead defines a property. See `test/3` implementation + for an example of invoking this function. + + The test type will be converted to a string and pluralized for + display. You can use `ExUnit.plural_rule/2` to set a custom + pluralization. + """ + def register_test(%{module: mod, file: file, line: line}, type, name, tags) do + moduletag = Module.get_attribute(mod, :moduletag) + + unless moduletag do + raise "cannot define #{type}. 
Please make sure you have invoked " <> + "\"use ExUnit.Case\" in the current module" + end + + registered_attributes = Module.get_attribute(mod, :ex_unit_registered) + registered = Map.new(registered_attributes, &{&1, Module.get_attribute(mod, &1)}) + + tag = Module.delete_attribute(mod, :tag) + async = Module.get_attribute(mod, :ex_unit_async) + + {name, describe, describetag} = + if describe = Module.get_attribute(mod, :ex_unit_describe) do + {:"#{type} #{describe} #{name}", describe, Module.get_attribute(mod, :describetag)} + else + {:"#{type} #{name}", nil, []} + end + + if Module.defines?(mod, {name, 1}) do + raise ExUnit.DuplicateTestError, ~s("#{name}" is already defined in #{inspect mod}) + end + + tags = + (tags ++ tag ++ describetag ++ moduletag) + |> normalize_tags + |> validate_tags + |> Map.merge(%{line: line, file: file, registered: registered, + async: async, describe: describe, type: type}) + + test = %ExUnit.Test{name: name, case: mod, tags: tags} + Module.put_attribute(mod, :ex_unit_tests, test) + + Enum.each registered_attributes, fn(attribute) -> + Module.delete_attribute(mod, attribute) + end + + name + end + + @doc """ + Registers a new attribute to be used during `ExUnit.Case` tests. + + The attribute values will be available as a key/value pair in + `context.registered`. The key/value pairs will be cleared + after each `ExUnit.Case.test/3` similar to `@tag`. + + `Module.register_attribute/3` is used to register the attribute, + this function takes the same options. + + ## Examples + + defmodule MyTest do + use ExUnit.Case + ExUnit.Case.register_attribute __ENV__, :foobar + + @foobar hello: "world" + test "using custom test attribute", context do + assert context.registered.hello == "world" + end + end + """ + def register_attribute(env, name, opts \\ []) + + def register_attribute(%{module: module}, name, opts) do + register_attribute(module, name, opts) + end + + def register_attribute(mod, name, opts) when is_atom(mod) and is_atom(name) and is_list(opts) do + Module.register_attribute(mod, name, opts) + Module.put_attribute(mod, :ex_unit_registered, name) + end + + defp validate_tags(tags) do + for tag <- @reserved, + Map.has_key?(tags, tag) do + raise "cannot set tag #{inspect tag} because it is reserved by ExUnit" + end - Module.put_attribute(mod, :ex_unit_tests, - %ExUnit.Test{name: name, case: mod, tags: tags}) + unless is_atom(tags[:type]), + do: raise "value for tag \":type\" must be an atom" - Module.delete_attribute(mod, :tag) + tags end defp normalize_tags(tags) do Enum.reduce Enum.reverse(tags), %{}, fn tag, acc when is_atom(tag) -> Map.put(acc, tag, true) - tag, acc when is_list(tag) -> Dict.merge(acc, tag) + tag, acc when is_list(tag) -> tag |> Enum.into(acc) end end end diff --git a/lib/ex_unit/lib/ex_unit/case_template.ex b/lib/ex_unit/lib/ex_unit/case_template.ex index 2e41af01cdd..b4b9098ae93 100644 --- a/lib/ex_unit/lib/ex_unit/case_template.ex +++ b/lib/ex_unit/lib/ex_unit/case_template.ex @@ -1,7 +1,7 @@ defmodule ExUnit.CaseTemplate do @moduledoc """ This module allows a developer to define a test case - template to be used throughout his tests. This is useful + template to be used throughout their tests. This is useful when there are a set of functions that should be shared between tests or a set of setup callbacks. @@ -62,9 +62,18 @@ defmodule ExUnit.CaseTemplate do @doc """ Allows a developer to customize the using block when the case template is used. 
+ + ## Example + + using do + quote do + alias MyApp.FunModule + end + end + """ defmacro using(var \\ quote(do: _), do: block) do - quote location: :keep do + quote do defmacro __using__(unquote(var) = opts) do parent = unquote(__MODULE__).__proxy__(__MODULE__, opts) result = unquote(block) diff --git a/lib/ex_unit/lib/ex_unit/cli_formatter.ex b/lib/ex_unit/lib/ex_unit/cli_formatter.ex index 444b896068e..ff4e697370e 100644 --- a/lib/ex_unit/lib/ex_unit/cli_formatter.ex +++ b/lib/ex_unit/lib/ex_unit/cli_formatter.ex @@ -1,7 +1,6 @@ defmodule ExUnit.CLIFormatter do @moduledoc false - - use GenEvent + use GenServer import ExUnit.Formatter, only: [format_time: 2, format_filters: 2, format_test_failure: 5, format_test_case_failure: 5] @@ -13,105 +12,103 @@ defmodule ExUnit.CLIFormatter do config = %{ seed: opts[:seed], trace: opts[:trace], - color: opts[:color], + colors: Keyword.put_new(opts[:colors], :enabled, IO.ANSI.enabled?), width: get_terminal_width(), - tests_counter: 0, - failures_counter: 0, - invalids_counter: 0 + test_counter: %{}, + failure_counter: 0, + skipped_counter: 0, + invalid_counter: 0 } {:ok, config} end - def handle_event({:suite_finished, run_us, load_us}, config) do + def handle_cast({:suite_started, _opts}, config) do + {:noreply, config} + end + + def handle_cast({:suite_finished, run_us, load_us}, config) do print_suite(config, run_us, load_us) - :remove_handler + {:noreply, config} end - def handle_event({:test_started, %ExUnit.Test{} = test}, config) do - if config.trace, do: IO.write " * #{trace_test_name test}" - {:ok, config} + def handle_cast({:test_started, %ExUnit.Test{} = test}, config) do + if config.trace, do: IO.write " * #{test.name}" + {:noreply, config} end - def handle_event({:test_finished, %ExUnit.Test{state: nil} = test}, config) do + def handle_cast({:test_finished, %ExUnit.Test{state: nil} = test}, config) do if config.trace do IO.puts success(trace_test_result(test), config) else IO.write success(".", config) end - {:ok, %{config | tests_counter: config.tests_counter + 1}} + {:noreply, %{config | test_counter: update_test_counter(config.test_counter, test)}} end - def handle_event({:test_finished, %ExUnit.Test{state: {:skip, _}} = test}, config) do + def handle_cast({:test_finished, %ExUnit.Test{state: {:skip, _}} = test}, config) do if config.trace, do: IO.puts trace_test_skip(test) - {:ok, config} + {:noreply, %{config | test_counter: update_test_counter(config.test_counter, test), + skipped_counter: config.skipped_counter + 1}} end - def handle_event({:test_finished, %ExUnit.Test{state: {:invalid, _}} = test}, config) do + def handle_cast({:test_finished, %ExUnit.Test{state: {:invalid, _}} = test}, config) do if config.trace do IO.puts invalid(trace_test_result(test), config) else IO.write invalid("?", config) end - {:ok, %{config | tests_counter: config.tests_counter + 1, - invalids_counter: config.invalids_counter + 1}} + {:noreply, %{config | test_counter: update_test_counter(config.test_counter, test), + invalid_counter: config.invalid_counter + 1}} end - def handle_event({:test_finished, %ExUnit.Test{state: {:failed, failed}} = test}, config) do + def handle_cast({:test_finished, %ExUnit.Test{state: {:failed, failures}} = test}, config) do if config.trace do IO.puts failure(trace_test_result(test), config) end - formatted = format_test_failure(test, failed, config.failures_counter + 1, + formatted = format_test_failure(test, failures, config.failure_counter + 1, config.width, &formatter(&1, &2, config)) print_failure(formatted, config) 
+ print_logs(test.logs) - {:ok, %{config | tests_counter: config.tests_counter + 1, - failures_counter: config.failures_counter + 1}} + {:noreply, %{config | test_counter: update_test_counter(config.test_counter, test), + failure_counter: config.failure_counter + 1}} end - def handle_event({:case_started, %ExUnit.TestCase{name: name}}, config) do + def handle_cast({:case_started, %ExUnit.TestCase{name: name}}, config) do if config.trace do IO.puts("\n#{inspect name}") end - {:ok, config} + {:noreply, config} end - def handle_event({:case_finished, %ExUnit.TestCase{state: nil}}, config) do - {:ok, config} + def handle_cast({:case_finished, %ExUnit.TestCase{state: nil}}, config) do + {:noreply, config} end - def handle_event({:case_finished, %ExUnit.TestCase{state: {:failed, failed}} = test_case}, config) do - formatted = format_test_case_failure(test_case, failed, config.failures_counter + 1, + def handle_cast({:case_finished, %ExUnit.TestCase{state: {:failed, failures}} = test_case}, config) do + formatted = format_test_case_failure(test_case, failures, config.failure_counter + length(test_case.tests), config.width, &formatter(&1, &2, config)) - print_failure(formatted, config) - {:ok, %{config | failures_counter: config.failures_counter + 1}} - end - def handle_event(_, config) do - {:ok, config} + print_failure(formatted, config) + test_counter = Enum.reduce(test_case.tests, config.test_counter, &update_test_counter(&2, &1)) + {:noreply, %{config | test_counter: test_counter, failure_counter: config.failure_counter + length(test_case.tests)}} end ## Tracing - defp trace_test_name(%ExUnit.Test{name: name}) do - case Atom.to_string(name) do - "test " <> rest -> rest - rest -> rest - end - end - defp trace_test_time(%ExUnit.Test{time: time}) do "#{format_us(time)}ms" end defp trace_test_result(test) do - "\r * #{trace_test_name test} (#{trace_test_time(test)})" + "\r * #{test.name} (#{trace_test_time(test)})" end defp trace_test_skip(test) do - "\r * #{trace_test_name test} (skipped)" + "\r * #{test.name} (skipped)" end defp format_us(us) do @@ -124,27 +121,37 @@ defmodule ExUnit.CLIFormatter do end end + defp update_test_counter(test_counter, %{tags: %{type: type}}) do + Map.update(test_counter, type, 1, &(&1 + 1)) + end + ## Printing defp print_suite(config, run_us, load_us) do IO.write "\n\n" IO.puts format_time(run_us, load_us) - message = "#{config.tests_counter} tests, #{config.failures_counter} failures" + # singular/plural + test_type_counts = format_test_type_counts(config) + failure_pl = pluralize(config.failure_counter, "failure", "failures") - if config.invalids_counter > 0 do - message = message <> ", #{config.invalids_counter} invalid" - end + message = + "#{test_type_counts}#{config.failure_counter} #{failure_pl}" + |> if_true(config.skipped_counter > 0, & &1 <> ", #{config.skipped_counter} skipped") + |> if_true(config.invalid_counter > 0, & &1 <> ", #{config.invalid_counter} invalid") cond do - config.failures_counter > 0 -> IO.puts failure(message, config) - config.invalids_counter > 0 -> IO.puts invalid(message, config) - true -> IO.puts success(message, config) + config.failure_counter > 0 -> IO.puts failure(message, config) + config.invalid_counter > 0 -> IO.puts invalid(message, config) + true -> IO.puts success(message, config) end IO.puts "\nRandomized with seed #{config.seed}" end + defp if_true(value, false, _fun), do: value + defp if_true(value, true, fun), do: fun.(value) + defp print_filters([include: [], exclude: []]) do :ok end @@ -164,30 +171,82 @@ defmodule 
ExUnit.CLIFormatter do IO.puts formatted end + defp format_test_type_counts(%{test_counter: test_counter} = _config) do + Enum.map test_counter, fn {type, count} -> + type_pluralized = pluralize(count, type, ExUnit.plural_rule(type |> to_string())) + "#{count} #{type_pluralized}, " + end + end + # Color styles - defp colorize(escape, string, %{color: color}) do - IO.ANSI.escape_fragment("%{#{escape}}", color) - <> string - <> IO.ANSI.escape_fragment("%{reset}", color) + defp colorize(escape, string, %{colors: colors}) do + if colors[:enabled] do + [escape, string, :reset] + |> IO.ANSI.format_fragment(true) + |> IO.iodata_to_binary + else + string + end end defp success(msg, config) do - colorize("green", msg, config) + colorize(:green, msg, config) end defp invalid(msg, config) do - colorize("yellow", msg, config) + colorize(:yellow, msg, config) end defp failure(msg, config) do - colorize("red", msg, config) + colorize(:red, msg, config) end - defp formatter(:error_info, msg, config), do: colorize("red", msg, config) - defp formatter(:extra_info, msg, config), do: colorize("cyan", msg, config) - defp formatter(:location_info, msg, config), do: colorize("bright,black", msg, config) - defp formatter(_, msg, _config), do: msg + defp formatter(:diff_enabled?, _, %{colors: colors}), + do: colors[:enabled] + + defp formatter(:error_info, msg, config), + do: colorize(:red, msg, config) + + defp formatter(:extra_info, msg, config), + do: colorize(:cyan, msg, config) + + defp formatter(:location_info, msg, config), + do: colorize([:bright, :black], msg, config) + + defp formatter(:diff_delete, msg, config), + do: colorize(:red, msg, config) + + defp formatter(:diff_delete_whitespace, msg, config), + do: colorize(IO.ANSI.color_background(2, 0, 0), msg, config) + + defp formatter(:diff_insert, msg, config), + do: colorize(:green, msg, config) + + defp formatter(:diff_insert_whitespace, msg, config), + do: colorize(IO.ANSI.color_background(0, 2, 0), msg, config) + + defp formatter(:blame_same, msg, %{colors: colors} = config) do + if colors[:enabled] do + colorize(:normal, msg, config) + else + "+" <> msg <> "+" + end + end + + defp formatter(:blame_diff, msg, %{colors: colors} = config) do + if colors[:enabled] do + colorize(:red, msg, config) + else + "-" <> msg <> "-" + end + end + + defp formatter(_, msg, _config), + do: msg + + defp pluralize(1, singular, _plural), do: singular + defp pluralize(_, _singular, plural), do: plural defp get_terminal_width do case :io.columns do @@ -195,4 +254,12 @@ defmodule ExUnit.CLIFormatter do _ -> 80 end end + + defp print_logs(""), do: nil + + defp print_logs(output) do + indent = "\n " + output = String.replace(output, "\n", indent) + IO.puts([" The following output was logged:", indent | output]) + end end diff --git a/lib/ex_unit/lib/ex_unit/diff.ex b/lib/ex_unit/lib/ex_unit/diff.ex new file mode 100644 index 00000000000..25a1e56e7bc --- /dev/null +++ b/lib/ex_unit/lib/ex_unit/diff.ex @@ -0,0 +1,465 @@ +defmodule ExUnit.Diff do + @moduledoc false + + @doc """ + Returns an edit script representing the difference between `left` and `right`. + + Returns `nil` if they are not the same data type, + or if the given data type is not supported. 
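Editor's note: to make the notion of an "edit script" concrete, the function returns nested fragments tagged `:eq`, `:del` and `:ins`, which the CLI formatter above renders via its `:diff_insert`/`:diff_delete` styles. A rough sketch of the expected shape for two short printable strings, assuming the clauses defined below (`ExUnit.Diff` is private, `@moduledoc false`, so this is for illustration only):

    ExUnit.Diff.script("fox", "fix")
    #=> [{:eq, "\""}, [eq: "f", del: "o", ins: "i", eq: "x"], {:eq, "\""}]

The inner keyword list comes from `String.myers_difference/2`; the surrounding `{:eq, "\""}` fragments are the quote characters added when the binaries are escaped.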
+ """ + def script(left, right) + + def script(term, term) + when is_binary(term) or is_number(term) + when is_map(term) or is_list(term) or is_tuple(term) do + [eq: inspect(term)] + end + + # Binaries + def script(left, right) when is_binary(left) and is_binary(right) do + if String.printable?(left) and String.printable?(right) do + script_string(left, right, ?\") + end + end + + # Structs + def script(%name{} = left, %name{} = right) do + left = Map.from_struct(left) + right = Map.from_struct(right) + script_map(left, right, inspect(name)) + end + + # Maps + def script(%{} = left, %{} = right) do + if match?(%_{}, left) or match?(%_{}, right) do + nil + else + script_map(left, right, "") + end + end + + # Char lists and lists + def script(left, right) when is_list(left) and is_list(right) do + if Inspect.List.printable?(left) and Inspect.List.printable?(right) do + script_string(List.to_string(left), List.to_string(right), ?') + else + keywords? = Inspect.List.keyword?(left) and Inspect.List.keyword?(right) + script_list_new(left, right, keywords?) + end + end + + # Numbers + def script(left, right) + when is_integer(left) and is_integer(right) + when is_float(left) and is_float(right) do + script_string(inspect(left), inspect(right)) + end + + # Tuples + def script(left, right) + when is_tuple(left) and is_tuple(right) do + left = {left, tuple_size(left) - 1} + right = {right, tuple_size(right) - 1} + script_tuple(left, right, []) + end + + def script(_left, _right), do: nil + + defp script_string(string1, string2, token) do + length1 = String.length(string1) + length2 = String.length(string2) + if bag_distance(string1, string2) / max(length1, length2) <= 0.6 do + {escaped1, _} = Inspect.BitString.escape(string1, token) + {escaped2, _} = Inspect.BitString.escape(string2, token) + string1 = IO.iodata_to_binary escaped1 + string2 = IO.iodata_to_binary escaped2 + [{:eq, <>}, script_string(string1, string2), {:eq, <>}] + end + end + + defp script_string(string1, string2) do + String.myers_difference(string1, string2) + end + + defp check_if_proper_and_get_length([_ | rest], length), + do: check_if_proper_and_get_length(rest, length + 1) + + defp check_if_proper_and_get_length([], length), + do: {true, length} + + defp check_if_proper_and_get_length(_other, length), + do: {false, length + 1} + + # The algorithm is outlined in the + # "String Matching with Metric Trees Using an Approximate Distance" + # paper by Ilaria Bartolini, Paolo Ciaccia, and Marco Patella. + defp bag_distance(string1, string2) do + bag1 = string_to_bag(string1) + bag2 = string_to_bag(string2) + + diff1 = bag_difference(bag1, bag2) + diff2 = bag_difference(bag2, bag1) + + max(diff1, diff2) + end + + defp string_to_bag(string) do + string_to_bag(string, %{}, &(&1 + 1)) + end + + defp string_to_bag(string, bag, fun) do + case String.next_grapheme(string) do + {char, rest} -> + bag = Map.update(bag, char, 1, fun) + string_to_bag(rest, bag, fun) + nil -> + bag + end + end + + defp bag_difference(bag1, bag2) do + Enum.reduce(bag1, 0, fn {char, count1}, sum -> + case Map.fetch(bag2, char) do + {:ok, count2} -> + sum + max(count1 - count2, 0) + :error -> + sum + count1 + end + end) + end + + defp script_list_new(list1, list2, keywords?) do + {proper1?, length1} = check_if_proper_and_get_length(list1, 0) + {proper2?, length2} = check_if_proper_and_get_length(list2, 0) + + if proper1? and proper2? do + initial_path = {0, 0, list1, list2, []} + result = + find_script(0, length1 + length2, [initial_path], keywords?) 
+ |> format_each_fragment([], keywords?) + [{:eq, "["}, result, {:eq, "]"}] + else + script_list(list1, list2, []) + end + end + + defp format_each_fragment([{:diff, script}], [], _keywords?), + do: script + + defp format_each_fragment([{kind, elems}], [], keywords?), + do: [format_fragment(kind, elems, keywords?)] + + defp format_each_fragment([_, _] = fragments, acc, keywords?) do + result = + case fragments do + [diff: script1, diff: script2] -> + [script1, {:eq, ", "}, script2] + + [{:diff, script}, {kind, elems}] -> + [script, {kind, ", "}, format_fragment(kind, elems, keywords?)] + + [{kind, elems}, {:diff, script}] -> + [format_fragment(kind, elems, keywords?), {kind, ", "}, script] + + [del: elems1, ins: elems2] -> + [format_fragment(:del, elems1, keywords?), format_fragment(:ins, elems2, keywords?)] + + [{:eq, elems1}, {kind, elems2}] -> + [format_fragment(:eq, elems1, keywords?), {kind, ", "}, format_fragment(kind, elems2, keywords?)] + + [{kind, elems1}, {:eq, elems2}] -> + [format_fragment(kind, elems1, keywords?), {kind, ", "}, format_fragment(:eq, elems2, keywords?)] + end + Enum.reverse(acc, result) + end + + defp format_each_fragment([{:diff, script} | rest], acc, keywords?) do + format_each_fragment(rest, [{:eq, ", "}, script | acc], keywords?) + end + + defp format_each_fragment([{kind, elems} | rest], acc, keywords?) do + new_acc = [{kind, ", "}, format_fragment(kind, elems, keywords?) | acc] + format_each_fragment(rest, new_acc, keywords?) + end + + defp format_fragment(kind, elems, keywords?) do + formatter = fn + {key, val} when keywords? -> + format_key_value(key, val, true) + elem -> + inspect(elem) + end + {kind, Enum.map_join(elems, ", ", formatter)} + end + + defp find_script(envelope, max, _paths, _keywords?) when envelope > max do + nil + end + + defp find_script(envelope, max, paths, keywords?) do + case each_diagonal(-envelope, envelope, paths, [], keywords?) do + {:done, edits} -> + compact_reverse(edits, []) + {:next, paths} -> find_script(envelope + 1, max, paths, keywords?) + end + end + + defp compact_reverse([], acc), + do: acc + + defp compact_reverse([{:diff, _} = fragment | rest], acc), + do: compact_reverse(rest, [fragment | acc]) + + defp compact_reverse([{kind, char} | rest], [{kind, chars} | acc]), + do: compact_reverse(rest, [{kind, [char | chars]} | acc]) + + defp compact_reverse([{kind, char} | rest], acc), + do: compact_reverse(rest, [{kind, [char]} | acc]) + + defp each_diagonal(diag, limit, _paths, next_paths, _keywords?) when diag > limit do + {:next, Enum.reverse(next_paths)} + end + + defp each_diagonal(diag, limit, paths, next_paths, keywords?) do + {path, rest} = proceed_path(diag, limit, paths, keywords?) + with {:cont, path} <- follow_snake(path) do + each_diagonal(diag + 2, limit, rest, [path | next_paths], keywords?) + end + end + + defp proceed_path(0, 0, [path], _keywords?), do: {path, []} + + defp proceed_path(diag, limit, [path | _] = paths, keywords?) when diag == -limit do + {move_down(path, keywords?), paths} + end + + defp proceed_path(diag, limit, [path], keywords?) when diag == limit do + {move_right(path, keywords?), []} + end + + defp proceed_path(_diag, _limit, [path1, path2 | rest], keywords?) 
do + if elem(path1, 1) > elem(path2, 1) do + {move_right(path1, keywords?), [path2 | rest]} + else + {move_down(path2, keywords?), [path2 | rest]} + end + end + + defp script_keyword_inner({key, val1}, {key, val2}, true), + do: [{:eq, format_key(key, true)}, script_inner(val1, val2)] + + defp script_keyword_inner(_pair1, _pair2, true), + do: nil + + defp script_keyword_inner(elem1, elem2, false), + do: script(elem1, elem2) + + defp move_right({x, x, [elem1 | rest1] = list1, [elem2 | rest2], edits}, keywords?) do + if result = script_keyword_inner(elem1, elem2, keywords?) do + {x + 1, x + 1, rest1, rest2, [{:diff, result} | edits]} + else + {x + 1, x, list1, rest2, [{:ins, elem2} | edits]} + end + end + + defp move_right({x, y, list1, [elem | rest], edits}, _keywords?) do + {x + 1, y, list1, rest, [{:ins, elem} | edits]} + end + + defp move_right({x, y, list1, [], edits}, _keywords?) do + {x + 1, y, list1, [], edits} + end + + defp move_down({x, x, [elem1 | rest1], [elem2 | rest2] = list2, edits}, keywords?) do + if result = script_keyword_inner(elem1, elem2, keywords?) do + {x + 1, x + 1, rest1, rest2, [{:diff, result} | edits]} + else + {x, x + 1, rest1, list2, [{:del, elem1} | edits]} + end + end + + defp move_down({x, y, [elem | rest], list2, edits}, _keywords?) do + {x, y + 1, rest, list2, [{:del, elem} | edits]} + end + + defp move_down({x, y, [], list2, edits}, _keywords?) do + {x, y + 1, [], list2, edits} + end + + defp follow_snake({x, y, [elem | rest1], [elem | rest2], edits}) do + follow_snake({x + 1, y + 1, rest1, rest2, [{:eq, elem} | edits]}) + end + + defp follow_snake({_x, _y, [], [], edits}) do + {:done, edits} + end + + defp follow_snake(path) do + {:cont, path} + end + + defp script_list([], [], acc) do + [[_ | elem_diff] | rest] = Enum.reverse(acc) + [{:eq, "["}, [elem_diff | rest], {:eq, "]"}] + end + + defp script_list([], [elem | rest], acc) do + elem_diff = [ins: inspect(elem)] + script_list([], rest, [[ins: ", "] ++ elem_diff | acc]) + end + + defp script_list([elem | rest], [], acc) do + elem_diff = [del: inspect(elem)] + script_list(rest, [], [[del: ", "] ++ elem_diff | acc]) + end + + defp script_list([elem | rest1], [elem | rest2], acc) do + elem_diff = [eq: inspect(elem)] + script_list(rest1, rest2, [[eq: ", "] ++ elem_diff | acc]) + end + + defp script_list([elem1 | rest1], [elem2 | rest2], acc) do + elem_diff = script_inner(elem1, elem2) + script_list(rest1, rest2, [[eq: ", "] ++ elem_diff | acc]) + end + + defp script_list(last, [elem | rest], acc) do + joiner_diff = [del: " |", ins: ",", eq: " "] + elem_diff = script_inner(last, elem) + new_acc = [joiner_diff ++ elem_diff | acc] + script_list([], rest, new_acc) + end + + defp script_list([elem | rest], last, acc) do + joiner_diff = [del: ",", ins: " |", eq: " "] + elem_diff = script_inner(elem, last) + new_acc = [joiner_diff ++ elem_diff | acc] + script_list(rest, [], new_acc) + end + + defp script_list(last1, last2, acc) do + elem_diff = + cond do + last1 == [] -> + [ins: " | " <> inspect(last2)] + last2 == [] -> + [del: " | " <> inspect(last1)] + true -> + [eq: " | "] ++ script_inner(last1, last2) + end + script_list([], [], [elem_diff | acc]) + end + + defp script_tuple({_tuple1, -1}, {_tuple2, -1}, acc) do + [[_ | elem_diff] | rest] = acc + [{:eq, "{"}, [elem_diff | rest], {:eq, "}"}] + end + + defp script_tuple({tuple1, index1}, {_, index2} = right, acc) + when index1 > index2 do + elem = elem(tuple1, index1) + elem_diff = [del: ", ", del: inspect(elem)] + script_tuple({tuple1, index1 - 1}, right, 
[elem_diff | acc]) + end + + defp script_tuple({_, index1} = left, {tuple2, index2}, acc) + when index1 < index2 do + elem = elem(tuple2, index2) + elem_diff = [ins: ", ", ins: inspect(elem)] + script_tuple(left, {tuple2, index2 - 1}, [elem_diff | acc]) + end + + defp script_tuple({tuple1, index}, {tuple2, index}, acc) do + elem1 = elem(tuple1, index) + elem2 = elem(tuple2, index) + elem_diff = script_inner(elem1, elem2) + script_tuple({tuple1, index - 1}, {tuple2, index - 1}, [[eq: ", "] ++ elem_diff | acc]) + end + + defp script_map(left, right, name) do + {surplus, altered, missing, same} = map_difference(left, right) + + keywords? = + Inspect.List.keyword?(surplus) and + Inspect.List.keyword?(altered) and + Inspect.List.keyword?(missing) and + Inspect.List.keyword?(same) + + result = Enum.reduce(missing, [], fn({key, val}, acc) -> + map_pair = format_key_value(key, val, keywords?) + [[ins: ", ", ins: map_pair] | acc] + end) + result = + if same == [] and altered == [] and missing != [] and surplus != [] do + [[_ | elem_diff] | rest] = result + [elem_diff | rest] + else + result + end + + result = Enum.reduce(surplus, result, fn({key, val}, acc) -> + map_pair = format_key_value(key, val, keywords?) + [[del: ", ", del: map_pair] | acc] + end) + + result = Enum.reduce(altered, result, fn({key, {val1, val2}}, acc) -> + value_diff = script_inner(val1, val2) + [[{:eq, ", "}, {:eq, format_key(key, keywords?)}, value_diff] | acc] + end) + + result = Enum.reduce(same, result, fn({key, val}, acc) -> + map_pair = format_key_value(key, val, keywords?) + [[eq: ", ", eq: map_pair] | acc] + end) + + [[_ | elem_diff] | rest] = result + [{:eq, "%" <> name <> "{"}, [elem_diff | rest], {:eq, "}"}] + end + + defp map_difference(map1, map2) do + {surplus, altered, same} = + Enum.reduce(map1, {[], [], []}, fn({key, val1}, {surplus, altered, same}) -> + case Map.fetch(map2, key) do + {:ok, ^val1} -> + {surplus, altered, [{key, val1} | same]} + {:ok, val2} -> + {surplus, [{key, {val1, val2}} | altered], same} + :error -> + {[{key, val1} | surplus], altered, same} + end + end) + missing = Enum.reduce(map2, [], fn({key, _} = pair, acc) -> + if Map.has_key?(map1, key), do: acc, else: [pair | acc] + end) + {surplus, altered, missing, same} + end + + defp format_key(key, false) do + inspect(key) <> " => " + end + + defp format_key(key, true) when is_nil(key) or is_boolean(key) do + inspect(key) <> ": " + end + + defp format_key(key, true) do + ":" <> result = inspect(key) + result <> ": " + end + + defp format_key_value(key, value, keyword?) do + format_key(key, keyword?) <> inspect(value) + end + + defp script_inner(term, term) do + [eq: inspect(term)] + end + + defp script_inner(left, right) do + if result = script(left, right) do + result + else + [del: inspect(left), ins: inspect(right)] + end + end +end diff --git a/lib/ex_unit/lib/ex_unit/doc_test.ex b/lib/ex_unit/lib/ex_unit/doc_test.ex index 7b336b4c035..5c3b40d8f2b 100644 --- a/lib/ex_unit/lib/ex_unit/doc_test.ex +++ b/lib/ex_unit/lib/ex_unit/doc_test.ex @@ -1,34 +1,34 @@ defmodule ExUnit.DocTest do @moduledoc """ ExUnit.DocTest implements functionality similar to [Python's - doctest](http://docs.python.org/2/library/doctest.html). + doctest](https://docs.python.org/2/library/doctest.html). - In a nutshell, it allows us to generate tests from the code - examples existing in a module/function/macro's documentation. 
- In order to do that, one needs to invoke the `doctest/1` macro - from their test case and write their examples according - to some guidelines. + It allows us to generate tests from the code + examples in a module/function/macro's documentation. + To do this, invoke the `doctest/1` macro from within + your test case and ensure your code examples are written + according to the syntax and guidelines below. - The syntax for examples is as follows. Every new test starts - on a new line, with an `iex>` prefix. Multiline expressions - can be employed if the following lines start with either - `...>` (recommended) or `iex>` prefix. + ## Syntax - The expected result should start at the next line after `iex>` + Every new test starts on a new line, with an `iex>` prefix. + Multiline expressions can be used by prefixing subsequent lines with either + `...>` (recommended) or `iex>`. + + The expected result should start at the next line after the `iex>` or `...>` line(s) and is terminated either by a newline, new - `iex>` prefix or end of the string literal. + `iex>` prefix or the end of the string literal. ## Examples - Currently, the only way to run doctests is to include them into - an ExUnit case with a `doctest` macro: + To run doctests include them in an ExUnit case with a `doctest` macro: defmodule MyModule.Test do use ExUnit.Case, async: true doctest MyModule end - The `doctest` macro is going to loop through all functions and + The `doctest` macro loops through all functions and macros defined in `MyModule`, parsing their documentation in search of code examples. @@ -42,7 +42,7 @@ defmodule ExUnit.DocTest do iex> Enum.map [1, 2, 3], fn(x) -> ...> x * 2 ...> end - [2,4,6] + [2, 4, 6] Multiple results can be checked within the same test: @@ -57,35 +57,70 @@ defmodule ExUnit.DocTest do iex> a = 1 1 - iex> a + 1 # will fail with a "function a/0 undefined" error + iex> a + 1 # will fail with a "undefined function a/0" error 2 - Similarly to iex you can use numbers in your "prompts": + If you don't want to assert for every result in a doctest, you can omit + the result: + + iex> pid = spawn fn -> :ok end + iex> is_pid(pid) + true + + This is useful when the result is something variable (like a PID in the + example above) or when the result is a complicated data structure and you + don't want to show it all, but just parts of it or some of its properties. + + Similarly to IEx you can use numbers in your "prompts": - iex(1)> [1+2, + iex(1)> [1 + 2, ...(1)> 3] - [3,3] + [3, 3] - This is useful in two use cases: + This is useful in two cases: * being able to refer to specific numbered scenarios - * copy-pasting examples from an actual iex session + * copy-pasting examples from an actual IEx session - We also allow you to select or skip some functions when calling - `doctest`. See the documentation for more info. + You can also select or skip functions when calling + `doctest`. See the documentation on the `:except` and `:only` options below + for more info. ## Opaque types - Some types internal structure are kept hidden and instead show a - user-friendly structure when inspecting the value. The idiom in - Elixir is to print those data types as `#Name<...>`. Doctest will - test these values by doing a string compare. + Some types' internal structures are kept hidden and instead show a + user-friendly structure when inspected. The idiom in + Elixir is to print those data types in the format `#Name<...>`. 
Because those + values are treated as comments in Elixir code due to the leading + `#` sign, they require special care when being used in doctests. + + Imagine you have a map that contains a MapSet and is printed as: + + %{users: #MapSet<[:foo, :bar]>} + + If you try to match on such an expression, `doctest` will fail to compile. + There are two ways to resolve this. - iex> Enum.into([a: 10, b: 20], HashDict.new) - #HashDict<[b: 20, a: 10]> + The first is to rely on the fact that doctest can compare internal + structures as long as they are at the root. So one could write: - The above example will be tested with the following match: - `"#HashDict<[b: 20, a: 10]>" = inspect(Enum.into([a: 10, b: 20], HashDict.new))`. + iex> map = %{users: Enum.into([:foo, :bar], MapSet.new)} + iex> map.users + #MapSet<[:foo, :bar]> + + Whenever a doctest starts with "#Name<", `doctest` will perform a string + comparison. For example, the above test will perform the following match: + + inspect(map.users) == "#MapSet<[:foo, :bar]>" + + Alternatively, since doctest results are actually evaluated, you can have + the MapSet building expression as the doctest result: + + iex> %{users: Enum.into([:foo, :bar], MapSet.new)} + %{users: Enum.into([:foo, :bar], MapSet.new)} + + The downside of this approach is that the doctest result is not really + what users would see in the terminal. ## Exceptions @@ -114,6 +149,15 @@ defmodule ExUnit.DocTest do defmodule Error do defexception [:message] + + def exception(opts) do + module = Keyword.fetch!(opts, :module) + message = Keyword.fetch!(opts, :message) + + file = module.__info__(:compile)[:source] |> Path.relative_to_cwd + info = Exception.format_file_line(file, opts[:line]) + %__MODULE__{message: info <> " " <> message} + end end @doc """ @@ -122,22 +166,22 @@ defmodule ExUnit.DocTest do Calling `doctest(Module)` will generate tests for all doctests found in the module `Module` - Options can also be supplied: + Options can also be given: - * `:except` — generate tests for all functions except those listed - (list of `{function, arity}` tuples). + * `:except` - generates tests for all functions except those listed + (list of `{function, arity}` tuples, and/or `:moduledoc`). - * `:only` — generate tests only for functions listed - (list of `{function, arity}` tuples). + * `:only` - generates tests only for functions listed + (list of `{function, arity}` tuples, and/or `:moduledoc`). - * `:import` — when true, one can test a function defined in the module + * `:import` - when `true`, one can test a function defined in the module without referring to the module name. However, this is not feasible when - there is a clash with a module like Kernel. In these cases, `import` + there is a clash with a module like Kernel. In these cases, `:import` should be set to `false` and a full `M.f` construct should be used. ## Examples - doctest MyModule, except: [trick_fun: 1] + doctest MyModule, except: [:moduledoc, trick_fun: 1] This macro is auto-imported with every `ExUnit.Case`. 
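Editor's note: a brief sketch of how these options can be combined (the module and function names below are made up for the example and are not part of this diff):

    defmodule MyModule.DocTest do
      use ExUnit.Case, async: true

      # Generate tests only for the moduledoc examples and the doctests
      # of parse/1; with import: true the examples may call parse(...)
      # without the MyModule. prefix.
      doctest MyModule, only: [:moduledoc, parse: 1], import: true
    end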
""" @@ -149,9 +193,10 @@ defmodule ExUnit.DocTest do end end - tests = quote bind_quoted: binding do + tests = quote bind_quoted: binding() do file = "(for doctest at) " <> Path.relative_to_cwd(mod.__info__(:compile)[:source]) for {name, test} <- ExUnit.DocTest.__doctests__(mod, opts) do + @tag :doctest @file file test name, do: unquote(test) end @@ -175,11 +220,9 @@ defmodule ExUnit.DocTest do defp filter_by_opts(tests, opts) do only = opts[:only] || [] except = opts[:except] || [] - - Stream.filter(tests, fn(test) -> - fa = test.fun_arity - Enum.all?(except, &(&1 != fa)) and Enum.all?(only, &(&1 == fa)) - end) + tests + |> Stream.reject(&(&1.fun_arity in except)) + |> Stream.filter(&(Enum.empty?(only) or &1.fun_arity in only)) end ## Compilation of extracted tests @@ -188,7 +231,7 @@ defmodule ExUnit.DocTest do {test_name(test, module, n), test_content(test, module, do_import)} end - defp test_name(%{fun_arity: nil}, m, n) do + defp test_name(%{fun_arity: :moduledoc}, m, n) do "moduledoc at #{inspect m} (#{n})" end @@ -196,19 +239,18 @@ defmodule ExUnit.DocTest do "doc at #{inspect m}.#{f}/#{a} (#{n})" end - defp test_content(%{exprs: exprs, line: line, fun_arity: fun_arity}, module, do_import) do - file = module.__info__(:compile)[:source] |> List.to_string - location = [line: line, file: Path.relative_to_cwd(file)] + defp test_content(%{exprs: exprs, line: line}, module, do_import) do + file = module.__info__(:compile)[:source] |> Path.relative_to_cwd + location = [line: line, file: file] stack = Macro.escape [{module, :__MODULE__, 0, location}] if multiple_exceptions?(exprs) do - {fun, arity} = fun_arity - raise Error, message: "multiple exceptions in one doctest case are not supported. " - "Invalid doctest for #{inspect module}.#{fun}/#{arity}" + raise Error, line: line, module: module, + message: "multiple exceptions in one doctest case are not supported" end tests = Enum.map exprs, fn {expr, expected} -> - test_case_content(expr, expected, module, line, file, stack) + test_case_content(expr, expected, location, stack) end quote do @@ -239,17 +281,18 @@ defmodule ExUnit.DocTest do reraise e, stack error -> + original_stack = System.stacktrace reraise ExUnit.AssertionError, - [message: "Doctest failed: got #{inspect(error.__struct__)} with message #{Exception.message(error)}", - expr: unquote(whole_expr)], - stack + [message: "Doctest failed: got #{inspect(error.__struct__)} with message " <> inspect(Exception.message(error)), + expr: unquote(String.trim(whole_expr))], + original_stack end end end - defp test_case_content(expr, {:test, expected}, module, line, file, stack) do - expr_ast = string_to_quoted(module, line, file, expr) - expected_ast = string_to_quoted(module, line, file, expected) + defp test_case_content(expr, {:test, expected}, location, stack) do + expr_ast = string_to_quoted(location, stack, expr) + expected_ast = string_to_quoted(location, stack, expected) quote do expected = unquote(expected_ast) @@ -258,16 +301,19 @@ defmodule ExUnit.DocTest do actual -> reraise ExUnit.AssertionError, [message: "Doctest failed", - expr: "#{unquote(String.strip(expr))} === #{unquote(String.strip(expected))}", + expr: "#{unquote(String.trim(expr))} === #{unquote(String.trim(expected))}", left: actual], unquote(stack) end end end - defp test_case_content(expr, {:inspect, expected}, module, line, file, stack) do - expr_ast = quote do: inspect(unquote(string_to_quoted(module, line, file, expr))) - expected_ast = string_to_quoted(module, line, file, expected) + defp 
test_case_content(expr, {:inspect, expected}, location, stack) do + expr_ast = + quote do + inspect(unquote(string_to_quoted(location, stack, expr))) + end + expected_ast = string_to_quoted(location, stack, expected) quote do expected = unquote(expected_ast) @@ -276,37 +322,47 @@ defmodule ExUnit.DocTest do actual -> reraise ExUnit.AssertionError, [message: "Doctest failed", - expr: "inspect(#{unquote(String.strip(expr))}) === #{unquote(String.strip(expected))}", + expr: "inspect(#{unquote(String.trim(expr))}) === #{unquote(String.trim(expected))}", left: actual], unquote(stack) end end end - defp test_case_content(expr, {:error, exception, message}, module, line, file, stack) do - expr_ast = string_to_quoted(module, line, file, expr) + defp test_case_content(expr, {:error, exception, message}, location, stack) do + expr_ast = string_to_quoted(location, stack, expr) quote do stack = unquote(stack) - expr = unquote(String.strip(expr)) - spec = inspect(unquote(exception)) <> " with message " <> inspect(unquote(message)) + expr = unquote(String.trim(expr)) try do unquote(expr_ast) rescue error -> - unless error.__struct__ == unquote(exception) and - Exception.message(error) == unquote(message) do - got = inspect(error.__struct__) <> " with message " <> inspect(Exception.message(error)) - reraise ExUnit.AssertionError, - [message: "Doctest failed: expected exception #{spec} but got #{got}", - expr: expr], - stack + actual_exception = error.__struct__ + actual_message = Exception.message(error) + message = + cond do + actual_exception != unquote(exception) -> + "Doctest failed: expected exception #{inspect(unquote(exception))} but got #{inspect(actual_exception)} with message #{inspect(actual_message)}" + actual_message != unquote(message) -> + "Doctest failed: wrong message for #{inspect(actual_exception)}\n" <> + "expected:\n" <> + " #{inspect(unquote(message))}\n" <> + "actual:\n" <> + " #{inspect(actual_message)}" + true -> + nil + end + + if message do + reraise ExUnit.AssertionError, [message: message, expr: expr], stack end else _ -> reraise ExUnit.AssertionError, - [message: "Doctest failed: expected exception #{spec} but nothing was raised", + [message: "Doctest failed: expected exception #{inspect(unquote(exception))} but nothing was raised", expr: expr], stack end @@ -318,9 +374,7 @@ defmodule ExUnit.DocTest do [quote do: import(unquote(mod))] end - defp string_to_quoted(module, line, file, expr) do - location = [line: line, file: Path.relative_to_cwd(file)] - stack = Macro.escape [{module, :__MODULE__, 0, location}] + defp string_to_quoted(location, stack, expr) do try do Code.string_to_quoted!(expr, location) rescue @@ -329,7 +383,7 @@ defmodule ExUnit.DocTest do quote do reraise ExUnit.AssertionError, [message: "Doctest did not compile, got: #{unquote(message)}", - expr: unquote(String.strip(expr))], + expr: unquote(String.trim(expr))], unquote(stack) end end @@ -339,88 +393,111 @@ defmodule ExUnit.DocTest do defp extract(module) do all_docs = Code.get_docs(module, :all) - moduledocs = extract_from_moduledoc(all_docs[:moduledoc]) - docs = for doc <- all_docs[:docs] do - extract_from_doc(doc) - end |> Enum.concat + unless all_docs do + raise Error, module: module, message: + "could not retrieve the documentation for module #{inspect module}. 
" <> + "The module was not compiled with documentation or its BEAM file cannot be accessed" + end + + moduledocs = extract_from_moduledoc(all_docs[:moduledoc], module) + + docs = for doc <- all_docs[:docs], + doc <- extract_from_doc(doc, module), + do: doc moduledocs ++ docs end - defp extract_from_moduledoc({_, doc}) when doc in [false, nil], do: [] + defp extract_from_moduledoc({_, doc}, _module) when doc in [false, nil], do: [] - defp extract_from_moduledoc({line, doc}) do - extract_tests(line, doc) + defp extract_from_moduledoc({line, doc}, module) do + for test <- extract_tests(line, doc, module) do + normalize_test(test, :moduledoc) + end end - defp extract_from_doc({_, _, _, _, doc}) when doc in [false, nil], do: [] + defp extract_from_doc({_, _, _, _, doc}, _module) when doc in [false, nil], do: [] - defp extract_from_doc({fa, line, _, _, doc}) do - for test <- extract_tests(line, doc) do - %{test | fun_arity: fa} + defp extract_from_doc({fa, line, _, _, doc}, module) do + for test <- extract_tests(line, doc, module) do + normalize_test(test, fa) end end - defp extract_tests(line, doc) do - lines = String.split(doc, ~r/\n/, trim: false) |> adjust_indent - extract_tests(lines, line, "", "", [], true) + defp extract_tests(line_no, doc, module) do + all_lines = String.split(doc, "\n", trim: false) + lines = adjust_indent(all_lines, line_no + 1, module) + extract_tests(lines, "", "", [], true, module) end - defp adjust_indent(lines) do - adjust_indent(lines, [], 0, :text) + @iex_prompt ["iex>", "iex("] + @dot_prompt ["...>", "...("] + + defp adjust_indent(lines, line_no, module) do + adjust_indent(:text, lines, line_no, [], 0, module) end - defp adjust_indent([], adjusted_lines, _indent, _) do + defp adjust_indent(_kind, [], _line_no, adjusted_lines, _indent, _module) do Enum.reverse adjusted_lines end - @iex_prompt ["iex>", "iex("] - @dot_prompt ["...>", "...("] - - defp adjust_indent([line|rest], adjusted_lines, indent, :text) do - case String.starts_with?(String.lstrip(line), @iex_prompt) do - true -> adjust_indent([line|rest], adjusted_lines, get_indent(line, indent), :prompt) - false -> adjust_indent(rest, adjusted_lines, indent, :text) + defp adjust_indent(:text, [line | rest], line_no, adjusted_lines, indent, module) do + case String.starts_with?(String.trim_leading(line), @iex_prompt) do + true -> + adjust_indent(:prompt, [line | rest], line_no, adjusted_lines, get_indent(line, indent), module) + false -> + adjust_indent(:text, rest, line_no + 1, adjusted_lines, indent, module) end end - defp adjust_indent([line|rest], adjusted_lines, indent, check) when check in [:prompt, :after_prompt] do + defp adjust_indent(kind, [line | rest], line_no, adjusted_lines, indent, module) + when kind in [:prompt, :after_prompt] do stripped_line = strip_indent(line, indent) - case String.lstrip(line) do + case String.trim_leading(line) do "" -> - raise Error, message: "expected non-blank line to follow iex> prompt" + raise Error, line: line_no, module: module, + message: "expected non-blank line to follow iex> prompt" ^stripped_line -> :ok _ -> - raise Error, message: "indentation level mismatch: #{inspect line}, should have been #{indent} spaces" - end + n_spaces = if indent == 1, + do: "#{indent} space", + else: "#{indent} spaces" - if String.starts_with?(stripped_line, @iex_prompt ++ @dot_prompt) do - adjust_indent(rest, [stripped_line|adjusted_lines], indent, :after_prompt) - else - next = if check == :prompt, do: :after_prompt, else: :code - adjust_indent(rest, 
[stripped_line|adjusted_lines], indent, next) + raise Error, line: line_no, module: module, + message: "indentation level mismatch: #{inspect line}, should have been #{n_spaces}" end + + adjusted_lines = [{stripped_line, line_no} | adjusted_lines] + + next = + cond do + kind == :prompt -> :after_prompt + String.starts_with?(stripped_line, @iex_prompt ++ @dot_prompt) -> :after_prompt + true -> :code + end + + adjust_indent(next, rest, line_no + 1, adjusted_lines, indent, module) end - defp adjust_indent([line|rest], adjusted_lines, indent, :code) do + defp adjust_indent(:code, [line | rest], line_no, adjusted_lines, indent, module) do stripped_line = strip_indent(line, indent) cond do stripped_line == "" -> - adjust_indent(rest, [stripped_line|adjusted_lines], 0, :text) - String.starts_with?(String.lstrip(line), @iex_prompt) -> - adjust_indent([line|rest], adjusted_lines, indent, :prompt) + adjust_indent(:text, rest, line_no + 1, [{stripped_line, line_no} | adjusted_lines], 0, module) + String.starts_with?(String.trim_leading(line), @iex_prompt) -> + adjust_indent(:prompt, [line | rest], line_no, adjusted_lines, indent, module) true -> - adjust_indent(rest, [stripped_line|adjusted_lines], indent, :code) + adjust_indent(:code, rest, line_no + 1, [{stripped_line, line_no} | adjusted_lines], indent, module) end end defp get_indent(line, current_indent) do - case Regex.run ~r/iex/, line, return: :index do - [{pos, _len}] -> pos - nil -> current_indent + case :binary.match line, "iex" do + {pos, _len} -> pos + :nomatch -> current_indent end end @@ -433,105 +510,145 @@ defmodule ExUnit.DocTest do end end - defp extract_tests([], _line, "", "", [], _) do + @fences ["```", "~~~"] + + defp extract_tests(lines, expr_acc, expected_acc, acc, new_test, module) + defp extract_tests([], "", "", [], _, _) do [] end - defp extract_tests([], _line, "", "", acc, _) do - Enum.reverse(reverse_last_test(acc)) + defp extract_tests([], "", "", acc, _, _) do + Enum.reverse(acc) end # End of input and we've still got a test pending. - defp extract_tests([], _, expr_acc, expected_acc, [test=%{exprs: exprs}|t], _) do - test = %{test | exprs: [{expr_acc, {:test, expected_acc}} | exprs]} - Enum.reverse(reverse_last_test([test|t])) + defp extract_tests([], expr_acc, expected_acc, [test | rest], _, _) do + test = add_expr(test, expr_acc, expected_acc) + Enum.reverse([test | rest]) end # We've encountered the next test on an adjacent line. Put them into one group. - defp extract_tests([<< "iex>", _ :: binary>>|_] = list, line, expr_acc, expected_acc, [test=%{exprs: exprs}|t], newtest) when expr_acc != "" and expected_acc != "" do - test = %{test | exprs: [{expr_acc, {:test, expected_acc}} | exprs]} - extract_tests(list, line, "", "", [test|t], newtest) + defp extract_tests([{"iex>" <> _, _} | _] = list, expr_acc, expected_acc, [test | rest], new_test, module) + when expr_acc != "" and expected_acc != "" do + test = add_expr(test, expr_acc, expected_acc) + extract_tests(list, "", "", [test | rest], new_test, module) end # Store expr_acc and start a new test case. 
- defp extract_tests([<< "iex>", string :: binary>>|lines], line, "", expected_acc, acc, true) do - acc = reverse_last_test(acc) - test = %{line: line, fun_arity: nil, exprs: []} - extract_tests(lines, line, string, expected_acc, [test|acc], false) + defp extract_tests([{"iex>" <> string, line_no} | lines], "", expected_acc, acc, true, module) do + test = %{line: line_no, fun_arity: nil, exprs: []} + extract_tests(lines, string, expected_acc, [test | acc], false, module) end # Store expr_acc. - defp extract_tests([<< "iex>", string :: binary>>|lines], line, "", expected_acc, acc, false) do - extract_tests(lines, line, string, expected_acc, acc, false) + defp extract_tests([{"iex>" <> string, _} | lines], "", expected_acc, acc, false, module) do + extract_tests(lines, string, expected_acc, acc, false, module) end # Still gathering expr_acc. Synonym for the next clause. - defp extract_tests([<< "iex>", string :: binary>>|lines], line, expr_acc, expected_acc, acc, newtest) do - extract_tests(lines, line, expr_acc <> "\n" <> string, expected_acc, acc, newtest) + defp extract_tests([{"iex>" <> string, _} | lines], expr_acc, expected_acc, acc, new_test, module) do + extract_tests(lines, expr_acc <> "\n" <> string, expected_acc, acc, new_test, module) end # Still gathering expr_acc. Synonym for the previous clause. - defp extract_tests([<< "...>", string :: binary>>|lines], line, expr_acc, expected_acc, acc, newtest) when expr_acc != "" do - extract_tests(lines, line, expr_acc <> "\n" <> string, expected_acc, acc, newtest) + defp extract_tests([{"...>" <> string, _} | lines], expr_acc, expected_acc, acc, new_test, module) + when expr_acc != "" do + extract_tests(lines, expr_acc <> "\n" <> string, expected_acc, acc, new_test, module) end # Expression numbers are simply skipped. - defp extract_tests([<< "iex(", _ :: 8, string :: binary>>|lines], line, expr_acc, expected_acc, acc, newtest) do - extract_tests(["iex" <> skip_iex_number(string)|lines], line, expr_acc, expected_acc, acc, newtest) + defp extract_tests([{<<"iex(", _>> <> string = line, line_no} | lines], + expr_acc, expected_acc, acc, new_test, module) do + extract_tests([{"iex" <> skip_iex_number(string, module, line_no, line), line_no} | lines], + expr_acc, expected_acc, acc, new_test, module) end # Expression numbers are simply skipped redux. - defp extract_tests([<< "...(", _ :: 8, string :: binary>>|lines], line, expr_acc, expected_acc, acc, newtest) do - extract_tests(["..." <> skip_iex_number(string)|lines], line, expr_acc, expected_acc, acc, newtest) + defp extract_tests([{<<"...(", _>> <> string, line_no} = line | lines], + expr_acc, expected_acc, acc, new_test, module) do + extract_tests([{"..." <> skip_iex_number(string, module, line_no, line), line_no} | lines], + expr_acc, expected_acc, acc, new_test, module) end # Skip empty or documentation line. 
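# Small sketch of the fence handling introduced by this patch (the @fences
# attribute above and the clause just below): a closing ``` or ~~~ finishes the
# pending doctest, so examples can sit inside fenced blocks without the fence
# line being read as part of the expected value. MyApp.Fenced is a hypothetical
# module, not part of the patch.
defmodule MyApp.Fenced do
  @moduledoc """
  Upcasing a string:

  ```
  iex> String.upcase("abc")
  "ABC"
  ```

  Regular prose continues after the fence.
  """
end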
- defp extract_tests([_|lines], line, "", "", acc, _) do - extract_tests(lines, line, "", "", acc, true) + defp extract_tests([_ | lines], "", "", acc, _, module) do + extract_tests(lines, "", "", acc, true, module) end - # Encountered an empty line, store pending test - defp extract_tests([""|lines], line, expr_acc, expected_acc, [test=%{exprs: exprs}|t], _) do - test = %{test | exprs: [{expr_acc, {:test, expected_acc}} | exprs]} - extract_tests(lines, line, "", "", [test|t], true) + # Encountered end of fenced code block, store pending test + defp extract_tests([{<> <> _, _} | lines], expr_acc, expected_acc, + [test | rest], _new_test, module) + when fence in @fences and expr_acc != "" do + test = add_expr(test, expr_acc, expected_acc) + extract_tests(lines, "", "", [test | rest], true, module) end - # Exception test. - defp extract_tests([<< "** (", string :: binary >>|lines], line, expr_acc, "", [test=%{exprs: exprs}|t], newtest) do - test = %{test | exprs: [{expr_acc, extract_error(string, "")} | exprs]} - extract_tests(lines, line, "", "", [test|t], newtest) + # Encountered an empty line, store pending test + defp extract_tests([{"", _} | lines], expr_acc, expected_acc, [test | rest], _new_test, module) do + test = add_expr(test, expr_acc, expected_acc) + extract_tests(lines, "", "", [test | rest], true, module) end # Finally, parse expected_acc. - defp extract_tests([expected|lines], line, expr_acc, expected_acc, [test=%{exprs: exprs}|t]=acc, newtest) do - if expected =~ ~r/^#[A-Z][\w\.]*<.*>$/ do - expected = expected_acc <> "\n" <> inspect(expected) - test = %{test | exprs: [{expr_acc, {:inspect, expected}} | exprs]} - extract_tests(lines, line, "", "", [test|t], newtest) - else - extract_tests(lines, line, expr_acc, expected_acc <> "\n" <> expected, acc, newtest) - end + defp extract_tests([{expected, _} | lines], expr_acc, "", acc, new_test, module) do + extract_tests(lines, expr_acc, expected, acc, new_test, module) + end + + defp extract_tests([{expected, _} | lines], expr_acc, expected_acc, acc, new_test, module) do + extract_tests(lines, expr_acc, expected_acc <> "\n" <> expected, acc, new_test, module) end - defp extract_error(<< ")", t :: binary >>, acc) do - {:error, Module.concat([acc]), String.strip(t)} + defp skip_iex_number(")>" <> string, _module, _line_no, _line) do + ">" <> string end - defp extract_error(<< h, t :: binary >>, acc) do - extract_error(t, << acc :: binary, h >>) + defp skip_iex_number("", module, line_no, line) do + message = + "unknown IEx prompt: #{inspect line}.\nAccepted formats are: iex>, iex(1)>, ...>, ...(1)>}" + raise Error, line: line_no, module: module, message: message end - defp skip_iex_number(<< ")", ">", string :: binary >>) do - ">" <> string + defp skip_iex_number(<<_>> <> string, module, line_no, line) do + skip_iex_number(string, module, line_no, line) end - defp skip_iex_number(<< _ :: 8, string :: binary >>) do - skip_iex_number(string) + defp normalize_test(%{exprs: exprs} = test, fa) do + %{test | fun_arity: fa, exprs: Enum.reverse(exprs)} end - defp reverse_last_test([]), do: [] - defp reverse_last_test([test=%{exprs: exprs} | t]) do - test = %{test | exprs: Enum.reverse(exprs)} - [test | t] + defp add_expr(%{exprs: exprs} = test, expr, expected) do + %{test | exprs: [{expr, tag_expected(expected)} | exprs]} end + + defp tag_expected(string) do + case string do + "** (" <> error -> + [mod, message] = :binary.split(error, ")") + {:error, Module.concat([mod]), String.trim_leading(message)} + _ -> + if is_inspected?(string) do + 
{:inspect, inspect(string)} + else + {:test, string} + end + end + end + + defp is_inspected?(<>) when char in ?A..?Z, + do: is_inspected_end?(rest) + defp is_inspected?(_), + do: false + + defp is_inspected_end?(<>) when char in ?A..?Z, + do: is_inspected_end?(rest) + defp is_inspected_end?(<>) + when char in ?A..?Z + when char in ?a..?z + when char in ?0..?9 + when char == ?_, + do: is_inspected_end?(rest) + defp is_inspected_end?(<>), + do: true + defp is_inspected_end?(_), + do: false end diff --git a/lib/ex_unit/lib/ex_unit/event_manager.ex b/lib/ex_unit/lib/ex_unit/event_manager.ex index ef0dc0fb838..05f6034952e 100644 --- a/lib/ex_unit/lib/ex_unit/event_manager.ex +++ b/lib/ex_unit/lib/ex_unit/event_manager.ex @@ -1,54 +1,72 @@ -# This module publishes events during the test suite run. -# This is used, for example, by formatters to print user -# information as well as internal statistics for ExUnit. defmodule ExUnit.EventManager do @moduledoc false + @timeout 30_000 - def start_link() do - :gen_event.start_link() - end - - def add_handler(ref, handler, args) do - :gen_event.add_handler(ref, handler, args) - end + # TODO: Remove support for GenEvent formatters on 2.0 - def delete_handler(ref, handler, args) do - :gen_event.delete_handler(ref, handler, args) - end + @doc """ + Starts an event manager that publishes events during the suite run. - def which_handlers(ref) do - :gen_event.which_handlers(ref) + This is what power formatters as well as the + internal statistics server for ExUnit. + """ + def start_link() do + import Supervisor.Spec + child = worker(GenServer, [], restart: :temporary) + {:ok, sup} = Supervisor.start_link([child], strategy: :simple_one_for_one) + {:ok, event} = :gen_event.start_link() + {:ok, {sup, event}} end - def call(ref, handler, request) do - :gen_event.call(ref, handler, request) + def stop({sup, event}) do + for {_, pid, _, _} <- Supervisor.which_children(sup) do + GenServer.stop(pid, :normal, @timeout) + end + Supervisor.stop(sup) + :gen_event.stop(event) end - def call(ref, handler, request, timeout) do - :gen_event.call(ref, handler, request, timeout) + def add_handler({sup, event}, handler, opts) do + if Code.ensure_loaded?(handler) and function_exported?(handler, :handle_call, 2) do + IO.warn "passing GenEvent handlers (#{inspect(handler)} in this case) in " <> + "the :formatters option of ExUnit is deprecated, please pass a " <> + "GenServer instead. 
Check the documentation for the ExUnit.Formatter " <> + "module for more information" + :gen_event.add_handler(event, handler, opts) + else + Supervisor.start_child(sup, [handler, opts]) + end end def suite_started(ref, opts) do - :gen_event.notify(ref, {:suite_started, opts}) + notify(ref, {:suite_started, opts}) end - def suite_finished(ref, load_us, run_us) do - :gen_event.notify(ref, {:suite_finished, load_us, run_us}) + def suite_finished(ref, run_us, load_us) do + notify(ref, {:suite_finished, run_us, load_us}) end def case_started(ref, test_case) do - :gen_event.notify(ref, {:case_started, test_case}) + notify(ref, {:case_started, test_case}) end def case_finished(ref, test_case) do - :gen_event.notify(ref, {:case_finished, test_case}) + notify(ref, {:case_finished, test_case}) end def test_started(ref, test) do - :gen_event.notify(ref, {:test_started, test}) + notify(ref, {:test_started, test}) end def test_finished(ref, test) do - :gen_event.notify(ref, {:test_finished, test}) + notify(ref, {:test_finished, test}) + end + + defp notify({sup, event}, msg) do + :gen_event.notify(event, msg) + for {_, pid, _, _} <- Supervisor.which_children(sup) do + GenServer.cast(pid, msg) + end + :ok end end diff --git a/lib/ex_unit/lib/ex_unit/filters.ex b/lib/ex_unit/lib/ex_unit/filters.ex index e7708ecffef..300918c3be2 100644 --- a/lib/ex_unit/lib/ex_unit/filters.ex +++ b/lib/ex_unit/lib/ex_unit/filters.ex @@ -14,10 +14,11 @@ defmodule ExUnit.Filters do """ @spec parse_path(String.t) :: {String.t, any} def parse_path(file) do - case Regex.run(~r/^(.+):(\d+)$/, file, capture: :all_but_first) do - [file, line_number] -> - {file, exclude: [:test], include: [line: line_number]} - nil -> + {paths, [line]} = file |> String.split(":") |> Enum.split(-1) + case Integer.parse(line) do + {_, ""} -> + {Enum.join(paths, ":"), exclude: [:test], include: [line: line]} + _ -> {file, []} end end @@ -77,20 +78,29 @@ defmodule ExUnit.Filters do :ok iex> ExUnit.Filters.eval([foo: "bar"], [:foo], %{foo: "baz"}, []) - {:error, :foo} + {:error, "due to foo filter"} """ - @spec eval(t, t, map, [ExUnit.Test.t]) :: :ok | {:error, atom} + @spec eval(t, t, map, [ExUnit.Test.t]) :: :ok | {:error, binary} def eval(include, exclude, tags, collection) when is_map(tags) do - excluded = Enum.find_value exclude, &has_tag(&1, tags, collection) - if !excluded or Enum.any?(include, &has_tag(&1, tags, collection)) do - :ok - else - {:error, excluded} + skip? = not Enum.any?(include, &has_tag(&1, %{skip: true}, collection)) + + case Map.fetch(tags, :skip) do + {:ok, msg} when is_binary(msg) and skip? -> + {:error, msg} + {:ok, true} when skip? -> + {:error, "due to skip tag"} + _ -> + excluded = Enum.find_value exclude, &has_tag(&1, tags, collection) + if !excluded or Enum.any?(include, &has_tag(&1, tags, collection)) do + :ok + else + {:error, "due to #{excluded} filter"} + end end end - defp has_tag({:line, line}, tags, collection) do + defp has_tag({:line, line}, %{line: _} = tags, collection) do line = to_integer(line) tags.line <= line and closest_test_before_line(line, collection).tags.line == tags.line diff --git a/lib/ex_unit/lib/ex_unit/formatter.ex b/lib/ex_unit/lib/ex_unit/formatter.ex index 0978ccd4721..df992846a2e 100644 --- a/lib/ex_unit/lib/ex_unit/formatter.ex +++ b/lib/ex_unit/lib/ex_unit/formatter.ex @@ -1,31 +1,30 @@ defmodule ExUnit.Formatter do @moduledoc """ - This module holds helper functions related to formatting and contains - documentation about the formatting protocol. 
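# The rewritten moduledoc just below describes formatters as GenServers that
# receive the suite/case/test events as casts (and add_handler/3 above now
# starts them under a Supervisor, warning only for legacy GenEvent handlers).
# A minimal sketch of such a formatter -- MyApp.DotsFormatter is hypothetical,
# not part of the patch:
defmodule MyApp.DotsFormatter do
  @moduledoc false
  use GenServer

  # The runner starts each formatter with the suite options.
  def init(opts) do
    {:ok, %{seed: opts[:seed], failures: 0}}
  end

  def handle_cast({:test_finished, %ExUnit.Test{state: {:failed, _}}}, state) do
    IO.write("F")
    {:noreply, %{state | failures: state.failures + 1}}
  end

  def handle_cast({:test_finished, _test}, state) do
    IO.write(".")
    {:noreply, state}
  end

  def handle_cast({:suite_finished, run_us, load_us}, state) do
    IO.puts("\nDone in #{run_us + load_us}µs, #{state.failures} failure(s), seed #{state.seed}")
    {:noreply, state}
  end

  # :suite_started, :case_started, :case_finished and :test_started are ignored.
  def handle_cast(_event, state) do
    {:noreply, state}
  end
end

# Enabled through the :formatters option, e.g. in test/test_helper.exs:
#   ExUnit.configure(formatters: [MyApp.DotsFormatter, ExUnit.CLIFormatter])
#   ExUnit.start()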
+ Helper functions for formatting and the formatting protocols. - Formatters are registered at the `ExUnit.EventManager` event manager and - will be send events by the runner. + Formatters are `GenServer`s specified during ExUnit configuration + that receive a series of events as casts. The following events are possible: * `{:suite_started, opts}` - - the suite has started with the specified options to the runner. + the suite has started with the specified options to the runner. * `{:suite_finished, run_us, load_us}` - - the suite has finished. `run_us` and `load_us` are the run and load - times in microseconds respectively. + the suite has finished. `run_us` and `load_us` are the run and load + times in microseconds respectively. * `{:case_started, test_case}` - - a test case has started. See `ExUnit.TestCase` for details. + a test case has started. See `ExUnit.TestCase` for details. * `{:case_finished, test_case}` - - a test case has finished. See `ExUnit.TestCase` for details. + a test case has finished. See `ExUnit.TestCase` for details. - * `{:test_started, test_case}` - - a test case has started. See `ExUnit.Test` for details. + * `{:test_started, test}` - + a test has started. See `ExUnit.Test` for details. - * `{:test_finished, test_case}` - - a test case has finished. See `ExUnit.Test` for details. + * `{:test_finished, test}` - + a test has finished. See `ExUnit.Test` for details. """ @@ -35,11 +34,10 @@ defmodule ExUnit.Formatter do @type run_us :: pos_integer @type load_us :: pos_integer | nil - import Exception, only: [format_stacktrace_entry: 1] + import Exception, only: [format_stacktrace_entry: 1, format_file_line: 3] - @label_padding " " @counter_padding " " - @inspect_padding @counter_padding <> @label_padding + @no_value ExUnit.AssertionError.no_value @doc """ Formats time taken running the test suite. @@ -68,8 +66,8 @@ defmodule ExUnit.Formatter do run_us = run_us |> normalize_us load_us = load_us |> normalize_us - ms = run_us + load_us - "Finished in #{format_us ms} seconds (#{format_us load_us}s on load, #{format_us run_us}s on tests)" + total_us = run_us + load_us + "Finished in #{format_us total_us} seconds (#{format_us load_us}s on load, #{format_us run_us}s on tests)" end defp normalize_us(us) do @@ -86,12 +84,12 @@ defmodule ExUnit.Formatter do end @doc """ - Formats filters used to constain cases to be run. + Formats filters used to constrain cases to be run. ## Examples - iex> format_filters([run: true, slow: false], :include) - "Including tags: [run: true, slow: false]" + iex> format_filters([run: true, slow: false], :include) + "Including tags: [run: true, slow: false]" """ @spec format_filters(Keyword.t, atom) :: String.t @@ -105,121 +103,267 @@ defmodule ExUnit.Formatter do @doc """ Receives a test and formats its failure. 
""" - def format_test_failure(test, {kind, reason, stack}, counter, width, formatter) do + def format_test_failure(test, failures, counter, width, formatter) do %ExUnit.Test{name: name, case: case, tags: tags} = test - test_info(with_counter(counter, "#{name} (#{inspect case})"), formatter) - <> test_location(with_location(tags), formatter) - <> format_kind_reason(kind, reason, width, formatter) - <> format_stacktrace(stack, case, name, formatter) + + test_info(with_counter(counter, "#{name} (#{inspect case})"), formatter) <> + test_location(with_location(tags), formatter) <> + Enum.map_join(Enum.with_index(failures), "", fn {{kind, reason, stack}, index} -> + {text, stack} = format_kind_reason(test, kind, reason, stack, width, formatter) + failure_header(failures, index) <> text <> format_stacktrace(stack, case, name, formatter) + end) <> + report(tags, failures, width, formatter) + end + + + @doc false + def format_assertion_error(%ExUnit.AssertionError{} = struct) do + format_assertion_error(%{}, struct, [], :infinity, fn _, msg -> msg end, "") end + def format_assertion_error(test, struct, stack, width, formatter, counter_padding) do + label_padding_size = if has_value?(struct.right), do: 7, else: 6 + padding_size = label_padding_size + byte_size(@counter_padding) + inspect = &inspect_multiline(&1, padding_size, width) + {left, right} = format_sides(struct, formatter, inspect) + binding = if(struct.binding == [], do: ExUnit.AssertionError.no_value(), else: struct.binding) + + [ + note: if_value(struct.message, &format_message(&1, formatter)), + code: if_value(struct.expr, &code_multiline(&1, padding_size)), + code: unless_value(struct.expr, fn -> get_code(test, stack) || @no_value end), + left: left, + right: right, + variables: if_value(binding, &format_binding(&1, width)), + ] + |> format_meta(formatter, label_padding_size) + |> make_into_lines(counter_padding) + end + + defp report(tags, failures, width, formatter) do + case Map.take(tags, List.wrap(tags[:report])) do + report when map_size(report) == 0 -> + "" + report -> + report_spacing(failures) <> + extra_info("tags:", formatter) <> + Enum.map_join(report, "", fn {key, value} -> + prefix = " #{key}: " + prefix <> inspect_multiline(value, byte_size(prefix), width) <> "\n" + end) + end + end + + defp report_spacing([_]), do: "" + defp report_spacing(_), do: "\n" + @doc """ Receives a test case and formats its failure. 
""" - def format_test_case_failure(test_case, {kind, reason, stacktrace}, counter, width, formatter) do + def format_test_case_failure(test_case, failures, counter, width, formatter) do %ExUnit.TestCase{name: name} = test_case - test_case_info(with_counter(counter, "#{inspect name}: "), formatter) - <> format_kind_reason(kind, reason, width, formatter) - <> format_stacktrace(stacktrace, name, nil, formatter) + test_case_info(with_counter(counter, "#{inspect name}: "), formatter) <> + Enum.map_join(Enum.with_index(failures), "", fn {{kind, reason, stack}, index} -> + {text, stack} = format_kind_reason(test_case, kind, reason, stack, width, formatter) + failure_header(failures, index) <> text <> format_stacktrace(stack, name, nil, formatter) + end) end - defp format_kind_reason(:error, %ExUnit.AssertionError{} = struct, width, formatter) do - width = if width == :infinity, do: width, else: width - byte_size(@inspect_padding) + defp format_kind_reason(test, :error, %ExUnit.AssertionError{} = struct, stack, width, formatter) do + {format_assertion_error(test, struct, stack, width, formatter, @counter_padding), stack} + end - fields = - [note: if_value(struct.message, &format_banner(&1, formatter)), - code: if_value(struct.expr, &code_multiline(&1, width)), - lhs: if_value(struct.left, &inspect_multiline(&1, width)), - rhs: if_value(struct.right, &inspect_multiline(&1, width))] + defp format_kind_reason(test, :error, %FunctionClauseError{} = struct, stack, _width, formatter) do + {blamed, stack} = Exception.blame(:error, struct, stack) + banner = Exception.format_banner(:error, struct) + blamed = FunctionClauseError.blame(blamed, &inspect/1, &blame_match(&1, &2, formatter)) + message = error_info(banner, formatter) <> "\n" <> pad(String.trim_leading(blamed, "\n")) + {message <> format_code(test, stack, formatter), stack} + end - fields - |> filter_interesting_fields - |> format_each_reason(formatter) - |> make_into_lines(@counter_padding) + defp format_kind_reason(test, kind, reason, stack, _width, formatter) do + message = error_info(Exception.format_banner(kind, reason), formatter) + {message <> format_code(test, stack, formatter), stack} end - defp format_kind_reason(kind, reason, _width, formatter) do - error_info Exception.format_banner(kind, reason), formatter + defp format_code(test, stack, formatter) do + if snippet = get_code(test, stack) do + " " <> formatter.(:extra_info, "code: ") <> snippet <> "\n" + else + "" + end end - defp filter_interesting_fields(fields) do - Enum.filter(fields, fn {_, value} -> - value != ExUnit.AssertionError.no_value - end) + defp get_code(%{case: case, name: name}, stack) do + info = Enum.find_value(stack, fn {^case, ^name, _, info} -> info; _ -> nil end) + file = info[:file] + line = info[:line] + if line > 0 && file && File.exists?(file) do + file |> File.stream! 
|> Enum.at(line - 1) |> String.trim + end end + defp get_code(%{}, _) do + nil + end + + defp blame_match(%{match?: true, node: node}, _, formatter), + do: formatter.(:blame_same, Macro.to_string(node)) + defp blame_match(%{match?: false, node: node}, _, formatter), + do: formatter.(:blame_diff, Macro.to_string(node)) + defp blame_match(_, string, _formatter), + do: string - defp format_each_reason(reasons, formatter) do - Enum.map(reasons, fn {label, value} -> - format_label(label, formatter) <> value - end) + defp format_meta(fields, formatter, padding_size) do + for {label, value} <- fields, has_value?(value) do + format_label(label, formatter, padding_size) <> value + end end defp if_value(value, fun) do - if value == ExUnit.AssertionError.no_value do + if has_value?(value) do + fun.(value) + else value + end + end + + defp unless_value(value, fun) do + if has_value?(value) do + @no_value else - fun.(value) + fun.() end end - defp format_label(:note, _formatter) do - "" + defp has_value?(value) do + value != @no_value end - defp format_label(label, formatter) do - formatter.(:error_info, String.ljust("#{label}:", byte_size(@label_padding))) + defp format_label(:note, _formatter, _padding_size), do: "" + + defp format_label(label, formatter, padding_size) do + formatter.(:extra_info, String.pad_trailing("#{label}:", padding_size)) end - defp format_banner(value, formatter) do + defp format_message(value, formatter) do + value = String.replace(value, "\n", "\n" <> @counter_padding) formatter.(:error_info, value) end - defp code_multiline(expr, _width) when is_binary(expr) do - expr - |> String.replace("\n", "\n" <> @inspect_padding) + defp code_multiline(expr, padding_size) when is_binary(expr) do + padding = String.duplicate(" ", padding_size) + String.replace(expr, "\n", "\n" <> padding) end + defp code_multiline({fun, _, [expr]}, padding_size) when is_atom(fun) do + code_multiline(Atom.to_string(fun) <> " " <> Macro.to_string(expr), padding_size) + end + defp code_multiline(expr, padding_size) do + code_multiline(Macro.to_string(expr), padding_size) + end + + defp format_binding(binding, width) do + padding = @counter_padding <> " " + padding_size = byte_size(padding) + + result = + Enum.map_join(binding, "\n" <> padding, fn {var, value} -> + "#{var} = #{inspect_multiline(value, padding_size, width)}" + end) - defp code_multiline(expr, width) do - code_multiline(expr |> Macro.to_string, width) + "\n" <> padding <> result end - defp inspect_multiline(expr, width) do - expr - |> inspect(pretty: true, width: width) - |> String.replace("\n", "\n" <> @inspect_padding) + defp inspect_multiline(expr, padding_size, width) do + padding = String.duplicate(" ", padding_size) + width = if width == :infinity, do: width, else: width - padding_size + inspect(expr, [pretty: true, width: width]) + |> String.replace("\n", "\n" <> padding) end defp make_into_lines(reasons, padding) do padding <> Enum.join(reasons, "\n" <> padding) <> "\n" end + defp format_sides(struct, formatter, inspect) do + %{left: left, right: right} = struct + + case format_diff(left, right, formatter) do + {left, right} -> + {IO.iodata_to_binary(left), IO.iodata_to_binary(right)} + nil -> + {if_value(left, inspect), if_value(right, inspect)} + end + end + + defp format_diff(left, right, formatter) do + if has_value?(left) and has_value?(right) and formatter.(:diff_enabled?, false) do + if script = edit_script(left, right) do + colorize_diff(script, formatter, {[], []}) + end + end + end + + defp colorize_diff(script, formatter, 
acc) when is_list(script) do + Enum.reduce(script, acc, &colorize_diff(&1, formatter, &2)) + end + + defp colorize_diff({:eq, content}, _formatter, {left, right}) do + {[left | content], [right | content]} + end + + defp colorize_diff({:del, content}, formatter, {left, right}) do + format = colorize_format(content, :diff_delete, :diff_delete_whitespace) + {[left | formatter.(format, content)], right} + end + + defp colorize_diff({:ins, content}, formatter, {left, right}) do + format = colorize_format(content, :diff_insert, :diff_insert_whitespace) + {left, [right | formatter.(format, content)]} + end + + defp colorize_format(content, normal, whitespace) do + if String.trim_leading(content) == "", do: whitespace, else: normal + end + + defp edit_script(left, right) do + task = Task.async(ExUnit.Diff, :script, [left, right]) + case Task.yield(task, 1_500) || Task.shutdown(task, :brutal_kill) do + {:ok, script} -> script + nil -> nil + end + end + defp format_stacktrace([], _case, _test, _color) do "" end defp format_stacktrace(stacktrace, test_case, test, color) do extra_info("stacktrace:", color) <> - Enum.map_join(stacktrace, - fn(s) -> stacktrace_info format_stacktrace_entry(s, test_case, test), color end) + Enum.map_join(stacktrace, fn entry -> + stacktrace_info format_stacktrace_entry(entry, test_case, test), color + end) end defp format_stacktrace_entry({test_case, test, _, location}, test_case, test) do - "#{location[:file]}:#{location[:line]}" + format_file_line(location[:file], location[:line], " (test)") end - defp format_stacktrace_entry(s, _test_case, _test) do - format_stacktrace_entry(s) + defp format_stacktrace_entry(entry, _test_case, _test) do + format_stacktrace_entry(entry) end defp with_location(tags) do "#{Path.relative_to_cwd(tags[:file])}:#{tags[:line]}" end + defp failure_header([_], _), do: "" + defp failure_header(_, i), do: "\n#{@counter_padding}Failure ##{i+1}\n" + defp with_counter(counter, msg) when counter < 10 do " #{counter}) #{msg}" end defp with_counter(counter, msg) when counter < 100 do " #{counter}) #{msg}" end defp with_counter(counter, msg) do "#{counter}) #{msg}" end - defp test_case_info(msg, nil), do: msg <> "failure on setup_all callback, tests invalidated\n" + defp test_case_info(msg, nil), do: msg <> "failure on setup_all callback, test invalidated\n" defp test_case_info(msg, formatter), do: test_case_info(formatter.(:test_case_info, msg), nil) defp test_info(msg, nil), do: msg <> "\n" @@ -228,15 +372,17 @@ defmodule ExUnit.Formatter do defp test_location(msg, nil), do: " " <> msg <> "\n" defp test_location(msg, formatter), do: test_location(formatter.(:location_info, msg), nil) - defp error_info(msg, nil) do - " " <> String.replace(msg, "\n", "\n ") <> <<"\n">> + defp pad(msg) do + " " <> String.replace(msg, "\n", "\n ") <> "\n" end - defp error_info(msg, formatter), do: error_info(formatter.(:error_info, msg), nil) + defp error_info(msg, nil), do: pad(msg) + defp error_info(msg, formatter), do: pad(formatter.(:error_info, msg)) - defp extra_info(msg, nil), do: " " <> msg <> "\n" - defp extra_info(msg, formatter), do: extra_info(formatter.(:extra_info, msg), nil) + defp extra_info(msg, nil), do: pad(msg) + defp extra_info(msg, formatter), do: pad(formatter.(:extra_info, msg)) + defp stacktrace_info("", _formatter), do: "" defp stacktrace_info(msg, nil), do: " " <> msg <> "\n" defp stacktrace_info(msg, formatter), do: stacktrace_info(formatter.(:stacktrace_info, msg), nil) end diff --git a/lib/ex_unit/lib/ex_unit/on_exit_handler.ex 
b/lib/ex_unit/lib/ex_unit/on_exit_handler.ex index 773e702555a..40423bc9d74 100644 --- a/lib/ex_unit/lib/ex_unit/on_exit_handler.ex +++ b/lib/ex_unit/lib/ex_unit/on_exit_handler.ex @@ -7,15 +7,15 @@ defmodule ExUnit.OnExitHandler do end @spec register(pid) :: :ok - def register(pid) do + def register(pid) when is_pid(pid) do Agent.update(@name, &Map.put(&1, pid, [])) end - @spec add(pid, term, fun) :: :ok | :error - def add(pid, ref, callback) do + @spec add(pid, term, (() -> term)) :: :ok | :error + def add(pid, name_or_ref, callback) when is_pid(pid) and is_function(callback, 0) do Agent.get_and_update(@name, fn map -> if entries = Map.get(map, pid) do - entries = List.keystore(entries, ref, 0, {ref, callback}) + entries = List.keystore(entries, name_or_ref, 0, {name_or_ref, callback}) {:ok, Map.put(map, pid, entries)} else {:error, map} @@ -23,15 +23,15 @@ defmodule ExUnit.OnExitHandler do end) end - @spec run(pid) :: :ok | {Exception.kind, term, Exception.stacktrace} - def run(pid) do - callbacks = Agent.get_and_update(@name, &Map.pop(&1, pid)) - exec_on_exit_callbacks(Enum.reverse(callbacks)) + @spec run(pid, timeout) :: :ok | {Exception.kind, term, Exception.stacktrace} + def run(pid, timeout) when is_pid(pid) do + callbacks = Agent.get_and_update(@name, &Map.pop(&1, pid, [])) + exec_on_exit_callbacks(Enum.reverse(callbacks), timeout) end - defp exec_on_exit_callbacks(callbacks) do + defp exec_on_exit_callbacks(callbacks, timeout) do {runner_pid, runner_monitor, state} = - Enum.reduce callbacks, {nil, nil, nil}, &exec_on_exit_callback/2 + Enum.reduce(callbacks, {nil, nil, nil}, &exec_on_exit_callback(&1, timeout, &2)) if is_pid(runner_pid) and Process.alive?(runner_pid) do send(runner_pid, :shutdown) @@ -43,10 +43,13 @@ defmodule ExUnit.OnExitHandler do state || :ok end - defp exec_on_exit_callback({_ref, callback}, {runner_pid, runner_monitor, state}) do + defp exec_on_exit_callback({_name_or_ref, callback}, timeout, {runner_pid, runner_monitor, state}) do {runner_pid, runner_monitor} = ensure_alive_callback_runner(runner_pid, runner_monitor) send(runner_pid, {:run, self(), callback}) + receive_runner_reply(runner_pid, runner_monitor, state, timeout) + end + defp receive_runner_reply(runner_pid, runner_monitor, state, timeout) do receive do {^runner_pid, nil} -> {runner_pid, runner_monitor, state} @@ -54,6 +57,19 @@ defmodule ExUnit.OnExitHandler do {runner_pid, runner_monitor, state || error} {:DOWN, ^runner_monitor, :process, ^runner_pid, error} -> {nil, nil, state || {{:EXIT, runner_pid}, error, []}} + after + timeout -> + case Process.info(runner_pid, :current_stacktrace) do + {:current_stacktrace, stacktrace} -> + Process.exit(runner_pid, :kill) + receive do + {:DOWN, ^runner_monitor, :process, ^runner_pid, _} -> :ok + end + exception = ExUnit.TimeoutError.exception(timeout: timeout, type: :on_exit) + {nil, nil, state || {:error, exception, stacktrace}} + nil -> + receive_runner_reply(runner_pid, runner_monitor, state, timeout) + end end end @@ -83,6 +99,6 @@ defmodule ExUnit.OnExitHandler do nil catch kind, error -> - {kind, Exception.normalize(kind, error), System.stacktrace} + {kind, error, System.stacktrace} end end diff --git a/lib/ex_unit/lib/ex_unit/runner.ex b/lib/ex_unit/lib/ex_unit/runner.ex index 4bed446d6ea..e83c45936f0 100644 --- a/lib/ex_unit/lib/ex_unit/runner.ex +++ b/lib/ex_unit/lib/ex_unit/runner.ex @@ -2,79 +2,85 @@ defmodule ExUnit.Runner do @moduledoc false alias ExUnit.EventManager, as: EM - @stop_timeout 30_000 - def run(async, sync, opts, load_us) do - 
opts = normalize_opts(opts) - - {:ok, pid} = EM.start_link - formatters = [ExUnit.RunnerStats|opts[:formatters]] - Enum.each formatters, &(:ok = EM.add_handler(pid, &1, opts)) + def run(opts, load_us) do + {opts, config} = configure(opts) - config = %{ - seed: opts[:seed], - max_cases: opts[:max_cases], - sync_cases: [], - async_cases: [], - taken_cases: 0, - include: opts[:include], - exclude: opts[:exclude], - manager: pid - } + :erlang.system_flag(:backtrace_depth, + Keyword.fetch!(opts, :stacktrace_depth)) {run_us, _} = :timer.tc fn -> EM.suite_started(config.manager, opts) - loop %{config | sync_cases: shuffle(config, sync), - async_cases: shuffle(config, async)} + loop(config, 0) end EM.suite_finished(config.manager, run_us, load_us) - EM.call(config.manager, ExUnit.RunnerStats, :stop, @stop_timeout) + result = ExUnit.RunnerStats.stats(config.stats) + EM.stop(config.manager) + result end - defp normalize_opts(opts) do - opts = - if opts[:trace] do - Keyword.put_new(opts, :max_cases, 1) - else - Keyword.put(opts, :trace, false) - end + defp configure(opts) do + opts = normalize_opts(opts) + + {:ok, manager} = EM.start_link + {:ok, stats} = EM.add_handler(manager, ExUnit.RunnerStats, opts) + Enum.each opts[:formatters], &EM.add_handler(manager, &1, opts) + + config = %{ + capture_log: opts[:capture_log], + exclude: opts[:exclude], + include: opts[:include], + manager: manager, + stats: stats, + max_cases: opts[:max_cases], + seed: opts[:seed], + cases: :async, + timeout: opts[:timeout], + trace: opts[:trace] + } + + {opts, config} + end + defp normalize_opts(opts) do {include, exclude} = ExUnit.Filters.normalize(opts[:include], opts[:exclude]) opts |> Keyword.put(:exclude, exclude) |> Keyword.put(:include, include) - |> Keyword.put_new(:color, IO.ANSI.terminal?) - |> Keyword.put_new(:max_cases, :erlang.system_info(:schedulers_online)) - |> Keyword.put_new(:seed, :erlang.now |> elem(2)) end - defp loop(config) do - available = config.max_cases - config.taken_cases + defp loop(%{cases: :async} = config, taken) do + available = config.max_cases - taken cond do # No cases available, wait for one available <= 0 -> - wait_until_available config + wait_until_available(config, taken) # Slots are available, start with async cases - tuple = take_async_cases(config, available) -> - {config, cases} = tuple - spawn_cases(config, cases) + cases = ExUnit.Server.take_async_cases(available) -> + spawn_cases(config, cases, taken) - # No more async cases, wait for them to finish - config.taken_cases > 0 -> - wait_until_available config + true -> + cases = ExUnit.Server.take_sync_cases() + loop(%{config | cases: cases}, taken) + end + end + + defp loop(%{cases: cases} = config, taken) do + case cases do + _ when taken > 0 -> + wait_until_available(config, taken) # So we can start all sync cases - tuple = take_sync_cases(config) -> - {config, cases} = tuple - spawn_cases(config, cases) + [h | t] -> + spawn_cases(%{config | cases: t}, [h], taken) # No more cases, we are done! - true -> + [] -> config end end @@ -82,14 +88,14 @@ defmodule ExUnit.Runner do # Loop expecting messages from the spawned cases. Whenever # a test case has finished executing, decrease the taken # cases counter and attempt to spawn new ones. 
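# Sketch of how the options consumed by configure/1 above are typically set,
# e.g. from test/test_helper.exs. The values below are arbitrary; the defaults
# live in the :ex_unit application env shown further down in this diff.
ExUnit.configure(
  max_cases: System.schedulers_online() * 2, # cap on concurrently running async cases (see loop/2 above)
  timeout: 120_000,                          # per-test timeout in ms, enforced by receive_test_reply/4 below
  capture_log: true,                         # wrap every test in ExUnit.CaptureLog unless tagged otherwise
  trace: false,                              # trace mode switches the test timeout to :infinity (get_timeout/2 below)
  exclude: [:integration]                    # drop tests tagged @tag :integration unless re-included
  # include: [:integration],                 # re-include them, e.g. for a CI-only run
  # seed: 0,                                 # seed 0 disables shuffling (see shuffle/2 below)
)

ExUnit.start()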
- defp wait_until_available(config) do + defp wait_until_available(config, taken) do receive do {_pid, :case_finished, _test_case} -> - loop %{config | taken_cases: config.taken_cases - 1} + loop(config, taken - 1) end end - defp spawn_cases(config, cases) do + defp spawn_cases(config, cases, taken) do pid = self() Enum.each cases, fn case_name -> @@ -98,7 +104,7 @@ defmodule ExUnit.Runner do end end - loop %{config | taken_cases: config.taken_cases + length(cases)} + loop(config, taken + length(cases)) end defp run_case(config, pid, case_name) do @@ -120,7 +126,7 @@ defmodule ExUnit.Runner do # tests but we do send the notifications to formatter. Enum.each pending, &run_test(config, &1, []) EM.case_finished(config.manager, test_case) - send pid, {self, :case_finished, test_case} + send pid, {self(), :case_finished, test_case} end defp prepare_tests(config, tests) do @@ -129,29 +135,29 @@ defmodule ExUnit.Runner do exclude = config.exclude for test <- tests do - tags = Map.put(test.tags, :test, test.name) + tags = Map.merge(test.tags, %{test: test.name, case: test.case}) case ExUnit.Filters.eval(include, exclude, tags, tests) do :ok -> %{test | tags: tags} - {:error, tag} -> %{test | state: {:skip, "due to #{tag} filter"}} + {:error, msg} -> %{test | state: {:skip, msg}} end end end defp spawn_case(config, test_case, tests) do - parent = self + parent = self() {case_pid, case_ref} = spawn_monitor(fn -> - ExUnit.OnExitHandler.register(self) + ExUnit.OnExitHandler.register(self()) case exec_case_setup(test_case) do {:ok, test_case, context} -> - Enum.each(tests, &run_test(config, &1, context)) - send parent, {self, :case_finished, test_case, []} + Enum.each tests, &run_test(config, &1, context) + send parent, {self(), :case_finished, test_case, []} {:error, test_case} -> - failed_tests = Enum.map(tests, & %{&1 | state: {:invalid, test_case}}) - send parent, {self, :case_finished, test_case, failed_tests} + failed_tests = Enum.map tests, & %{&1 | state: {:invalid, test_case}} + send parent, {self(), :case_finished, test_case, failed_tests} end exit(:shutdown) @@ -160,131 +166,185 @@ defmodule ExUnit.Runner do {test_case, pending} = receive do {^case_pid, :case_finished, test_case, tests} -> + receive do + {:DOWN, ^case_ref, :process, ^case_pid, _} -> :ok + end {test_case, tests} {:DOWN, ^case_ref, :process, ^case_pid, error} -> - test_case = %{test_case | state: {:failed, {{:EXIT, case_pid}, error, []}}} + test_case = %{test_case | state: failed({:EXIT, case_pid}, error, [])} {test_case, []} end - {exec_on_exit(test_case, case_pid), pending} + timeout = get_timeout(%{}, config) + {exec_on_exit(test_case, case_pid, timeout), pending} end defp exec_case_setup(%ExUnit.TestCase{name: case_name} = test_case) do - {:ok, context} = case_name.__ex_unit__(:setup_all, %{case: case_name}) - {:ok, test_case, context} + {:ok, test_case, case_name.__ex_unit__(:setup_all, %{case: case_name})} catch kind, error -> - failed = {:failed, {kind, Exception.normalize(kind, error), pruned_stacktrace}} + failed = failed(kind, error, pruned_stacktrace()) {:error, %{test_case | state: failed}} end - defp run_test(config, test, context) do - EM.test_started(config.manager, test) + defp run_test(true, config, test, context) do + run_test([], config, test, context) + end + + defp run_test(false, config, test, context) do + spawn_test(config, test, context) + end - if nil?(test.state) do - test = spawn_test(config, test, Map.merge(test.tags, context)) + defp run_test(opts, config, test, context) do + ref = make_ref() + 
try do + ExUnit.CaptureLog.capture_log(opts, fn -> + send self(), {ref, spawn_test(config, test, context)} + end) + catch + :exit, :noproc -> + message = + "could not run test, it uses @tag :capture_log" <> + " but the :logger application is not running" + %{test | state: failed(:error, RuntimeError.exception(message), [])} + else + logged -> + receive do + {^ref, test} -> %{test | logs: logged} + end end + end + + defp run_test(config, %{tags: tags} = test, context) do + EM.test_started(config.manager, test) + + test = + if is_nil(test.state) do + capture_log? = Map.get(tags, :capture_log, config.capture_log) + run_test(capture_log?, config, test, Map.merge(tags, context)) + else + test + end EM.test_finished(config.manager, test) end - defp spawn_test(_config, test, context) do + defp spawn_test(config, test, context) do parent = self() {test_pid, test_ref} = spawn_monitor(fn -> - ExUnit.OnExitHandler.register(self) + ExUnit.OnExitHandler.register(self()) {us, test} = :timer.tc(fn -> case exec_test_setup(test, context) do - {:ok, test, context} -> - exec_test(test, context) + {:ok, test} -> + exec_test(test) {:error, test} -> test end end) - send parent, {self, :test_finished, %{test | time: us}} + send parent, {self(), :test_finished, %{test | time: us}} exit(:shutdown) end) - test = - receive do - {^test_pid, :test_finished, test} -> - test - {:DOWN, ^test_ref, :process, ^test_pid, error} -> - %{test | state: {:failed, {{:EXIT, test_pid}, error, []}}} - end + timeout = get_timeout(test.tags, config) + test = receive_test_reply(test, test_pid, test_ref, timeout) - exec_on_exit(test, test_pid) + exec_on_exit(test, test_pid, timeout) + end + + defp receive_test_reply(test, test_pid, test_ref, timeout) do + receive do + {^test_pid, :test_finished, test} -> + receive do + {:DOWN, ^test_ref, :process, ^test_pid, _} -> :ok + end + test + {:DOWN, ^test_ref, :process, ^test_pid, error} -> + %{test | state: failed({:EXIT, test_pid}, error, [])} + after + timeout -> + case Process.info(test_pid, :current_stacktrace) do + {:current_stacktrace, stacktrace} -> + Process.exit(test_pid, :kill) + receive do + {:DOWN, ^test_ref, :process, ^test_pid, _} -> :ok + end + exception = ExUnit.TimeoutError.exception(timeout: timeout, type: test.tags.type) + %{test | state: failed(:error, exception, stacktrace)} + nil -> + receive_test_reply(test, test_pid, test_ref, timeout) + end + end end defp exec_test_setup(%ExUnit.Test{case: case} = test, context) do - {:ok, context} = case.__ex_unit__(:setup, context) - {:ok, test, context} + {:ok, %{test | tags: case.__ex_unit__(:setup, context)}} catch - kind2, error2 -> - failed = {:failed, {kind2, Exception.normalize(kind2, error2), pruned_stacktrace()}} - {:error, %{test | state: failed}} + kind, error -> + {:error, %{test | state: failed(kind, error, pruned_stacktrace())}} end - defp exec_test(%ExUnit.Test{case: case, name: name} = test, context) do + defp exec_test(%ExUnit.Test{case: case, name: name, tags: context} = test) do apply(case, name, [context]) test catch kind, error -> - failed = {:failed, {kind, Exception.normalize(kind, error), pruned_stacktrace()}} - %{test | state: failed} + %{test | state: failed(kind, error, pruned_stacktrace())} end - defp exec_on_exit(test_or_case, pid) do - case ExUnit.OnExitHandler.run(pid) do + defp exec_on_exit(test_or_case, pid, timeout) do + case ExUnit.OnExitHandler.run(pid, timeout) do :ok -> test_or_case {kind, reason, stack} -> - state = test_or_case.state || {:failed, {kind, reason, prune_stacktrace(stack)}} + state = 
test_or_case.state || failed(kind, reason, prune_stacktrace(stack)) %{test_or_case | state: state} end end ## Helpers + defp get_timeout(tags, config) do + if config.trace do + :infinity + else + Map.get(tags, :timeout, config.timeout) + end + end + defp shuffle(%{seed: 0}, list) do Enum.reverse(list) end defp shuffle(%{seed: seed}, list) do - :random.seed(3172, 9814, seed) + _ = :rand.seed(:exsplus, {3172, 9814, seed}) Enum.shuffle(list) end - defp take_async_cases(config, count) do - case config.async_cases do - [] -> nil - cases -> - {response, remaining} = Enum.split(cases, count) - {%{config | async_cases: remaining}, response} - end + defp failed(:error, %ExUnit.MultiError{errors: errors}, _stack) do + {:failed, + Enum.map(errors, fn {kind, reason, stack} -> + {kind, Exception.normalize(kind, reason), prune_stacktrace(stack)} + end)} end - defp take_sync_cases(config) do - case config.sync_cases do - [h|t] -> {%{config | sync_cases: t}, [h]} - [] -> nil - end + defp failed(kind, reason, stack) do + {:failed, [{kind, Exception.normalize(kind, reason), stack}]} end defp pruned_stacktrace, do: prune_stacktrace(System.stacktrace) # Assertions can pop-up in the middle of the stack - defp prune_stacktrace([{ExUnit.Assertions, _, _, _}|t]), do: prune_stacktrace(t) + defp prune_stacktrace([{ExUnit.Assertions, _, _, _} | t]), do: prune_stacktrace(t) # As soon as we see a Runner, it is time to ignore the stacktrace - defp prune_stacktrace([{ExUnit.Runner, _, _, _}|_]), do: [] + defp prune_stacktrace([{ExUnit.Runner, _, _, _} | _]), do: [] # All other cases - defp prune_stacktrace([h|t]), do: [h|prune_stacktrace(t)] + defp prune_stacktrace([h | t]), do: [h | prune_stacktrace(t)] defp prune_stacktrace([]), do: [] end - diff --git a/lib/ex_unit/lib/ex_unit/runner_stats.ex b/lib/ex_unit/lib/ex_unit/runner_stats.ex index d2e6d07a99f..1c7dbe0252a 100644 --- a/lib/ex_unit/lib/ex_unit/runner_stats.ex +++ b/lib/ex_unit/lib/ex_unit/runner_stats.ex @@ -1,31 +1,41 @@ -# Small event consumer to handle runner statistics. 
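# The runner above and the on-exit handler earlier in this diff share one
# timeout strategy: grab the stuck process's :current_stacktrace first (so the
# TimeoutError can show where it was), then kill it and consume the :DOWN
# message; if the process died in the meantime, keep waiting for the reply that
# must already be in flight. A condensed, standalone sketch of that pattern --
# MyApp.Deadline, await/3 and heavy_work/0 are hypothetical names:
defmodule MyApp.Deadline do
  @moduledoc false

  # Waits up to `timeout` ms for `{pid, reply}` from a spawn_monitor'ed process.
  def await(pid, ref, timeout) when is_pid(pid) and is_reference(ref) do
    receive do
      {^pid, reply} ->
        Process.demonitor(ref, [:flush])
        {:ok, reply}
      {:DOWN, ^ref, :process, ^pid, reason} ->
        {:exit, reason}
    after
      timeout ->
        case Process.info(pid, :current_stacktrace) do
          {:current_stacktrace, stacktrace} ->
            Process.exit(pid, :kill)
            receive do
              {:DOWN, ^ref, :process, ^pid, _} -> :ok
            end
            {:timeout, stacktrace}
          nil ->
            # It finished right at the deadline; its reply or :DOWN is already
            # (or about to be) in our mailbox.
            await(pid, ref, timeout)
        end
    end
  end
end

# Usage sketch:
#   parent = self()
#   {pid, ref} = spawn_monitor(fn -> send(parent, {self(), heavy_work()}) end)
#   MyApp.Deadline.await(pid, ref, 60_000)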
defmodule ExUnit.RunnerStats do @moduledoc false - use GenEvent + use GenServer def init(_opts) do - {:ok, %{total: 0, failures: 0}} + {:ok, %{total: 0, failures: 0, skipped: 0}} end - def handle_call(:stop, map) do - {:remove_handler, map} + def stats(pid) do + GenServer.call(pid, :stats, :infinity) end - def handle_event({:test_finished, %ExUnit.Test{state: {tag, _}}}, - %{total: total, failures: failures} = map) when tag in [:failed, :invalid] do - {:ok, %{map | total: total + 1, failures: failures + 1}} + def handle_call(:stats, _from, map) do + {:reply, map, map} end - def handle_event({:test_finished, %ExUnit.Test{state: {:skip, _}}}, map) do - {:ok, map} + def handle_cast({:test_finished, %ExUnit.Test{state: {tag, _}}}, + %{total: total, failures: failures} = map) when tag in [:failed, :invalid] do + {:noreply, %{map | total: total + 1, failures: failures + 1}} end - def handle_event({:test_finished, _}, %{total: total} = map) do - {:ok, %{map | total: total + 1}} + def handle_cast({:test_finished, %ExUnit.Test{state: {:skip, _}}}, + %{total: total, skipped: skipped} = map) do + {:noreply, %{map | total: total + 1, skipped: skipped + 1}} end - def handle_event(_, map) do - {:ok, map} + def handle_cast({:case_finished, %ExUnit.TestCase{state: {:failed, _failures}} = test_case}, + %{failures: failures, total: total} = map) do + test_count = length(test_case.tests) + {:noreply, %{map | failures: failures + test_count, total: total + test_count}} end -end \ No newline at end of file + + def handle_cast({:test_finished, _}, %{total: total} = map) do + {:noreply, %{map | total: total + 1}} + end + + def handle_cast(_, map) do + {:noreply, map} + end +end diff --git a/lib/ex_unit/lib/ex_unit/server.ex b/lib/ex_unit/lib/ex_unit/server.ex index d21a3b83cab..a5f399683af 100644 --- a/lib/ex_unit/lib/ex_unit/server.ex +++ b/lib/ex_unit/lib/ex_unit/server.ex @@ -1,93 +1,87 @@ defmodule ExUnit.Server do @moduledoc false - @timeout 30_000 use GenServer def start_link() do - :gen_server.start_link({:local, __MODULE__}, __MODULE__, :ok, []) - end - - ## Before run API - - def start_load() do - :gen_server.cast(__MODULE__, :start_load) + GenServer.start_link(__MODULE__, :ok, name: __MODULE__) end def add_async_case(name) do - :gen_server.cast(__MODULE__, {:add_async_case, name}) + GenServer.cast(__MODULE__, {:add_async_case, name}) end def add_sync_case(name) do - :gen_server.cast(__MODULE__, {:add_sync_case, name}) + GenServer.cast(__MODULE__, {:add_sync_case, name}) end - ## After run API - - def start_run() do - :gen_server.call(__MODULE__, :start_run, @timeout) + def cases_loaded do + GenServer.call(__MODULE__, :cases_loaded) end - ## Capture Device API - - def add_device(device) do - :gen_server.call(__MODULE__, {:add_device, device}) + def take_async_cases(count) do + timeout = Application.fetch_env!(:ex_unit, :case_load_timeout) + GenServer.call(__MODULE__, {:take_async_cases, count}, timeout) end - def remove_device(device) do - :gen_server.call(__MODULE__, {:remove_device, device}) + def take_sync_cases() do + timeout = Application.fetch_env!(:ex_unit, :case_load_timeout) + GenServer.call(__MODULE__, :take_sync_cases, timeout) end ## Callbacks def init(:ok) do - config = %{async_cases: HashSet.new, sync_cases: HashSet.new, - start_load: :os.timestamp, captured_devices: HashSet.new} - {:ok, config} + {:ok, %{ + loaded: System.monotonic_time, + waiting: nil, + async_cases: [], + sync_cases: [], + }} end - def handle_call(:start_run, _from, config) do - load_us = - if start_load = 
config.start_load do - :timer.now_diff(:os.timestamp, start_load) - end + # Called on demand until we are signaled all cases are loaded. + def handle_call({:take_async_cases, count}, from, %{waiting: nil} = state) do + {:noreply, take_cases(%{state | waiting: {from, count}})} + end - {:reply, - {config.async_cases, config.sync_cases, load_us}, - %{config | async_cases: HashSet.new, sync_cases: HashSet.new, start_load: nil}} + # Called once after all async cases have been sent and reverts the state. + def handle_call(:take_sync_cases, _from, %{waiting: nil, loaded: :done, async_cases: []} = state) do + {:reply, state.sync_cases, + %{state | sync_cases: [], loaded: System.monotonic_time}} end - def handle_call({:add_device, device}, _from, config) do - {:reply, - not(device in config.captured_devices), - %{config | captured_devices: Set.put(config.captured_devices, device)}} + def handle_call(:cases_loaded, _from, %{loaded: loaded} = state) when is_integer(loaded) do + diff = System.convert_time_unit(System.monotonic_time - loaded, :native, :microsecond) + {:reply, diff, take_cases(%{state | loaded: :done})} end - def handle_call({:remove_device, device}, _from, config) do - {:reply, :ok, - %{config | captured_devices: Set.delete(config.captured_devices, device)}} + def handle_cast({:add_async_case, name}, %{async_cases: cases, loaded: loaded} = state) + when is_integer(loaded) do + {:noreply, take_cases(%{state | async_cases: [name | cases]})} end - def handle_call(request, from, config) do - super(request, from, config) + def handle_cast({:add_sync_case, name}, %{sync_cases: cases, loaded: loaded} = state) + when is_integer(loaded) do + {:noreply, %{state | sync_cases: [name | cases]}} end - def handle_cast(:start_load, config) do - {:noreply, - %{config | start_load: :os.timestamp}} + defp take_cases(%{waiting: nil} = state) do + state end - def handle_cast({:add_async_case, name}, config) do - {:noreply, - %{config | async_cases: Set.put(config.async_cases, name)}} + defp take_cases(%{waiting: {from, _count}, async_cases: [], loaded: :done} = state) do + GenServer.reply(from, nil) + %{state | waiting: nil} end - def handle_cast({:add_sync_case, name}, config) do - {:noreply, - %{config | sync_cases: Set.put(config.sync_cases, name)}} + defp take_cases(%{async_cases: []} = state) do + state end - def handle_cast(request, config) do - super(request, config) + defp take_cases(%{waiting: {from, count}, async_cases: cases} = state) do + {reply, cases} = Enum.split(cases, count) + GenServer.reply(from, reply) + %{state | async_cases: cases, waiting: nil} end end diff --git a/lib/ex_unit/mix.exs b/lib/ex_unit/mix.exs index 35aea1a5f26..18d81e6735a 100644 --- a/lib/ex_unit/mix.exs +++ b/lib/ex_unit/mix.exs @@ -12,14 +12,20 @@ defmodule ExUnit.Mixfile do mod: {ExUnit, []}, env: [ # Calculated on demand - # max_cases: :erlang.system_info(:schedulers_online), - # color: IO.ANSI.terminal?, + # max_cases: System.schedulers_online * 2, # seed: rand(), + assert_receive_timeout: 100, autorun: true, - trace: false, + capture_log: false, + case_load_timeout: 60_000, + colors: [], + exclude: [], formatters: [ExUnit.CLIFormatter], include: [], - exclude: []]] + refute_receive_timeout: 100, + stacktrace_depth: 20, + timeout: 60_000, + trace: false]] end end diff --git a/lib/ex_unit/test/ex_unit/assertions_test.exs b/lib/ex_unit/test/ex_unit/assertions_test.exs index 546f58dc47d..0a853f949b4 100644 --- a/lib/ex_unit/test/ex_unit/assertions_test.exs +++ b/lib/ex_unit/test/ex_unit/assertions_test.exs @@ -11,6 
+11,16 @@ alias ExUnit.AssertionsTest.Value defmodule ExUnit.AssertionsTest do use ExUnit.Case, async: true + defmacrop assert_ok(arg) do + quote do + assert {:ok, val} = ok(unquote(arg)) + end + end + + test "assert inside macro" do + assert_ok 42 + end + test "assert with true value" do true = assert Value.truthy end @@ -24,12 +34,12 @@ defmodule ExUnit.AssertionsTest do end end - test "assert when value evalutes to false" do + test "assert when value evaluates to false" do try do "This should never be tested" = assert Value.falsy rescue error in [ExUnit.AssertionError] -> - "Value.falsy()" = error.expr |> Macro.to_string + "assert(Value.falsy())" = error.expr |> Macro.to_string "Expected truthy, got false" = error.message end end @@ -41,7 +51,7 @@ defmodule ExUnit.AssertionsTest do error in [ExUnit.AssertionError] -> 1 = error.right 2 = error.left - "1 + 1 == 1" = error.expr |> Macro.to_string + "assert(1 + 1 == 1)" = error.expr |> Macro.to_string end end @@ -52,7 +62,54 @@ defmodule ExUnit.AssertionsTest do error in [ExUnit.AssertionError] -> 1 = error.left 2 = error.right - "1 == 1 + 1" = error.expr |> Macro.to_string + "assert(1 == 1 + 1)" = error.expr |> Macro.to_string + end + end + + test "assert shows binding in the asserted expression" do + # No binding is shown because the RHS is only a top-level variable + # and thus it's already printed as the RHS of the match. + try do + int1 = 1 + int2 = 2 + assert ^int1 = int2 + rescue + error in [ExUnit.AssertionError] -> + [] = error.binding + end + + try do + int1 = 1 + int2 = 2 + assert ^int1 = int2 * 2 + rescue + error in [ExUnit.AssertionError] -> + [int2: 2] = error.binding + end + + try do + int1 = 1 + int2 = 2 + assert int1 * 10 == int2 + rescue + error in [ExUnit.AssertionError] -> + [int1: 1] = error.binding + end + end + + test "assert shows only binding in the asserted expression when assertion fails" do + try do + bin1 = <<1, 2, 3>> + bin2 = <<1, 2, 4>> + + # Let's have a variable that we don't use in the assertion in the __ENV__. + bin3 = bin1 <> bin2 + _ = bin3 + + assert String.starts_with?(bin1, bin2) + rescue + error in [ExUnit.AssertionError] -> + [bin1: <<1, 2, 3>>, bin2: <<1, 2, 4>>] = error.binding end end @@ -66,7 +123,7 @@ defmodule ExUnit.AssertionsTest do raise "refute was supposed to fail" rescue error in [ExUnit.AssertionError] -> - "Value.truthy()" = error.expr |> Macro.to_string + "refute(Value.truthy())" = Macro.to_string(error.expr) "Expected false or nil, got true" = error.message end end @@ -75,32 +132,223 @@ defmodule ExUnit.AssertionsTest do {2, 1} = (assert {2, 1} = Value.tuple) end + test "assert match with pinned variable" do + a = 1 + {2, 1} = (assert {2, ^a} = Value.tuple) + + try do + assert {^a, 1} = Value.tuple + rescue + error in [ExUnit.AssertionError] -> + "match (=) failed\n" <> + "The following variables were pinned:\n" <> + " a = 1" = error.message + "assert({^a, 1} = Value.tuple())" = Macro.to_string(error.expr) + end + end + + test "assert match with pinned variable from another context" do + var!(a, Elixir) = 1 + {2, 1} = (assert {2, ^var!(a, Elixir)} = Value.tuple) + + try do + assert {^var!(a, Elixir), 1} = Value.tuple + rescue + error in [ExUnit.AssertionError] -> + "match (=) failed" = error.message + "assert({^var!(a, Elixir), 1} = Value.tuple())" = Macro.to_string(error.expr) + end + end + + test "assert match?" 
do + true = assert match?({2, 1}, Value.tuple) + + try do + "This should never be tested" = assert match?({:ok, _}, error(true)) + rescue + error in [ExUnit.AssertionError] -> + "match (match?) failed" = error.message + "assert(match?({:ok, _}, error(true)))" = Macro.to_string(error.expr) + "{:error, true}" = Macro.to_string(error.right) + end + end + + test "refute match?" do + false = refute match?({1, 1}, Value.tuple) + + try do + "This should never be tested" = refute match?({:error, _}, error(true)) + rescue + error in [ExUnit.AssertionError] -> + "match (match?) succeeded, but should have failed" = error.message + "refute(match?({:error, _}, error(true)))" = Macro.to_string(error.expr) + "{:error, true}" = Macro.to_string(error.right) + end + end + + test "assert match? with pinned variable" do + a = 1 + try do + "This should never be tested" = assert(match?({^a, 1}, Value.tuple)) + rescue + error in [ExUnit.AssertionError] -> + "match (match?) failed\n" <> + "The following variables were pinned:\n" <> + " a = 1" = error.message + "assert(match?({^a, 1}, Value.tuple()))" = Macro.to_string(error.expr) + end + end + + test "refute match? with pinned variable" do + a = 2 + try do + "This should never be tested" = refute(match?({^a, 1}, Value.tuple)) + rescue + error in [ExUnit.AssertionError] -> + "match (match?) succeeded, but should have failed\n" <> + "The following variables were pinned:\n" <> + " a = 2" = error.message + "refute(match?({^a, 1}, Value.tuple()))" = Macro.to_string(error.expr) + end + end + test "assert receive waits" do - parent = self + parent = self() spawn fn -> send parent, :hello end :hello = assert_receive :hello end + test "assert receive with message in mailbox after timeout, but before reading mailbox tells user to increase timeout" do + parent = self() + # This is testing a race condition, so it's not + # guaranteed this works under all loads of the system + timeout = 100 + spawn fn -> Process.send_after parent, :hello, timeout end + + try do + assert_receive :hello, timeout + rescue + error in [ExUnit.AssertionError] -> + true = error.message =~ "Found message matching :hello after 100ms" or + error.message =~ "No message matching :hello after 100ms" + end + end + test "assert received does not wait" do - send self, :hello + send self(), :hello :hello = assert_received :hello end - test "assert received when different" do + @received :hello + + test "assert received with module attribute" do + send self(), :hello + :hello = assert_received @received + end + + test "assert received with pinned variable" do + status = :valid + send self(), {:status, :invalid} + try do + "This should never be tested" = assert_received {:status, ^status} + rescue + error in [ExUnit.AssertionError] -> + "No message matching {:status, ^status} after 0ms.\n" <> + "The following variables were pinned:\n" <> + " status = :valid\n" <> + "Process mailbox:\n" <> + " {:status, :invalid}" = error.message + end + end + + test "assert received with multiple identical pinned variables" do + status = :valid + send self(), {:status, :invalid, :invalid} + try do + "This should never be tested" = assert_received { + :status, + ^status, + ^status + } + rescue + error in [ExUnit.AssertionError] -> + "No message matching {:status, ^status, ^status} after 0ms.\n" <> + "The following variables were pinned:\n" <> + " status = :valid\n" <> + "Process mailbox:\n" <> + " {:status, :invalid, :invalid}" = error.message + end + end + + test "assert received with multiple unique pinned variables" do + 
status = :valid + other_status = :invalid + send self(), {:status, :invalid, :invalid} + try do + "This should never be tested" = assert_received { + :status, + ^status, + ^other_status + } + rescue + error in [ExUnit.AssertionError] -> + "No message matching {:status, ^status, ^other_status} after 0ms.\n" <> + "The following variables were pinned:\n" <> + " status = :valid\n" <> + " other_status = :invalid\n" <> + "Process mailbox:\n" <> + " {:status, :invalid, :invalid}" = error.message + end + end + + test "assert received when empty mailbox" do + try do + "This should never be tested" = assert_received :hello + rescue + error in [ExUnit.AssertionError] -> + "No message matching :hello after 0ms.\nThe process mailbox is empty." = error.message + end + end + + test "assert received when different message" do + send self(), {:message, :not_expected, :at_all} try do "This should never be tested" = assert_received :hello rescue error in [ExUnit.AssertionError] -> - "No message matching :hello" = error.message + "No message matching :hello after 0ms.\n" <> + "Process mailbox:\n" <> + " {:message, :not_expected, :at_all}" = error.message + end + end + + test "assert received when different message having more than 10 on mailbox" do + for i <- 1..11, do: send(self(), {:message, i}) + try do + "This should never be tested" = assert_received x when x == :hello + rescue + error in [ExUnit.AssertionError] -> + "No message matching x when x == :hello after 0ms.\nProcess mailbox:" <> + "\n {:message, 1}\n {:message, 2}\n {:message, 3}" <> + "\n {:message, 4}\n {:message, 5}\n {:message, 6}" <> + "\n {:message, 7}\n {:message, 8}\n {:message, 9}" <> + "\n {:message, 10}\nShowing only 10 of 11 messages." = error.message end end test "assert received leaks" do - send self, {:hello, :world} + send self(), {:hello, :world} assert_received {:hello, world} :world = world end + test "assert received does not leak external variables used in guards" do + send self(), {:hello, :world} + guard_world = :world + assert_received {:hello, world} when world == guard_world + :world = world + end + test "refute received does not wait" do false = refute_received :hello end @@ -110,7 +358,7 @@ defmodule ExUnit.AssertionsTest do end test "refute received when equal" do - send self, :hello + send self(), :hello try do "This should never be tested" = refute_received :hello rescue @@ -130,7 +378,7 @@ defmodule ExUnit.AssertionsTest do error in [ExUnit.AssertionError] -> 'foo' = error.left 'bar' = error.right - "'foo' in 'bar'" = error.expr |> Macro.to_string + "assert('foo' in 'bar')" = Macro.to_string(error.expr) end end @@ -145,33 +393,58 @@ defmodule ExUnit.AssertionsTest do error in [ExUnit.AssertionError] -> 'foo' = error.left ['foo', 'bar'] = error.right - "'foo' in ['foo', 'bar']" = error.expr |> Macro.to_string + "refute('foo' in ['foo', 'bar'])" = error.expr |> Macro.to_string end end test "assert match" do - {:ok, true} = assert {:ok, _} = {:ok, true} + {:ok, true} = assert {:ok, _} = ok(true) + end + + test "assert match with bitstrings" do + "foobar" = assert "foo" <> bar = "foobar" + "bar" = bar end test "assert match when no match" do try do - "This should never be tested" = assert {:ok, _} = "bar" + assert {:ok, _} = error(true) + rescue + error in [ExUnit.AssertionError] -> + "match (=) failed" = error.message + "assert({:ok, _} = error(true))" = error.expr |> Macro.to_string + "{:error, true}" = error.right |> Macro.to_string + end + end + + test "assert match when falsy but not match" do + try do + assert 
{:ok, _x} = nil + rescue + error in [ExUnit.AssertionError] -> + "match (=) failed" = error.message + "assert({:ok, _x} = nil)" = error.expr |> Macro.to_string + "nil" = error.right |> Macro.to_string + end + end + + test "assert match when falsy" do + try do + assert _x = nil rescue error in [ExUnit.AssertionError] -> - "match (=) failed" = error.message - "{:ok, _} = \"bar\"" = error.expr |> Macro.to_string - "bar" = error.right + "Expected truthy, got nil" = error.message + "assert(_x = nil)" = error.expr |> Macro.to_string end end test "refute match when no match" do try do - "This should never be tested" = refute _ = "bar" + "This should never be tested" = refute _ = ok(true) rescue error in [ExUnit.AssertionError] -> - "bar" = error.right - "_ = \"bar\"" = error.expr |> Macro.to_string - "match (=) succeeded, but should have failed" = error.message + "refute(_ = ok(true))" = error.expr |> Macro.to_string + "Expected false or nil, got {:ok, true}" = error.message end end @@ -205,7 +478,7 @@ defmodule ExUnit.AssertionsTest do test "assert raise with no error" do "This should never be tested" = assert_raise ArgumentError, fn -> - # nothing + nil end rescue error in [ExUnit.AssertionError] -> @@ -226,10 +499,21 @@ defmodule ExUnit.AssertionsTest do end rescue error in [ExUnit.AssertionError] -> - "Expected exception ArgumentError but got UndefinedFunctionError (undefined function: Not.Defined.function/3)" = error.message + "Expected exception ArgumentError but got UndefinedFunctionError " <> + "(function Not.Defined.function/3 is undefined (module Not.Defined is not available))" = error.message + end + + test "assert raise with some other error includes stacktrace from original error" do + "This should never be tested" = assert_raise ArgumentError, fn -> + Not.Defined.function(1, 2, 3) + end + rescue + ExUnit.AssertionError -> + stacktrace = System.stacktrace + [{Not.Defined, :function, [1, 2, 3], _} | _] = stacktrace end - test "assert raise with erlang error" do + test "assert raise with Erlang error" do assert_raise SyntaxError, fn -> List.flatten(1) end @@ -238,6 +522,32 @@ defmodule ExUnit.AssertionsTest do "Expected exception SyntaxError but got FunctionClauseError (no function clause matching in :lists.flatten/1)" = error.message end + test "assert raise comparing messages (for equality)" do + assert_raise RuntimeError, "foo", fn -> + raise RuntimeError, "bar" + end + rescue + error in [ExUnit.AssertionError] -> + "Wrong message for RuntimeError" <> + "\nexpected:" <> + "\n \"foo\"" <> + "\nactual:" <> + "\n \"bar\"" = error.message + end + + test "assert raise comparing messages (with a regex)" do + assert_raise RuntimeError, ~r/ba[zk]/, fn -> + raise RuntimeError, "bar" + end + rescue + error in [ExUnit.AssertionError] -> + "Wrong message for RuntimeError" <> + "\nexpected:" <> + "\n ~r/ba[zk]/" <> + "\nactual:" <> + "\n \"bar\"" = error.message + end + test "assert greater than operator" do true = assert 2 > 1 end @@ -246,9 +556,9 @@ defmodule ExUnit.AssertionsTest do "This should never be tested" = assert 1 > 2 rescue error in [ExUnit.AssertionError] -> - 1 = error.left - 2 = error.right - "1 > 2" = error.expr |> Macro.to_string + 1 = error.left + 2 = error.right + "assert(1 > 2)" = error.expr |> Macro.to_string end test "assert less or equal than operator" do @@ -259,7 +569,7 @@ defmodule ExUnit.AssertionsTest do "This should never be tested" = assert 2 <= 1 rescue error in [ExUnit.AssertionError] -> - "2 <= 1" = error.expr |> Macro.to_string + "assert(2 <= 1)" = error.expr 
|> Macro.to_string 2 = error.left 1 = error.right end @@ -276,6 +586,44 @@ defmodule ExUnit.AssertionsTest do "assertion" = error.message end + test "assert lack of equality" do + try do + "This should never be tested" = assert "one" != "one" + rescue + error in [ExUnit.AssertionError] -> + "Assertion with != failed, both sides are exactly equal" = error.message + "one" = error.left + end + + try do + "This should never be tested" = assert 2 != 2.0 + rescue + error in [ExUnit.AssertionError] -> + "Assertion with != failed" = error.message + 2 = error.left + 2.0 = error.right + end + end + + test "refute equality" do + try do + "This should never be tested" = refute "one" == "one" + rescue + error in [ExUnit.AssertionError] -> + "Refute with == failed, both sides are exactly equal" = error.message + "one" = error.left + end + + try do + "This should never be tested" = refute 2 == 2.0 + rescue + error in [ExUnit.AssertionError] -> + "Refute with == failed" = error.message + 2 = error.left + 2.0 = error.right + end + end + test "assert in delta" do true = assert_in_delta(1.1, 1.2, 0.2) end @@ -346,7 +694,7 @@ defmodule ExUnit.AssertionsTest do end test "flunk" do - "This should never be tested" = flunk + "This should never be tested" = flunk() rescue error in [ExUnit.AssertionError] -> "Flunked!" = error.message @@ -358,4 +706,27 @@ defmodule ExUnit.AssertionsTest do error in [ExUnit.AssertionError] -> "This should raise an error" = error.message end + + test "flunk with wrong argument type" do + "This should never be tested" = flunk ["flunk takes a binary, not a list"] + rescue + error -> + "no function clause matching in ExUnit.Assertions.flunk/1" = FunctionClauseError.message error + end + + test "AssertionError.message/1 is nicely formatted" do + assert :a = :b + rescue + error in [ExUnit.AssertionError] -> + """ + + + match (=) failed + code: assert :a = :b + right: :b + """ = Exception.message(error) + end + + defp ok(val), do: {:ok, val} + defp error(val), do: {:error, val} end diff --git a/lib/ex_unit/test/ex_unit/callbacks_test.exs b/lib/ex_unit/test/ex_unit/callbacks_test.exs index 219540ad107..cca1c70ce7c 100644 --- a/lib/ex_unit/test/ex_unit/callbacks_test.exs +++ b/lib/ex_unit/test/ex_unit/callbacks_test.exs @@ -5,16 +5,20 @@ defmodule ExUnit.CallbacksTest do import ExUnit.CaptureIO + def start_counter(_) do + [counter: []] + end + test "callbacks run custom code with context" do defmodule CallbacksTest do use ExUnit.Case setup_all do - {:ok, [context: :setup_all]} + [context: :setup_all] end setup do - {:ok, [initial_setup: true]} + %{initial_setup: true} end setup context do @@ -28,8 +32,41 @@ defmodule ExUnit.CallbacksTest do end end + ExUnit.Server.cases_loaded() + assert capture_io(fn -> ExUnit.run end) =~ + "1 test, 0 failures" + end + + test "named callbacks run custom code in order" do + defmodule NamedCallbacksTest do + use ExUnit.Case + + import ExUnit.CallbacksTest + setup_all :start_counter + + setup :store_1 + setup [:store_2, :store_3] + + setup context do + [counter: [4 | context.counter]] + end + + setup :store_5 + + test "callbacks", context do + assert context[:counter] == [5, 4, 3, 2, 1] + end + + defp store(context, number), do: [counter: [number | context.counter]] + defp store_1(context), do: store(context, 1) + defp store_2(context), do: store(context, 2) + defp store_3(context), do: store(context, 3) + defp store_5(context), do: store(context, 5) + end + + ExUnit.Server.cases_loaded() assert capture_io(fn -> ExUnit.run end) =~ - "1 tests, 0 failures" + "1 
test, 0 failures" end test "doesn't choke on setup errors" do @@ -37,7 +74,7 @@ defmodule ExUnit.CallbacksTest do use ExUnit.Case setup _ do - :ok = error + :ok = error() end test "ok" do @@ -47,6 +84,7 @@ defmodule ExUnit.CallbacksTest do defp error, do: :error end + ExUnit.Server.cases_loaded() assert capture_io(fn -> ExUnit.run end) =~ "** (MatchError) no match of right hand side value: :error" end @@ -56,7 +94,7 @@ defmodule ExUnit.CallbacksTest do use ExUnit.Case setup_all _ do - :ok = error + :ok = error() end test "ok" do @@ -66,6 +104,7 @@ defmodule ExUnit.CallbacksTest do defp error, do: :error end + ExUnit.Server.cases_loaded() assert capture_io(fn -> ExUnit.run end) =~ "** (MatchError) no match of right hand side value: :error" end @@ -75,13 +114,14 @@ defmodule ExUnit.CallbacksTest do use ExUnit.Case test "ok" do - on_exit fn -> :ok = error end + on_exit fn -> :ok = error() end :ok end defp error, do: :error end + ExUnit.Server.cases_loaded() assert capture_io(fn -> ExUnit.run end) =~ "** (MatchError) no match of right hand side value: :error" end @@ -96,6 +136,7 @@ defmodule ExUnit.CallbacksTest do end end + ExUnit.Server.cases_loaded() assert capture_io(fn -> ExUnit.run end) =~ ">) killed" end @@ -124,7 +165,7 @@ defmodule ExUnit.CallbacksTest do receive do: (:ready -> :ok) on_exit fn -> - send pid, {:on_exit, self} + send pid, {:on_exit, self()} assert_receive :done IO.puts "on_exit run" end @@ -137,9 +178,10 @@ defmodule ExUnit.CallbacksTest do end end + ExUnit.Server.cases_loaded() output = capture_io(fn -> ExUnit.run end) assert output =~ "on_exit run" - assert output =~ "1 tests, 0 failures" + assert output =~ "1 test, 0 failures" end test "runs multiple on_exit exits and overrides by ref" do @@ -187,7 +229,8 @@ defmodule ExUnit.CallbacksTest do end end - no_formatters! + no_formatters!() + ExUnit.Server.cases_loaded() output = capture_io(fn -> ExUnit.run end) assert output =~ """ @@ -230,7 +273,8 @@ defmodule ExUnit.CallbacksTest do end end - no_formatters! 
+ no_formatters!() + ExUnit.Server.cases_loaded() output = capture_io(fn -> ExUnit.run end) assert output =~ """ @@ -239,16 +283,56 @@ defmodule ExUnit.CallbacksTest do on_exit setup_all run """ end + + test "raises an error when setting an invalid callback in setup" do + defmodule SetupErrorTest do + use ExUnit.Case + + setup do + {:ok, "foo"} + end + + test "ok" do + :ok + end + end + + ExUnit.Server.cases_loaded() + assert capture_io(fn -> ExUnit.run end) =~ + "** (RuntimeError) expected ExUnit callback in " <> + "ExUnit.CallbacksTest.SetupErrorTest to return " <> + ":ok | keyword | map, got {:ok, \"foo\"} instead" + end + + test "raises an error when overriding a reserved callback key in setup" do + defmodule SetupReservedTest do + use ExUnit.Case + + setup do + {:ok, file: "foo"} + end + + test "ok" do + :ok + end + end + + ExUnit.Server.cases_loaded() + assert capture_io(fn -> ExUnit.run end) =~ + "** (RuntimeError) ExUnit callback in " <> + "ExUnit.CallbacksTest.SetupReservedTest is " <> + "trying to set reserved field :file to \"foo\"" + end end defmodule ExUnit.CallbacksNoTests do use ExUnit.Case, async: true setup_all do - raise "Never run" + raise "never run" end setup do - raise "Never run" + raise "never run" end end diff --git a/lib/ex_unit/test/ex_unit/capture_io_test.exs b/lib/ex_unit/test/ex_unit/capture_io_test.exs index e6606812f16..827846bb4da 100644 --- a/lib/ex_unit/test/ex_unit/capture_io_test.exs +++ b/lib/ex_unit/test/ex_unit/capture_io_test.exs @@ -12,13 +12,13 @@ defmodule ExUnit.CaptureIOTest do case Enum.split_while(chars, fn(c) -> c != stop_char end) do {l, []} -> {:more, this_far ++ l} - {l, [stop_char|rest]} -> + {l, [stop_char | rest]} -> {:done, this_far ++ l ++ [stop_char], rest} end end def get_line(device \\ Process.group_leader) do - send device, {:io_request, self, device, {:get_until, :unicode, "", __MODULE__, :until_new_line, [?\n]}} + send device, {:io_request, self(), device, {:get_until, :unicode, "", __MODULE__, :until_new_line, [?\n]}} receive do {:io_reply, _, data} -> data end @@ -28,9 +28,27 @@ defmodule ExUnit.CaptureIOTest do import ExUnit.CaptureIO doctest ExUnit.CaptureIO, import: true + test "no leakage on failures" do + group_leader = Process.group_leader() + + test = self() + assert_raise ArgumentError, fn -> + capture_io(fn -> + send(test, {:string_io, Process.group_leader()}) + raise ArgumentError + end) + end + + receive do + {:string_io, pid} -> + ref = Process.monitor(pid) + assert_receive {:DOWN, ^ref, _, _, _} + end + assert Process.group_leader() == group_leader + end + test "with no output" do - assert capture_io(fn -> - end) == "" + assert capture_io(fn -> nil end) == "" end test "with put chars" do @@ -49,7 +67,7 @@ defmodule ExUnit.CaptureIOTest do assert capture_io(fn -> spawn(fn -> :io.put_chars("a") end) - :timer.sleep(10) + Process.sleep(10) end) == "a" assert capture_io(fn -> @@ -224,7 +242,7 @@ defmodule ExUnit.CaptureIOTest do capture_io(":erl. 
mof*,,l", fn -> assert :io.scan_erl_form('>') == {:ok, [{:":", 1}, {:atom, 1, :erl}, {:dot, 1}], 1} - assert :io.scan_erl_form('>') == {:ok, [{:atom, 1, :mof}, {:*, 1}, {:"," , 1}, {:",", 1}, {:atom, 1, :l}], 1} + assert :io.scan_erl_form('>') == {:ok, [{:atom, 1, :mof}, {:*, 1}, {:",", 1}, {:",", 1}, {:atom, 1, :l}], 1} assert :io.scan_erl_form('>') == {:eof, 1} end) @@ -267,7 +285,7 @@ defmodule ExUnit.CaptureIOTest do end) end - test "with multiple io requests" do + test "with multiple IO requests" do assert capture_io(fn -> send_and_receive_io({:requests, [{:put_chars, :unicode, "a"}, {:put_chars, :unicode, "b"}]}) @@ -279,7 +297,7 @@ defmodule ExUnit.CaptureIOTest do end) end - test "with unknown io request" do + test "with unknown IO request" do assert capture_io(fn -> send_and_receive_io(:unknown) end) == "" @@ -289,36 +307,47 @@ defmodule ExUnit.CaptureIOTest do end) end - test "with assert inside" do - group_leader = :erlang.group_leader + test "device re-registering" do + {_pid, ref} = spawn_monitor(fn -> + capture_io(:stderr, fn -> + spawn_link(Kernel, :exit, [:shutdown]) + Process.sleep(:infinity) + end) + end) + # Assert the process is down then invoke capture_io + # to trigger the ExUnit.Server, ensuring the DOWN + # message from previous capture_io has been processed + assert_receive {:DOWN, ^ref, _, _, :shutdown} + _ = capture_io(fn -> "trigger" end) + assert capture_io(:stderr, fn -> nil end) + end + + test "with assert inside" do try do capture_io(fn -> assert false end) rescue error in [ExUnit.AssertionError] -> - "Expected truthy, got false" = error.message + assert error.message == "Expected truthy, got false" end - - # Ensure no leakage on failures - assert group_leader == :erlang.group_leader end test "capture :stderr by two processes" do - spawn(fn -> capture_io(:stderr, fn -> :timer.sleep(100) end) end) - :timer.sleep(10) + spawn(fn -> capture_io(:stderr, fn -> Process.sleep(100) end) end) + Process.sleep(10) assert_raise RuntimeError, "IO device registered at :standard_error is already captured", fn -> - capture_io(:stderr, fn -> end) + capture_io(:stderr, fn -> nil end) end - :timer.sleep(100) + Process.sleep(100) end defp send_and_receive_io(req) do - send :erlang.group_leader, {:io_request, self, self, req} - s = self + pid = self() + send :erlang.group_leader, {:io_request, pid, pid, req} receive do - {:io_reply, ^s, res} -> res + {:io_reply, ^pid, res} -> res end end end diff --git a/lib/ex_unit/test/ex_unit/capture_log_test.exs b/lib/ex_unit/test/ex_unit/capture_log_test.exs new file mode 100644 index 00000000000..c0f96260959 --- /dev/null +++ b/lib/ex_unit/test/ex_unit/capture_log_test.exs @@ -0,0 +1,92 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule ExUnit.CaptureLogTest do + use ExUnit.Case + + require Logger + + import ExUnit.CaptureLog + + setup_all do + :ok = Logger.remove_backend(:console) + on_exit(fn -> Logger.add_backend(:console, flush: true) end) + end + + test "no output" do + assert capture_log(fn -> nil end) == "" + end + + test "assert inside" do + group_leader = Process.group_leader() + + try do + capture_log(fn -> + assert false + end) + rescue + error in [ExUnit.AssertionError] -> + assert error.message == "Expected truthy, got false" + end + + # Ensure no leakage on failures + assert group_leader == Process.group_leader() + refute_received {:gen_event_EXIT, _, _} + end + + test "level aware" do + assert capture_log([level: :warn], fn -> + Logger.info "here" + end) == "" + end + + @tag timeout: 2_000 + test "capture 
removal on exit" do + {_pid, ref} = spawn_monitor(fn -> + capture_log(fn -> + spawn_link(Kernel, :exit, [:shutdown]) + Process.sleep(:infinity) + end) + end) + + assert_receive {:DOWN, ^ref, _, _, :shutdown} + wait_capture_removal() + end + + test "log tracking" do + logged = + assert capture_log(fn -> + Logger.info "one" + + logged = capture_log(fn -> Logger.error "one" end) + send(test = self(), {:nested, logged}) + + Logger.warn "two" + + spawn(fn -> + Logger.debug "three" + send(test, :done) + end) + receive do: (:done -> :ok) + end) + + assert logged =~ "[info] one\n" + assert logged =~ "[warn] two\n" + assert logged =~ "[debug] three\n" + assert logged =~ "[error] one\n" + + receive do + {:nested, logged} -> + assert logged =~ "[error] one\n" + refute logged =~ "[warn] two\n" + end + end + + defp wait_capture_removal() do + case :gen_event.which_handlers(Logger) do + [Logger.Config] -> :ok + _otherwise -> + Process.sleep(20) + wait_capture_removal() + end + end +end diff --git a/lib/ex_unit/test/ex_unit/case_test.exs b/lib/ex_unit/test/ex_unit/case_test.exs index 01125b3a98b..f7d7f62bfe7 100644 --- a/lib/ex_unit/test/ex_unit/case_test.exs +++ b/lib/ex_unit/test/ex_unit/case_test.exs @@ -3,6 +3,10 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule ExUnit.CaseTest do use ExUnit.Case, async: true + ExUnit.Case.register_attribute __MODULE__, :foo + ExUnit.Case.register_attribute __MODULE__, :bar, accumulate: true + ExUnit.Case.register_attribute __MODULE__, :baz + @moduletag :moduletag test "defines test case info" do @@ -19,13 +23,14 @@ defmodule ExUnit.CaseTest do assert context[:case] == __MODULE__ assert context[:test] == __ENV__.function |> elem(0) assert context[:line] == line + assert context[:async] == true assert context[:hello] == true assert context[:world] == :good end test "reset tags", context do - assert nil?(context[:hello]) - assert nil?(context[:world]) + assert is_nil(context[:hello]) + assert is_nil(context[:world]) end test "module tags", context do @@ -36,4 +41,17 @@ defmodule ExUnit.CaseTest do test "module tags can be overridden", context do assert context[:moduletag] == :overridden end + + @foo :hello + @bar :world + test "registered attributes are in context", context do + assert context.registered.foo == :hello + assert context.registered.bar == [:world] + assert context.registered.baz == nil + end + + test "registered attributes are set per test", context do + assert context.registered.foo == nil + assert context.registered.bar == [] + end end diff --git a/lib/ex_unit/test/ex_unit/describe_test.exs b/lib/ex_unit/test/ex_unit/describe_test.exs new file mode 100644 index 00000000000..5648c925686 --- /dev/null +++ b/lib/ex_unit/test/ex_unit/describe_test.exs @@ -0,0 +1,86 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule ExUnit.DescribeTest do + use ExUnit.Case, async: true + + @moduletag [attribute_tag: :from_module] + + setup _ do + [setup_tag: :from_module] + end + + describe "tags" do + @describetag attribute_tag: :from_describe + + test "from describe have higher precedence", context do + assert context.attribute_tag == :from_describe + end + + @tag attribute_tag: :from_test + test "from test have higher precedence", context do + assert context.attribute_tag == :from_test + end + end + + describe "setup" do + setup _ do + [setup_tag: :from_describe] + end + + test "from describe has higher precedence", context do + assert context.setup_tag == :from_describe + end + end + + describe "failures" do + test "when using setup_all inside 
describe" do + assert_raise RuntimeError, ~r"cannot invoke setup_all/2 inside describe", fn -> + defmodule Sample do + use ExUnit.Case + + describe "hello" do + setup_all do + [hello: "world"] + end + end + end + end + end + + test "when using describe inside describe" do + assert_raise RuntimeError, ~r"cannot call describe/2 inside another describe", fn -> + defmodule Sample do + use ExUnit.Case + + describe "hello" do + describe "another" do + end + end + end + end + end + + test "when using non-string describe name" do + assert_raise ArgumentError, ~r"describe name must be a string, got: :not_allowed", fn -> + defmodule Sample do + use ExUnit.Case + + describe :not_allowed do + end + end + end + end + end + + describe "test names" do + test "merge describe information", context do + assert context.test == :"test test names merge describe information" + end + end + + test "attributes from outside describe", context do + assert context.attribute_tag == :from_module + assert context.setup_tag == :from_module + assert context.test == :"test attributes from outside describe" + end +end diff --git a/lib/ex_unit/test/ex_unit/diff_test.exs b/lib/ex_unit/test/ex_unit/diff_test.exs new file mode 100644 index 00000000000..af7aa7e0b80 --- /dev/null +++ b/lib/ex_unit/test/ex_unit/diff_test.exs @@ -0,0 +1,314 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule ExUnit.DiffTest do + use ExUnit.Case, async: true + + import ExUnit.Diff + + defmodule User do + defstruct [:age] + end + + test "numbers" do + int1 = 491512235 + int2 = 490512035 + expected = [eq: "49", del: "1", ins: "0", eq: "512", del: "2", ins: "0", eq: "35"] + assert script(int1, int2) == expected + assert script(42.0, 43.0) == [eq: "4", del: "2", ins: "3", eq: ".0"] + assert script(int1, 43.0) == nil + end + + test "strings" do + string1 = "fox hops over \"the dog" + string2 = "fox jumps over the lazy cat" + expected = [ + {:eq, "\""}, + [eq: "fox ", del: "ho", ins: "jum", eq: "ps over ", del: "\\\"", eq: "the ", del: "dog", ins: "lazy cat"], + {:eq, "\""} + ] + assert script(string1, string2) == expected + assert script(string1, <<193, 31>>) == nil + + # Filtered due to bag distance + assert script("aaa", "bba") == nil + assert script("aaa", "baa") == [{:eq, "\""}, [ins: "b", eq: "aa", del: "a"], {:eq, "\""}] + + assert script("", "") == [eq: "\"\""] + end + + test "lists" do + list1 = ["Tvo", nil, :ok, {}, :ok] + list2 = ["Two", :ok, self(), {true}] + + expected = [ + {:eq, "["}, + [ + [{:eq, "\""}, [eq: "T", del: "v", ins: "w", eq: "o"], {:eq, "\""}], {:eq, ", "}, + {:del, "nil"}, {:del, ", "}, + {:eq, ":ok"}, {:eq, ", "}, + {:ins, inspect(self())}, {:ins, ", "}, + [{:eq, "{"}, [[ins: "true"]], {:eq, "}"}], {:del, ", "}, {:del, ":ok"} + ], + {:eq, "]"} + ] + assert script(list1, list2) == expected + + list1 = [1, "2", 1] + list2 = [1, 1, 2] + expected = [ + {:eq, "["}, + [eq: "1", eq: ", ", del: "\"2\"", del: ", ", eq: "1", ins: ", ", ins: "2"], + {:eq, "]"} + ] + assert script(list1, list2) == expected + + list1 = [1, 1, "1", 2] + list2 = [1, 1, 2] + expected = [ + {:eq, "["}, + [eq: "1, 1", eq: ", ", del: "\"1\"", del: ", ", eq: "2"], + {:eq, "]"} + ] + assert script(list1, list2) == expected + + list1 = [1, 2] + list2 = [1, 1, 2] + expected = [ + {:eq, "["}, + [{:eq, "1"}, {:eq, ", "}, [del: "2", ins: "1"], {:ins, ", "}, {:ins, "2"}], + {:eq, "]"} + ] + assert script(list1, list2) == expected + + list1 = [] + list2 = [1, 2] + expected = [{:eq, "["}, [ins: "1, 2"], {:eq, "]"}] + assert script(list1, list2) == expected + 
+ list1 = [1, 2] + list2 = [] + expected = [{:eq, "["}, [del: "1, 2"], {:eq, "]"}] + assert script(list1, list2) == expected + + assert script([], []) == [eq: "[]"] + end + + test "charlists" do + charlist1 = 'fox hops over \'the dog' + charlist2 = 'fox jumps over the lazy cat' + expected = [ + {:eq, "'"}, + [eq: "fox ", del: "ho", ins: "jum", eq: "ps over ", del: "\\'", eq: "the ", del: "dog", ins: "lazy cat"], + {:eq, "'"} + ] + assert script(charlist1, charlist2) == expected + end + + test "keyword lists" do + keyword1 = [file: "nofile", line: 1] + keyword2 = [file: nil, lime: 1] + expected = [ + {:eq, "["}, + [ + [{:eq, "file: "}, [del: "\"nofile\"", ins: "nil"]], + {:eq, ", "}, + {:del, "line: 1"}, {:ins, "lime: 1"} + ], + {:eq, "]"} + ] + assert script(keyword1, keyword2) == expected + + keyword1 = [file: nil, line: 1] + keyword2 = [file: "nofile"] + expected = [ + {:eq, "["}, + [ + [{:eq, "file: "}, [del: "nil", ins: "\"nofile\""]], + {:del, ", "}, + {:del, "line: 1"} + ], + {:eq, "]"} + ] + assert script(keyword1, keyword2) == expected + + keyword1 = [file: "nofile"] + keyword2 = [file: nil, line: 1] + expected = [ + {:eq, "["}, + [ + [{:eq, "file: "}, [del: "\"nofile\"", ins: "nil"]], + {:ins, ", "}, + {:ins, "line: 1"} + ], + {:eq, "]"} + ] + assert script(keyword1, keyword2) == expected + + keyword1 = [file: "nofile", line: 1] + keyword2 = [file: nil, line: 1] + expected = [ + {:eq, "["}, + [ + [{:eq, "file: "}, [del: "\"nofile\"", ins: "nil"]], + {:eq, ", "}, + {:eq, "line: 1"} + ], + {:eq, "]"} + ] + assert script(keyword1, keyword2) == expected + + keyword1 = [line: 1, file: "nofile"] + keyword2 = [line: 1, file: nil] + expected = [ + {:eq, "["}, + [ + {:eq, "line: 1"}, + {:eq, ", "}, + [{:eq, "file: "}, [del: "\"nofile\"", ins: "nil"]] + ], + {:eq, "]"} + ] + assert script(keyword1, keyword2) == expected + + keyword1 = [file: "one", line: 1] + keyword2 = [file: "two", line: 2] + expected = [ + {:eq, "["}, + [ + [{:eq, "file: "}, [del: "\"one\"", ins: "\"two\""]], + {:eq, ", "}, + [{:eq, "line: "}, [del: "1", ins: "2"]] + ], + {:eq, "]"} + ] + assert script(keyword1, keyword2) == expected + + keyword1 = [file: "nofile"] + keyword2 = [file: nil] + expected = [ + {:eq, "["}, + [{:eq, "file: "}, [del: "\"nofile\"", ins: "nil"]], + {:eq, "]"} + ] + assert script(keyword1, keyword2) == expected + + keyword1 = [file: nil, line: 1] + keyword2 = [line: 1] + expected = [ + {:eq, "["}, + [{:del, "file: nil"}, {:del, ", "}, {:eq, "line: 1"}], + {:eq, "]"} + ] + assert script(keyword1, keyword2) == expected + + keyword1 = [file: nil] + keyword2 = [] + expected = [{:eq, "["}, [{:del, "file: nil"}], {:eq, "]"}] + assert script(keyword1, keyword2) == expected + + keyword1 = [] + keyword2 = [file: nil] + expected = [{:eq, "["}, [{:ins, "file: nil"}], {:eq, "]"}] + assert script(keyword1, keyword2) == expected + + keyword1 = [port: 4000, max_connections: 1000] + keyword2 = [max_connections: 1000, port: 4000] + expected = [ + {:eq, "["}, + [del: "port: 4000", del: ", ", eq: "max_connections: 1000", ins: ", ", ins: "port: 4000"], + {:eq, "]"} + ] + assert script(keyword1, keyword2) == expected + + assert script(["foo-bar": 1], []) == [{:eq, "["}, [{:del, "\"foo-bar\": 1"}], {:eq, "]"}] + end + + test "improper lists" do + expected = [{:eq, "["}, [[eq: "1"], [eq: ", ", eq: "2"], [ins: " | 3"]], {:eq, "]"}] + assert script([1, 2], [1, 2 | 3]) == expected + expected = [{:eq, "["}, [[eq: "1"], [eq: ", ", eq: "2"], [del: " | 3"]], {:eq, "]"}] + assert script([1, 2 | 3], [1, 2]) == expected + + 
expected = [{:eq, "["}, [[eq: "1"], [del: ",", ins: " |", eq: " ", del: "\"a\"", ins: "\"b\""]], {:eq, "]"}] + assert script([1, "a"], [1 | "b"]) == expected + expected = [{:eq, "["}, [[eq: "1"], [del: " |", ins: ",", eq: " ", del: "\"b\"", ins: "\"a\""]], {:eq, "]"}] + assert script([1 | "b"], [1, "a"]) == expected + + expected = [{:eq, "["}, [[eq: "1"], [eq: " | ", del: "2", ins: "3"]], {:eq, "]"}] + assert script([1 | 2], [1 | 3]) == expected + + expected = [{:eq, "["}, [[eq: "1"], [eq: ", ", del: "'b'", ins: "'a'"], [eq: " | ", eq: "3"]], {:eq, "]"}] + assert script([1, 'b' | 3], [1, 'a' | 3]) == expected + expected = [{:eq, "["}, [[del: "'a'", ins: "'b'"], [eq: ", ", eq: "2"], [eq: " | ", eq: "3"]], {:eq, "]"}] + assert script(['a', 2 | 3], ['b', 2 | 3]) == expected + end + + test "tuples" do + tuple1 = {:hex, '1.1'} + tuple2 = {:hex, '0.1', [{:ex_doc}]} + expected = [ + {:eq, "{"}, + [[eq: ":hex"], [{:eq, ", "}, {:eq, "'"}, [del: "1", ins: "0", eq: ".1"], {:eq, "'"}],[ins: ", ", ins: "[{:ex_doc}]"]], + {:eq, "}"} + ] + assert script(tuple1, tuple2) == expected + assert script(tuple1, {}) == [{:eq, "{"}, [[del: ":hex"], [del: ", ", del: "'1.1'"]], {:eq, "}"}] + assert script({}, tuple1) == [{:eq, "{"}, [[ins: ":hex"], [ins: ", ", ins: "'1.1'"]], {:eq, "}"}] + + assert script({}, {}) == [eq: "{}"] + end + + test "maps" do + map1 = Enum.into(1..15, %{}, &{&1, &1}) |> Map.delete(13) + map2 = Enum.reduce(5..10, map1, &Map.delete(&2, &1)) |> Map.put(13, 13) |> Map.put(12, 32) + expected = [ + {:eq, "%{"}, + [ + [eq: "1 => 1"], [eq: ", ", eq: "2 => 2"], [eq: ", ", eq: "3 => 3"], + [eq: ", ", eq: "4 => 4"], [eq: ", ", eq: "11 => 11"], [eq: ", ", eq: "14 => 14"], + [eq: ", ", eq: "15 => 15"], + [{:eq, ", "}, {:eq, "12 => "}, [del: "1", ins: "3", eq: "2"]], + [del: ", ", del: "5 => 5"], [del: ", ", del: "6 => 6"], [del: ", ", del: "7 => 7"], + [del: ", ", del: "8 => 8"], [del: ", ", del: "9 => 9"], [del: ", ", del: "10 => 10"], + [ins: ", ", ins: "13 => 13"], + ], + {:eq, "}"} + ] + assert script(map1, map2) == expected + + map1 = %{baz: 12} + map2 = %{foo: 12, bar: 12, baz: 12} + expected = [{:eq, "%{"}, [[eq: "baz: 12"], [ins: ", ", ins: "bar: 12"], [ins: ", ", ins: "foo: 12"]], {:eq, "}"}] + assert script(map1, map2) == expected + expected = [{:eq, "%{"}, [[eq: "baz: 12"], [del: ", ", del: "bar: 12"], [del: ", ", del: "foo: 12"]], {:eq, "}"}] + assert script(map2, map1) == expected + assert script(map1, %{}) == [{:eq, "%{"}, [[del: "baz: 12"]], {:eq, "}"}] + assert script(%{}, map1) == [{:eq, "%{"}, [[ins: "baz: 12"]], {:eq, "}"}] + expected = [{:eq, "%{"}, [[del: "baz: 12"], [ins: "foo: 12"]], {:eq, "}"}] + assert script(map1, %{foo: 12}) == expected + + assert script(%{"foo-bar": 1}, %{}) == [{:eq, "%{"}, [[del: "\"foo-bar\": 1"]], {:eq, "}"}] + assert script(%{}, %{}) == [eq: "%{}"] + + assert script(%{nil: 42}, %{}) == [{:eq, "%{"}, [[del: "nil: 42"]], {:eq, "}"}] + assert script(%{true: 42}, %{}) == [{:eq, "%{"}, [[del: "true: 42"]], {:eq, "}"}] + assert script(%{false: 42}, %{}) == [{:eq, "%{"}, [[del: "false: 42"]], {:eq, "}"}] + end + + test "structs" do + user1 = %User{age: 16} + user2 = %User{age: 21} + expected = [{:eq, "%ExUnit.DiffTest.User{"}, [[{:eq, "age: "}, [ins: "2", eq: "1", del: "6"]]], {:eq, "}"}] + assert script(user1, user2) == expected + assert script(%User{}, %{}) == nil + assert script(%User{}, %ExUnit.Test{}) == nil + end + + test "not supported" do + bin1 = <<147, 1, 2, 31>> + bin2 = <<193, 1, 31>> + assert script(bin1, bin2) == nil + assert script(:foo, :bar) 
== nil + assert script(:foo, "bar") == nil + end +end diff --git a/lib/ex_unit/test/ex_unit/doc_test_test.exs b/lib/ex_unit/test/ex_unit/doc_test_test.exs index 9b67cfde36f..6fcdeec4a53 100644 --- a/lib/ex_unit/test/ex_unit/doc_test_test.exs +++ b/lib/ex_unit/test/ex_unit/doc_test_test.exs @@ -4,12 +4,18 @@ import ExUnit.TestHelpers defmodule ExUnit.DocTestTest.GoodModule do @doc """ - iex> test_fun + iex> one() 1 - iex> test_fun + 1 + iex> one() + 1 2 """ - def test_fun, do: 1 + def one, do: 1 + + @doc ~S""" + iex> ~S(f#{o}o) + "f\#{o}o" + """ + def test_sigil, do: :ok @doc """ iex> a = 1 @@ -35,16 +41,23 @@ defmodule ExUnit.DocTestTest.GoodModule do """ def exception_test, do: :ok + @doc ~S""" + iex> raise "foo\nbar" + ** (RuntimeError) foo + bar + """ + def multiline_exception_test, do: :ok + @doc """ - iex> Enum.into([a: 0, b: 1, c: 2], HashDict.new) - #HashDict<[c: 2, b: 1, a: 0]> + iex> Enum.into([:a, :b, :c], MapSet.new) + #MapSet<[:a, :b, :c]> """ def inspect1_test, do: :ok @doc """ - iex> x = Enum.into([a: 0, b: 1, c: 2], HashDict.new) + iex> x = Enum.into([:a, :b, :c], MapSet.new) ...> x - #HashDict<[c: 2, b: 1, a: 0]> + #MapSet<[:a, :b, :c]> """ def inspect2_test, do: :ok end |> write_beam @@ -61,46 +74,54 @@ end |> write_beam defmodule ExUnit.DocTestTest.SomewhatGoodModuleWithOnly do @doc """ - iex> test_fun + iex> one() 1 - iex> test_fun + 1 + iex> one() + 1 2 """ - def test_fun, do: 1 + def one, do: 1 @doc """ - iex> test_fun - 1 - iex> test_fun + 1 - 1 + iex> two() + 2 + iex> two() + 1 + 100 """ - def test_fun1, do: 1 + def two, do: 2 end |> write_beam defmodule ExUnit.DocTestTest.SomewhatGoodModuleWithExcept do + @moduledoc """ + iex> 1 + 1 + 1 + """ + @doc """ - iex> test_fun + iex> one() 1 - iex> test_fun + 1 + iex> one() + 1 2 """ - def test_fun, do: 1 + def one, do: 1 @doc """ - iex> test_fun - 1 - iex> test_fun + 1 - 1 + iex> two() + 2 + iex> two() + 1 + 100 """ - def test_fun1, do: 1 + def two, do: 2 end |> write_beam defmodule ExUnit.DocTestTest.NoImport do @doc """ - iex> ExUnit.DocTestTest.NoImport.min(1, 2) + iex> ExUnit.DocTestTest.NoImport.max(1, 2) + {:ok, 2} + + iex> max(1, 2) 2 """ - def min(a, b), do: max(a, b) + def max(a, b), do: {:ok, Kernel.max(a, b)} end |> write_beam defmodule ExUnit.DocTestTest.Invalid do @@ -113,7 +134,7 @@ defmodule ExUnit.DocTestTest.Invalid do 3 iex> :oops - #HashDict<[]> + #MapSet<[]> iex> Hello.world :world @@ -125,6 +146,42 @@ defmodule ExUnit.DocTestTest.Invalid do ** (RuntimeError) hello """ + + @doc """ + iex> 1 + * 1 + 1 + """ + def a(), do: :ok + + @doc """ + iex> 1 + * 1 + 1 + """ + defmacro b(), do: :ok + + @doc """ + ``` + iex> 1 + 2 + 3 + ``` + """ + def indented_not_enough, do: :ok + + @doc ~S''' + ``` + iex> 1 + 2 + 3 + ``` + ''' + def indented_too_much, do: :ok + + @doc """ + ``` + iex> 1 + 2 + 3 + ``` + """ + def dedented_past_fence, do: :ok end |> write_beam defmodule ExUnit.DocTestTest.IndentationHeredocs do @@ -166,7 +223,38 @@ defmodule ExUnit.DocTestTest.IndentationNotEnough do iex> 1 + 2 3 ''' - def not_enough, do: :ok + def test_fun, do: :ok +end |> write_beam + +defmodule ExUnit.DocTestTest.FencedHeredocs do + @doc ~S''' + Receives a test and formats its failure. 
+ + ## Examples + + ``` + iex> 1 + 2 + 3 + ``` + + ``` + iex> 1 + 2 + 3 + ``` + + ``` + iex> 1 + 2 + 3 + ``` + ''' + def heredocs, do: :ok + + @doc ~S''' + ``` + iex> 1 + 2 + 3 + ''' + def incomplete, do: :ok end |> write_beam defmodule ExUnit.DocTestTest.Incomplete do @@ -174,7 +262,97 @@ defmodule ExUnit.DocTestTest.Incomplete do iex> 1 + 2 ''' - def not_enough, do: :ok + def test_fun, do: :ok +end |> write_beam + +defmodule ExUnit.DocTestTest.FenceIncomplete do + @doc ~S''' + ``` + iex> 1 + 2 + 3 + ''' + def test_fun, do: :ok +end |> write_beam + +defmodule ExUnit.DocTestTest.Numbered do + @doc """ + iex(1)> 1 + + ...(1)> 2 + 3 + """ + def test_fun(), do: :ok +end |> write_beam() + +defmodule ExUnit.DocTestTest.Host do + @doc """ + iex(foo@bar)1> 1 + + ...(foo@bar)1> 2 + 3 + """ + def test_fun(), do: :ok +end |> write_beam() + +defmodule ExUnit.DocTestTest.Haiku do + @moduledoc """ + This module describes the ancient Japanese poem form known as Haiku. + + The Inspect protocol has been overriden for `%Haiku{}` + so that Haikus are shown in a pretty-printed fashion. + + This module is part of the DocTest test suite, + to ensure that DocTest can handle opaque inspect types + which contain unicode and possibly consist of multiple lines. + """ + + defstruct [:first_phrase, :second_phrase, :third_phrase, :author] + + @doc """ + Creates a new Haiku. + Optionally pass in the `author` as fourth argument. + + ## Examples: + + # Simple Haiku, inspect output consists of multiple lines. + iex> ExUnit.DocTestTest.Haiku.new("Haikus are easy", "But sometimes they don't make sense", "Refrigerator") + #Haiku< + Haikus are easy + But sometimes they don't make sense + Refrigerator + > + + # Haiku with Unicode characters (Japanese Kanji, em-dash). + iex> ExUnit.DocTestTest.Haiku.new("古池や", "蛙飛びこむ", "水の音", "Matsuo Basho") + #Haiku< + 古池や + 蛙飛びこむ + 水の音 + ― Matsuo Basho + > + + """ + def new(first, second, third, author \\ "") + when is_binary(first) and is_binary(second) and is_binary(third) and is_binary(author) do + %__MODULE__{ + first_phrase: first, + second_phrase: second, + third_phrase: third, + author: author + } + end + + defimpl Inspect do + def inspect(haiku, _opts) do + author = if haiku.author == "", do: "", else: "\n ― #{haiku.author}" + """ + #Haiku< + #{haiku.first_phrase} + #{haiku.second_phrase} + #{haiku.third_phrase}#{author} + > + """ + |> String.trim_trailing("\n") + end + end end |> write_beam defmodule ExUnit.DocTestTest do @@ -185,79 +363,191 @@ defmodule ExUnit.DocTestTest do # doctest ExUnit.DocTest doctest ExUnit.DocTestTest.GoodModule, import: true - doctest ExUnit.DocTestTest.SomewhatGoodModuleWithOnly, only: [test_fun: 0], import: true - doctest ExUnit.DocTestTest.SomewhatGoodModuleWithExcept, except: [test_fun1: 0], import: true + doctest ExUnit.DocTestTest.SomewhatGoodModuleWithOnly, only: [one: 0], import: true + doctest ExUnit.DocTestTest.SomewhatGoodModuleWithExcept, except: [:moduledoc, two: 0], import: true doctest ExUnit.DocTestTest.NoImport doctest ExUnit.DocTestTest.IndentationHeredocs + doctest ExUnit.DocTestTest.FencedHeredocs + doctest ExUnit.DocTestTest.Haiku import ExUnit.CaptureIO + test "multiple functions filtered with :only" do + defmodule MultipleOnly do + use ExUnit.Case + doctest ExUnit.DocTestTest.SomewhatGoodModuleWithOnly, only: [one: 0, two: 0], import: true + end + + ExUnit.Server.cases_loaded() + assert capture_io(fn -> ExUnit.run end) =~ "2 tests, 1 failure" + end + test "doctest failures" do + # When adding or removing lines above this line, the 
tests below will + # fail because we are explicitly asserting some doctest lines from + # ActuallyCompiled in the format of "test/ex_unit/doc_test_test.exs:". defmodule ActuallyCompiled do use ExUnit.Case doctest ExUnit.DocTestTest.Invalid end - ExUnit.configure(seed: 0) + ExUnit.configure(seed: 0, colors: [enabled: false]) + ExUnit.Server.cases_loaded() output = capture_io(fn -> ExUnit.run end) + # Test order is not guaranteed, we can't match this as a string for each failing doctest + assert output =~ ~r/\d+\) test moduledoc at ExUnit\.DocTestTest\.Invalid \(\d+\) \(ExUnit\.DocTestTest\.ActuallyCompiled\)/ + assert output =~ """ 1) test moduledoc at ExUnit.DocTestTest.Invalid (1) (ExUnit.DocTestTest.ActuallyCompiled) - test/ex_unit/doc_test_test.exs:198 - Doctest did not compile, got: (SyntaxError) test/ex_unit/doc_test_test.exs:106: syntax error before: '*' + test/ex_unit/doc_test_test.exs:391 + Doctest did not compile, got: (SyntaxError) test/ex_unit/doc_test_test.exs:130: syntax error before: '*' code: 1 + * 1 stacktrace: - test/ex_unit/doc_test_test.exs:106: ExUnit.DocTestTest.Invalid (module) + test/ex_unit/doc_test_test.exs:130: ExUnit.DocTestTest.Invalid (module) """ assert output =~ """ 2) test moduledoc at ExUnit.DocTestTest.Invalid (2) (ExUnit.DocTestTest.ActuallyCompiled) - test/ex_unit/doc_test_test.exs:198 + test/ex_unit/doc_test_test.exs:391 Doctest failed code: 1 + hd(List.flatten([1])) === 3 - lhs: 2 + left: 2 stacktrace: - test/ex_unit/doc_test_test.exs:106: ExUnit.DocTestTest.Invalid (module) + test/ex_unit/doc_test_test.exs:133: ExUnit.DocTestTest.Invalid (module) """ assert output =~ """ 3) test moduledoc at ExUnit.DocTestTest.Invalid (3) (ExUnit.DocTestTest.ActuallyCompiled) - test/ex_unit/doc_test_test.exs:198 + test/ex_unit/doc_test_test.exs:391 Doctest failed - code: inspect(:oops) === "#HashDict<[]>" - lhs: ":oops" + code: inspect(:oops) === "#MapSet<[]>" + left: ":oops" stacktrace: - test/ex_unit/doc_test_test.exs:106: ExUnit.DocTestTest.Invalid (module) + test/ex_unit/doc_test_test.exs:136: ExUnit.DocTestTest.Invalid (module) """ + # The stacktrace points to the cause of the error assert output =~ """ 4) test moduledoc at ExUnit.DocTestTest.Invalid (4) (ExUnit.DocTestTest.ActuallyCompiled) - test/ex_unit/doc_test_test.exs:198 - Doctest failed: got UndefinedFunctionError with message undefined function: Hello.world/0 - code: Hello.world + test/ex_unit/doc_test_test.exs:391 + Doctest failed: got UndefinedFunctionError with message "function Hello.world/0 is undefined (module Hello is not available)" + code: Hello.world stacktrace: - test/ex_unit/doc_test_test.exs:106: ExUnit.DocTestTest.Invalid (module) + Hello.world() + (for doctest at) test/ex_unit/doc_test_test.exs:139: (test) """ assert output =~ """ 5) test moduledoc at ExUnit.DocTestTest.Invalid (5) (ExUnit.DocTestTest.ActuallyCompiled) - test/ex_unit/doc_test_test.exs:198 - Doctest failed: expected exception WhatIsThis with message "oops" but got RuntimeError with message "oops" + test/ex_unit/doc_test_test.exs:391 + Doctest failed: expected exception WhatIsThis but got RuntimeError with message "oops" code: raise "oops" stacktrace: - test/ex_unit/doc_test_test.exs:106: ExUnit.DocTestTest.Invalid (module) + test/ex_unit/doc_test_test.exs:142: ExUnit.DocTestTest.Invalid (module) """ assert output =~ """ 6) test moduledoc at ExUnit.DocTestTest.Invalid (6) (ExUnit.DocTestTest.ActuallyCompiled) - test/ex_unit/doc_test_test.exs:198 - Doctest failed: expected exception RuntimeError with message "hello" but got 
RuntimeError with message "oops" + test/ex_unit/doc_test_test.exs:391 + Doctest failed: wrong message for RuntimeError + expected: + "hello" + actual: + "oops" code: raise "oops" stacktrace: - test/ex_unit/doc_test_test.exs:106: ExUnit.DocTestTest.Invalid (module) + test/ex_unit/doc_test_test.exs:145: ExUnit.DocTestTest.Invalid (module) + """ + + assert output =~ """ + 7) test doc at ExUnit.DocTestTest.Invalid.a/0 (7) (ExUnit.DocTestTest.ActuallyCompiled) + test/ex_unit/doc_test_test.exs:391 + Doctest did not compile, got: (SyntaxError) test/ex_unit/doc_test_test.exs:151: syntax error before: '*' + code: 1 + * 1 + stacktrace: + test/ex_unit/doc_test_test.exs:151: ExUnit.DocTestTest.Invalid (module) + """ + + assert output =~ """ + 8) test doc at ExUnit.DocTestTest.Invalid.b/0 (8) (ExUnit.DocTestTest.ActuallyCompiled) + test/ex_unit/doc_test_test.exs:391 + Doctest did not compile, got: (SyntaxError) test/ex_unit/doc_test_test.exs:157: syntax error before: '*' + code: 1 + * 1 + stacktrace: + test/ex_unit/doc_test_test.exs:157: ExUnit.DocTestTest.Invalid (module) + """ + + assert output =~ """ + 9) test doc at ExUnit.DocTestTest.Invalid.dedented_past_fence/0 (9) (ExUnit.DocTestTest.ActuallyCompiled) + test/ex_unit/doc_test_test.exs:391 + Doctest did not compile, got: (SyntaxError) test/ex_unit/doc_test_test.exs:181: unexpected token: "`" (column 5, codepoint U+0060) + code: 3 + ``` + stacktrace: + test/ex_unit/doc_test_test.exs:180: ExUnit.DocTestTest.Invalid (module) + """ + + assert output =~ """ + 10) test doc at ExUnit.DocTestTest.Invalid.indented_not_enough/0 (10) (ExUnit.DocTestTest.ActuallyCompiled) + test/ex_unit/doc_test_test.exs:391 + Doctest did not compile, got: (SyntaxError) test/ex_unit/doc_test_test.exs:165: unexpected token: "`" (column 1, codepoint U+0060) + code: 3 + ` + stacktrace: + test/ex_unit/doc_test_test.exs:164: ExUnit.DocTestTest.Invalid (module) + """ + + assert output =~ """ + 11) test doc at ExUnit.DocTestTest.Invalid.indented_too_much/0 (11) (ExUnit.DocTestTest.ActuallyCompiled) + test/ex_unit/doc_test_test.exs:391 + Doctest did not compile, got: (SyntaxError) test/ex_unit/doc_test_test.exs:173: unexpected token: "`" (column 3, codepoint U+0060) + code: 3 + ``` + stacktrace: + test/ex_unit/doc_test_test.exs:172: ExUnit.DocTestTest.Invalid (module) """ end + test "IEx prefix contains a number" do + defmodule NumberedUsage do + use ExUnit.Case + doctest ExUnit.DocTestTest.Numbered + end + + ExUnit.Server.cases_loaded() + assert capture_io(fn -> ExUnit.run end) =~ "1 test, 0 failures" + end + + test "IEx prompt contains host" do + message = + ~s[unknown IEx prompt: "iex(foo@bar)1> 1 +".\nAccepted formats are: iex>, iex(1)>, ...>, ...(1)>] + + regex = ~r[test/ex_unit/doc_test_test\.exs:\d+: #{Regex.escape(message)}] + + assert_raise ExUnit.DocTest.Error, regex, fn -> + defmodule HostUsage do + use ExUnit.Case + doctest ExUnit.DocTestTest.Host + end + end + end + + test "tags tests as doctests" do + defmodule DoctestTag do + use ExUnit.Case + doctest ExUnit.DocTestTest.NoImport + + setup test do + assert test.doctest + :ok + end + end + + ExUnit.Server.cases_loaded() + assert capture_io(fn -> ExUnit.run end) =~ "2 tests, 0 failures" + end + test "multiple exceptions in one test case is not supported" do assert_raise ExUnit.DocTest.Error, ~r"multiple exceptions in one doctest case are not supported", fn -> defmodule NeverCompiled do @@ -267,33 +557,66 @@ defmodule ExUnit.DocTestTest do end end + test "fails on invalid module" do + assert_raise CompileError, ~r"module 
ExUnit\.DocTestTest\.Unknown is not loaded and could not be found", fn -> + defmodule NeverCompiled do + import ExUnit.DocTest + doctest ExUnit.DocTestTest.Unknown + end + end + end + + test "fails when there are no docs" do + assert_raise ExUnit.DocTest.Error, ~r"could not retrieve the documentation for module ExUnit\.DocTestTest", fn -> + defmodule NeverCompiled do + import ExUnit.DocTest + doctest ExUnit.DocTestTest + end + end + end + test "fails in indentation mismatch" do - assert_raise ExUnit.DocTest.Error, ~r/indentation level mismatch: " iex> bar = 2", should have been 2 spaces/, fn -> + assert_raise ExUnit.DocTest.Error, + ~r[test/ex_unit/doc_test_test\.exs:\d+: indentation level mismatch: " iex> bar = 2", should have been 2 spaces], fn -> defmodule NeverCompiled do import ExUnit.DocTest doctest ExUnit.DocTestTest.IndentationMismatchedPrompt end end - assert_raise ExUnit.DocTest.Error, ~r/indentation level mismatch: " 3", should have been 2 spaces/, fn -> + assert_raise ExUnit.DocTest.Error, + ~r[test/ex_unit/doc_test_test\.exs:\d+: indentation level mismatch: " 3", should have been 2 spaces], fn -> defmodule NeverCompiled do import ExUnit.DocTest doctest ExUnit.DocTestTest.IndentationTooMuch end end - assert_raise ExUnit.DocTest.Error, ~r/indentation level mismatch: \" 3\", should have been 4 spaces/, fn -> + assert_raise ExUnit.DocTest.Error, + ~r[test/ex_unit/doc_test_test\.exs:\d+: indentation level mismatch: \" 3\", should have been 4 spaces], fn -> defmodule NeverCompiled do import ExUnit.DocTest doctest ExUnit.DocTestTest.IndentationNotEnough end end + end - assert_raise ExUnit.DocTest.Error, ~r/expected non-blank line to follow iex> prompt/, fn -> + test "fails with improper termination" do + assert_raise ExUnit.DocTest.Error, + ~r[test/ex_unit/doc_test_test\.exs:\d+: expected non-blank line to follow iex> prompt], fn -> defmodule NeverCompiled do import ExUnit.DocTest doctest ExUnit.DocTestTest.Incomplete end end end + + test "fails on invalid use" do + assert_raise RuntimeError, ~r"cannot define test", fn -> + defmodule FunctionClashFail do + import ExUnit.DocTest + doctest ExUnit.DocTestTest.Invalid + end + end + end end diff --git a/lib/ex_unit/test/ex_unit/filters_test.exs b/lib/ex_unit/test/ex_unit/filters_test.exs index f81fe3af14d..a5212ee279f 100644 --- a/lib/ex_unit/test/ex_unit/filters_test.exs +++ b/lib/ex_unit/test/ex_unit/filters_test.exs @@ -8,8 +8,8 @@ defmodule ExUnit.FiltersTest do test "evaluating filters" do assert ExUnit.Filters.eval([], [:os], %{}, []) == :ok assert ExUnit.Filters.eval([], [os: :win], %{os: :unix}, []) == :ok - assert ExUnit.Filters.eval([], [:os], %{os: :unix}, []) == {:error, :os} - assert ExUnit.Filters.eval([], [os: :unix], %{os: :unix}, []) == {:error, :os} + assert ExUnit.Filters.eval([], [:os], %{os: :unix}, []) == {:error, "due to os filter"} + assert ExUnit.Filters.eval([], [os: :unix], %{os: :unix}, []) == {:error, "due to os filter"} assert ExUnit.Filters.eval([os: :win], [], %{}, []) == :ok assert ExUnit.Filters.eval([os: :win], [], %{os: :unix}, []) == :ok @@ -19,6 +19,15 @@ defmodule ExUnit.FiltersTest do assert ExUnit.Filters.eval([os: :win, os: :unix], [:os], %{os: :win}, []) == :ok end + test "evaluating filters with skip" do + assert ExUnit.Filters.eval([], [], %{}, []) == :ok + assert ExUnit.Filters.eval([], [], %{skip: true}, []) == {:error, "due to skip tag"} + assert ExUnit.Filters.eval([], [], %{skip: "skipped"}, []) == {:error, "skipped"} + assert ExUnit.Filters.eval([], [:os], %{skip: "skipped"}, []) == 
{:error, "skipped"} + assert ExUnit.Filters.eval([:skip], [], %{skip: true}, []) == :ok + assert ExUnit.Filters.eval([:skip], [], %{skip: "skipped"}, []) == :ok + end + test "evaluating filters matches integers" do assert ExUnit.Filters.eval([int: "1"], [], %{int: 1}, []) == :ok assert ExUnit.Filters.eval([int: "1"], [int: 5], %{int: 1}, []) == :ok @@ -34,7 +43,7 @@ defmodule ExUnit.FiltersTest do test "evaluating filter matches regexes" do assert ExUnit.Filters.eval([os: ~r"win"], [], %{os: :win}, []) == :ok - assert ExUnit.Filters.eval([os: ~r"mac"], [os: :unix], %{os: :unix}, []) == {:error, :os} + assert ExUnit.Filters.eval([os: ~r"mac"], [os: :unix], %{os: :unix}, []) == {:error, "due to os filter"} end test "evaluating filter uses special rules for line" do @@ -42,8 +51,8 @@ defmodule ExUnit.FiltersTest do assert ExUnit.Filters.eval([line: 3], [:line], %{line: 3}, tests) == :ok assert ExUnit.Filters.eval([line: 4], [:line], %{line: 3}, tests) == :ok - assert ExUnit.Filters.eval([line: 2], [:line], %{line: 3}, tests) == {:error, :line} - assert ExUnit.Filters.eval([line: 5], [:line], %{line: 3}, tests) == {:error, :line} + assert ExUnit.Filters.eval([line: 2], [:line], %{line: 3}, tests) == {:error, "due to line filter"} + assert ExUnit.Filters.eval([line: 5], [:line], %{line: 3}, tests) == {:error, "due to line filter"} end test "parsing filters" do diff --git a/lib/ex_unit/test/ex_unit/formatter_test.exs b/lib/ex_unit/test/ex_unit/formatter_test.exs index 9756b5f4d3d..eaf0f256fa5 100644 --- a/lib/ex_unit/test/ex_unit/formatter_test.exs +++ b/lib/ex_unit/test/ex_unit/formatter_test.exs @@ -6,25 +6,30 @@ defmodule ExUnit.FormatterTest do import ExUnit.Formatter doctest ExUnit.Formatter - def falsy, do: false - def formatter(_color, msg), do: msg - defmacrop catch_assertion(expr) do quote do try do unquote(expr) rescue - e -> e + ex -> ex end end end - defp case do + defp test_case do %ExUnit.TestCase{name: Hello} end defp test do - %ExUnit.Test{name: :world, case: Hello, tags: [file: __ENV__.file, line: 1]} + %ExUnit.Test{name: :world, case: Hello, tags: %{file: __ENV__.file, line: 1}} + end + + def falsy() do + false + end + + defp formatter(_kind, message) do + message end test "formats test case filters" do @@ -34,7 +39,7 @@ defmodule ExUnit.FormatterTest do end test "formats test errors" do - failure = {:error, catch_error(raise "oops"), []} + failure = [{:error, catch_error(raise "oops"), []}] assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """ 1) world (Hello) test/ex_unit/formatter_test.exs:1 @@ -43,7 +48,7 @@ defmodule ExUnit.FormatterTest do end test "formats test exits" do - failure = {:exit, 1, []} + failure = [{:exit, 1, []}] assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """ 1) world (Hello) test/ex_unit/formatter_test.exs:1 @@ -52,17 +57,17 @@ defmodule ExUnit.FormatterTest do end test "formats test exits with mfa" do - failure = {:exit, {:bye, {:m, :f, []}}, []} + failure = [{:exit, {:bye, {:mod, :fun, []}}, []}] assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """ 1) world (Hello) test/ex_unit/formatter_test.exs:1 - ** (exit) exited in: :m.f() + ** (exit) exited in: :mod.fun() ** (EXIT) :bye """ end test "formats test throws" do - failure = {:throw, 1, []} + failure = [{:throw, 1, []}] assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """ 1) world (Hello) test/ex_unit/formatter_test.exs:1 @@ -71,16 +76,54 @@ defmodule ExUnit.FormatterTest do end test "formats test EXITs" do - failure = 
{{:EXIT, self}, 1, []} + failure = [{{:EXIT, self()}, 1, []}] assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """ 1) world (Hello) test/ex_unit/formatter_test.exs:1 - ** (EXIT from #{inspect self}) 1 + ** (EXIT from #{inspect self()}) 1 + """ + end + + test "formats test errors with code snippets" do + stack = {Hello, :world, 1, [file: __ENV__.file, line: 3]} + failure = [{:error, catch_error(raise "oops"), [stack]}] + assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """ + 1) world (Hello) + test/ex_unit/formatter_test.exs:1 + ** (RuntimeError) oops + code: defmodule ExUnit.FormatterTest do + """ + end + + test "formats reports" do + test = test() + failure = [{:error, catch_error(raise "oops"), []}] + + test = update_in test.tags, + &Map.merge(&1, %{user_id: 1, report: :user_id}) + assert format_test_failure(test, failure, 1, 80, &formatter/2) == """ + 1) world (Hello) + test/ex_unit/formatter_test.exs:1 + ** (RuntimeError) oops + tags: + user_id: 1 + """ + + test = update_in test.tags, + &Map.merge(&1, %{many_ids: Enum.to_list(1..30), report: [:user_id, :many_ids]}) + assert format_test_failure(test, failure, 1, 80, &formatter/2) == """ + 1) world (Hello) + test/ex_unit/formatter_test.exs:1 + ** (RuntimeError) oops + tags: + many_ids: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, + 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30] + user_id: 1 """ end test "formats stacktraces" do - failure = {:error, catch_error(raise "oops"), [{Oops, :wrong, 1, [file: "formatter_test.exs", line: 1]}]} + failure = [{:error, catch_error(raise "oops"), [{Oops, :wrong, 1, [file: "formatter_test.exs", line: 1]}]}] assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """ 1) world (Hello) test/ex_unit/formatter_test.exs:1 @@ -91,46 +134,177 @@ defmodule ExUnit.FormatterTest do end test "formats assertions" do - failure = {:error, catch_assertion(assert ExUnit.FormatterTest.falsy), []} + failure = [{:error, catch_assertion(assert ExUnit.FormatterTest.falsy), []}] + assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """ + 1) world (Hello) + test/ex_unit/formatter_test.exs:1 + Expected truthy, got false + code: assert ExUnit.FormatterTest.falsy() + """ + end + + test "formats binding when there is some" do + many_ids = Enum.to_list(1..30) + no_ids = [] + failure = [{:error, catch_assertion(assert Enum.take(many_ids, 3) == no_ids), []}] assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """ 1) world (Hello) test/ex_unit/formatter_test.exs:1 + Assertion with == failed + code: assert Enum.take(many_ids, 3) == no_ids + left: [1, 2, 3] + right: [] + variables: + many_ids = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30] + """ + end + + test "formats multiple assertions" do + failure = [{:error, catch_assertion(assert ExUnit.FormatterTest.falsy), []}, + {:error, catch_assertion(assert 1 == 2), []}] + assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """ + 1) world (Hello) + test/ex_unit/formatter_test.exs:1 + + Failure #1 Expected truthy, got false - code: ExUnit.FormatterTest.falsy() + code: assert ExUnit.FormatterTest.falsy() + + Failure #2 + Assertion with == failed + code: assert 1 == 2 + left: 1 + right: 2 """ end - test "formats test case errors" do - failure = {:error, catch_error(raise "oops"), []} - assert format_test_case_failure(case(), failure, 1, 80, &formatter/2) =~ """ - 1) Hello: failure on setup_all callback, tests 
invalidated + # TODO: Remove this check once we depend only on 20 + if :erlang.system_info(:otp_release) >= '20' do + defp trim_multiline_whitespace(string) do + String.replace(string, ~r"\n\s+\n", "\n\n") + end + + test "blames function clause error" do + {error, stack} = + try do + Access.fetch(:foo, :bar) + rescue + e -> {Exception.normalize(:error, e, System.stacktrace()), System.stacktrace()} + end + + failure = format_test_failure(test(), [{:error, error, [hd(stack)]}], 1, 80, &formatter/2) + + assert trim_multiline_whitespace(failure) =~ """ + 1) world (Hello) + test/ex_unit/formatter_test.exs:1 + ** (FunctionClauseError) no function clause matching in Access.fetch/2 + + The following arguments were given to Access.fetch/2: + + # 1 + :foo + + # 2 + :bar + + Attempted function clauses (showing 5 out of 5): + + def fetch(%struct{} = container, key) + """ + + assert failure =~ ~r"\(elixir\) lib/access\.ex:\d+: Access\.fetch/2" + end + end + + test "formats setup all errors" do + failure = [{:error, catch_error(raise "oops"), []}] + assert format_test_case_failure(test_case(), failure, 1, 80, &formatter/2) =~ """ + 1) Hello: failure on setup_all callback, test invalidated ** (RuntimeError) oops """ end test "formats assertions with operators with no limit" do - failure = {:error, catch_assertion(assert [1, 2, 3] == [4, 5, 6]), []} - assert format_test_case_failure(case(), failure, 1, :infinity, &formatter/2) =~ """ - 1) Hello: failure on setup_all callback, tests invalidated + failure = [{:error, catch_assertion(assert [1, 2, 3] == [4, 5, 6]), []}] + assert format_test_case_failure(test_case(), failure, 1, :infinity, &formatter/2) =~ """ + 1) Hello: failure on setup_all callback, test invalidated Assertion with == failed - code: [1, 2, 3] == [4, 5, 6] - lhs: [1, 2, 3] - rhs: [4, 5, 6] + code: assert [1, 2, 3] == [4, 5, 6] + left: [1, 2, 3] + right: [4, 5, 6] """ end test "formats assertions with operators with column limit" do - failure = {:error, catch_assertion(assert [1, 2, 3] == [4, 5, 6]), []} - assert format_test_case_failure(case(), failure, 1, 15, &formatter/2) =~ """ - 1) Hello: failure on setup_all callback, tests invalidated + failure = [{:error, catch_assertion(assert [1, 2, 3] == [4, 5, 6]), []}] + assert format_test_case_failure(test_case(), failure, 1, 15, &formatter/2) =~ """ + 1) Hello: failure on setup_all callback, test invalidated + Assertion with == failed + code: assert [1, 2, 3] == [4, 5, 6] + left: [1, + 2, + 3] + right: [4, + 5, + 6] + """ + end + + test "formats assertions with message with multiple lines" do + message = "Some meaningful error:\nuseful info\nanother useful info" + failure = [{:error, catch_assertion(assert(false, message)), []}] + assert format_test_case_failure(test_case(), failure, 1, :infinity, &formatter/2) =~ """ + 1) Hello: failure on setup_all callback, test invalidated + Some meaningful error: + useful info + another useful info + """ + end + + defmodule BadInspect do + defstruct key: 0 + + defimpl Inspect do + def inspect(struct, opts) when is_atom(opts) do + struct.unknown + end + end + end + + test "inspect failure" do + failure = [{:error, catch_assertion(assert :will_fail == %BadInspect{}), []}] + + message = "got FunctionClauseError with message \"no function clause matching " <> + "in Inspect.ExUnit.FormatterTest.BadInspect.inspect/2\" while inspecting " <> + "%{__struct__: ExUnit.FormatterTest.BadInspect, key: 0}" + + assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """ + 1) world (Hello) + 
test/ex_unit/formatter_test.exs:1 Assertion with == failed - code: [1, 2, 3] == [4, 5, 6] - lhs: [1, - 2, - 3] - rhs: [4, - 5, - 6] + code: assert :will_fail == %BadInspect{} + left: :will_fail + right: %Inspect.Error{message: #{inspect message}} + """ + end + + defmodule BadMessage do + defexception key: 0 + + def message(_message) do + raise "oops" + end + end + + test "message failure" do + failure = [{:error, catch_error(raise BadMessage), []}] + message = "got RuntimeError with message \"oops\" while retrieving Exception.message/1 " <> + "for %ExUnit.FormatterTest.BadMessage{key: 0}" + assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """ + 1) world (Hello) + test/ex_unit/formatter_test.exs:1 + ** (ExUnit.FormatterTest.BadMessage) #{message} """ end end diff --git a/lib/ex_unit/test/ex_unit/register_test.exs b/lib/ex_unit/test/ex_unit/register_test.exs new file mode 100644 index 00000000000..5ecd0766c5d --- /dev/null +++ b/lib/ex_unit/test/ex_unit/register_test.exs @@ -0,0 +1,74 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule ExUnit.RegisterTest do + use ExUnit.Case + + import ExUnit.CaptureIO + + test "singular test types" do + on_exit fn -> + ExUnit.configure(plural_rules: %{}) + end + + ExUnit.plural_rule("property", "properties") + + defmodule SingularTestTypeCase do + use ExUnit.Case + + :"property is true" = ExUnit.Case.register_test(__ENV__, :property, "is true", []) + def unquote(:"property is true")(_) do + assert succeed() + end + + test "test true" do + assert succeed() + end + + defp succeed, do: true + end + + ExUnit.Server.cases_loaded() + + assert capture_io(fn -> + assert ExUnit.run == %{failures: 0, skipped: 0, total: 2} + end) =~ "1 property, 1 test, 0 failures" + end + + test "plural test types" do + on_exit fn -> + ExUnit.configure(plural_rules: %{}) + end + + ExUnit.plural_rule("property", "properties") + + defmodule PluralTestTypeCase do + use ExUnit.Case + + :"property is true" = ExUnit.Case.register_test(__ENV__, :property, "is true", []) + def unquote(:"property is true")(_) do + assert succeed() + end + + :"property is also true" = ExUnit.Case.register_test(__ENV__, :property, "is also true", []) + def unquote(:"property is also true")(_) do + assert succeed() + end + + test "test true" do + assert succeed() + end + + test "test true also" do + assert succeed() + end + + defp succeed, do: true + end + + ExUnit.Server.cases_loaded() + + assert capture_io(fn -> + assert ExUnit.run == %{failures: 0, skipped: 0, total: 4} + end) =~ "2 properties, 2 tests, 0 failures" + end +end diff --git a/lib/ex_unit/test/ex_unit_test.exs b/lib/ex_unit/test/ex_unit_test.exs index 9513aef1c9d..b0ed32130a9 100644 --- a/lib/ex_unit/test/ex_unit_test.exs +++ b/lib/ex_unit/test/ex_unit_test.exs @@ -5,9 +5,9 @@ defmodule ExUnitTest do import ExUnit.CaptureIO - test "it supports many runs" do + test "supports many runs" do defmodule SampleTest do - use ExUnit.Case, async: false + use ExUnit.Case test "true" do assert false @@ -18,14 +18,16 @@ defmodule ExUnitTest do end end + ExUnit.Server.cases_loaded() + assert capture_io(fn -> - assert ExUnit.run == %{failures: 2, total: 2} + assert ExUnit.run == %{failures: 2, skipped: 0, total: 2} end) =~ "2 tests, 2 failures" end - test "it doesn't hang on exists" do + test "doesn't hang on exits" do defmodule EventServerTest do - use ExUnit.Case, async: false + use ExUnit.Case test "spawn and crash" do spawn_link(fn -> @@ -35,9 +37,67 @@ defmodule ExUnitTest do end end + ExUnit.Server.cases_loaded() + assert 
capture_io(fn -> - assert ExUnit.run == %{failures: 1, total: 1} - end) =~ "1 tests, 1 failures" + assert ExUnit.run == %{failures: 1, skipped: 0, total: 1} + end) =~ "1 test, 1 failure" + end + + test "supports timeouts" do + defmodule TimeoutTest do + use ExUnit.Case + + @tag timeout: 10 + test "ok" do + Process.sleep(:infinity) + end + end + + ExUnit.Server.cases_loaded() + + output = capture_io(fn -> ExUnit.run end) + assert output =~ "** (ExUnit.TimeoutError) test timed out after 10ms" + assert output =~ ~r"\(elixir\) lib/process\.ex:\d+: Process\.sleep/1" + end + + test "supports configured timeout" do + defmodule ConfiguredTimeoutTest do + use ExUnit.Case + + test "ok" do + Process.sleep(:infinity) + end + end + + ExUnit.configure(timeout: 5) + ExUnit.Server.cases_loaded() + output = capture_io(fn -> ExUnit.run end) + assert output =~ "** (ExUnit.TimeoutError) test timed out after 5ms" + after + ExUnit.configure(timeout: 60_000) + end + + test "sets max cases to one with trace enabled" do + old_config = ExUnit.configuration() + on_exit(fn -> ExUnit.configure(old_config) end) + + ExUnit.start(trace: true, max_cases: 10, autorun: false) + config = ExUnit.configuration() + assert config[:trace] + assert config[:max_cases] == 1 + assert config[:timeout] == 60_000 + end + + test "does not set timeout to infinity and the max cases to 1 with trace disabled" do + old_config = ExUnit.configuration() + on_exit(fn -> ExUnit.configure(old_config) end) + + ExUnit.start(trace: false, autorun: false) + config = ExUnit.configuration() + refute config[:trace] + assert config[:max_cases] == System.schedulers_online * 2 + assert config[:timeout] == 60_000 end test "filtering cases with tags" do @@ -56,29 +116,285 @@ defmodule ExUnitTest do test "three", do: :ok end - test_cases = ExUnit.Server.start_run + {result, output} = run_with_filter([], []) # Empty because it is already loaded + assert result == %{failures: 1, skipped: 0, total: 4} + assert output =~ "4 tests, 1 failure" - assert run_with_filter([], test_cases) == - %{failures: 1, total: 4} + {result, output} = run_with_filter([exclude: [even: true]], [ParityTest]) + assert result == %{failures: 0, skipped: 1, total: 4} + assert output =~ "4 tests, 0 failures, 1 skipped" - assert run_with_filter([exclude: [even: true]], test_cases) == - %{failures: 0, total: 3} + {result, output} = run_with_filter([exclude: :even], [ParityTest]) + assert result == %{failures: 0, skipped: 3, total: 4} + assert output =~ "4 tests, 0 failures, 3 skipped" - assert run_with_filter([exclude: :even], test_cases) == - %{failures: 0, total: 1} + {result, output} = run_with_filter([exclude: :even, include: [even: true]], [ParityTest]) + assert result == %{failures: 1, skipped: 2, total: 4} + assert output =~ "4 tests, 1 failure, 2 skipped" - assert run_with_filter([exclude: :even, include: [even: true]], test_cases) == - %{failures: 1, total: 2} + {result, output} = run_with_filter([exclude: :test, include: [even: true]], [ParityTest]) + assert result == %{failures: 1, skipped: 3, total: 4} + assert output =~ "4 tests, 1 failure, 3 skipped" + end + + test "log capturing" do + defmodule LogCapturingTest do + use ExUnit.Case + + require Logger + + setup_all do + :ok = Logger.remove_backend(:console) + on_exit(fn -> Logger.add_backend(:console, flush: true) end) + end + + @tag :capture_log + test "one" do + Logger.debug("one") + assert 1 == 1 + end - assert run_with_filter([exclude: :test, include: [even: true]], test_cases) == - %{failures: 1, total: 1} + @tag :capture_log 
+ test "two" do + Logger.debug("two") + assert 1 == 2 + end + + @tag capture_log: [] + test "three" do + Logger.debug("three") + assert 1 == 2 + end + + test "four" do + Logger.debug("four") + assert 1 == 2 + end + end + + ExUnit.Server.cases_loaded() + output = capture_io(&ExUnit.run/0) + assert output =~ "[debug] two" + refute output =~ "[debug] one" + assert output =~ "[debug] three" + refute output =~ "[debug] four" end - defp run_with_filter(filters, {async, sync, load_us}) do - opts = Keyword.merge(ExUnit.configuration, filters) - capture_io fn -> - Process.put :capture_result, ExUnit.Runner.run(async, sync, opts, load_us) + test "supports multi errors" do + capture_io :stderr, fn -> + defmodule MultiTest do + use ExUnit.Case + + test "multi" do + error1 = + try do + assert 1 = 2 + rescue e in ExUnit.AssertionError -> + {:error, e, System.stacktrace} + end + + error2 = + try do + assert 3 > 4 + rescue e in ExUnit.AssertionError -> + {:error, e, System.stacktrace} + end + + raise ExUnit.MultiError, errors: [error1, error2] + end + end + end + + ExUnit.Server.cases_loaded() + + output = capture_io(fn -> + assert ExUnit.run == %{failures: 1, skipped: 0, total: 1} + end) + + assert output =~ "1 test, 1 failure" + assert output =~ "1) test multi (ExUnitTest.MultiTest)" + assert output =~ "Failure #1" + assert output =~ "Failure #2" + + assert_raise ExUnit.MultiError, ~r/oops/, fn -> + error = {:error, RuntimeError.exception("oops"), System.stacktrace} + raise ExUnit.MultiError, errors: [error] + end + end + + test "raises friendly error for duplicate test names" do + message = ~S("test duplicate" is already defined in ExUnitTest.TestWithSameNames) + + assert_raise ExUnit.DuplicateTestError, message, fn -> + defmodule TestWithSameNames do + use ExUnit.Case + + test "duplicate" do + assert true + end + + test "duplicate" do + assert true + end + end end - Process.get :capture_result + end + + test "produces error on not implemented tests" do + defmodule TestNotImplemented do + use ExUnit.Case + + setup context do + assert context[:not_implemented] + :ok + end + + test "this is not implemented yet" + end + + ExUnit.Server.cases_loaded() + + output = capture_io(fn -> + assert ExUnit.run == %{failures: 1, skipped: 0, total: 1} + end) + + assert output =~ "Not implemented" + assert output =~ "1 test, 1 failure" + end + + test "skips tagged test with skip" do + defmodule TestSkipped do + use ExUnit.Case + + setup context do + assert context[:not_implemented] + :ok + end + + @tag :skip + test "this will raise", do: raise "oops" + + @tag skip: "won't work" + test "this will also raise", do: raise "oops" + end + + ExUnit.Server.cases_loaded() + + output = capture_io(fn -> + assert ExUnit.run == %{failures: 0, skipped: 2, total: 2} + end) + + assert output =~ "2 tests, 0 failures, 2 skipped" + end + + test "filtering cases with :case tag" do + defmodule FirstTestCase do + use ExUnit.Case + test "ok", do: :ok + end + + defmodule SecondTestCase do + use ExUnit.Case + test "false", do: assert false + end + + {result, output} = run_with_filter([exclude: :case], []) # Empty because it is already loaded + assert result == %{failures: 0, skipped: 2, total: 2} + assert output =~ "2 tests, 0 failures, 2 skipped" + + {result, output} = + [exclude: :test, include: [case: "ExUnitTest.SecondTestCase"]] + |> run_with_filter([FirstTestCase, SecondTestCase]) + assert result == %{failures: 1, skipped: 1, total: 2} + assert output =~ "1) test false (ExUnitTest.SecondTestCase)" + assert output =~ "2 tests, 1 failure, 
1 skipped" + end + + test "raises on reserved tag :file in module" do + assert_raise RuntimeError, "cannot set tag :file because it is reserved by ExUnit", fn -> + defmodule ReservedTagFile do + use ExUnit.Case + + @tag file: "oops" + test "sample", do: :ok + end + end + end + + test "raises on reserved tag :async in module" do + assert_raise RuntimeError, "cannot set tag :async because it is reserved by ExUnit", fn -> + defmodule ReservedTagAsync do + use ExUnit.Case + + @tag async: true + test "sample", do: :ok + end + end + end + + test "raises on reserved tag :file in setup" do + defmodule ReservedSetupTagFile do + use ExUnit.Case + + setup do + {:ok, file: :foo} + end + + test "sample", do: :ok + end + + ExUnit.Server.cases_loaded() + + output = capture_io(fn -> + assert ExUnit.run == %{failures: 1, skipped: 0, total: 1} + end) + + assert output =~ "trying to set reserved field :file" + end + + test "raises on reserved tag :async in setup" do + defmodule ReservedSetupTagAsync do + use ExUnit.Case + + setup do + {:ok, async: true} + end + + test "sample", do: :ok + end + + ExUnit.Server.cases_loaded() + + output = capture_io(fn -> + assert ExUnit.run == %{failures: 1, skipped: 0, total: 1} + end) + + assert output =~ "trying to set reserved field :async" + end + + test "does not raise on reserved tag in setup_all (lower priority)" do + defmodule ReservedSetupAllTag do + use ExUnit.Case + + setup_all do + {:ok, file: :foo} + end + + test "sample", do: :ok + end + + ExUnit.Server.cases_loaded() + + capture_io(fn -> + assert ExUnit.run == %{failures: 0, skipped: 0, total: 1} + end) + end + + defp run_with_filter(filters, cases) do + Enum.each(cases, &ExUnit.Server.add_sync_case/1) + ExUnit.Server.cases_loaded() + opts = Keyword.merge(ExUnit.configuration, filters) + output = capture_io fn -> Process.put(:capture_result, ExUnit.Runner.run(opts, nil)) end + {Process.get(:capture_result), output} end end diff --git a/lib/ex_unit/test/test_helper.exs b/lib/ex_unit/test/test_helper.exs index 3633e26c6e6..b406b9cae62 100644 --- a/lib/ex_unit/test/test_helper.exs +++ b/lib/ex_unit/test/test_helper.exs @@ -1,3 +1,4 @@ +Logger.configure_backend(:console, colors: [enabled: false]) ExUnit.start [trace: "--trace" in System.argv] # Beam files compiled on demand diff --git a/lib/iex/lib/iex.ex b/lib/iex/lib/iex.ex index e104879bc46..819ab230ff1 100644 --- a/lib/iex/lib/iex.ex +++ b/lib/iex/lib/iex.ex @@ -5,7 +5,7 @@ defmodule IEx do This module is the main entry point for Interactive Elixir and in this documentation we will talk a bit about how IEx works. - Notice that some of the functionality described here will not be available + Notice that some of the functionalities described here will not be available depending on your terminal. In particular, if you get a message saying that the smart terminal could not be run, some of the features described here won't work. @@ -15,6 +15,18 @@ defmodule IEx do IEx provides a bunch of helpers. They can be accessed by typing `h()` into the shell or as a documentation for the `IEx.Helpers` module. + ## Autocomplete + + To discover all available functions for a module, type the module name + followed by a dot, then press tab to trigger autocomplete. For example: + + Enum. + + Such function may not be available on some Windows shells. You may need + to pass the `--werl` flag when starting IEx, as in `iex --werl` for it + to work. `--werl` may be permanently enabled by setting the `IEX_WITH_WERL` + environment variable. 
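    As a rough illustration (a hypothetical session; the exact suggestions
    depend on which modules are loaded), completion first fills in the longest
    unambiguous prefix and then lists the matching arities:

        iex(1)> Enum.cou      # tab completes the prefix to Enum.count
        iex(1)> Enum.count    # tab again lists count/1 and count/2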
+ ## The Break command Inside IEx, hitting `Ctrl+C` will open up the `BREAK` menu. In this @@ -47,7 +59,7 @@ defmodule IEx do Now, try to access the `hello` variable again: hello - ** (UndefinedFunctionError) undefined function: hello/0 + ** (UndefinedFunctionError) undefined function hello/0 The command above fails because we have switched shells. Since shells are isolated from each other, you can't access the @@ -67,8 +79,8 @@ defmodule IEx do $ iex --sname foo iex(foo@HOST)1> - The string in between parenthesis in the prompt is the name - of your node. We can retrieve it by calling the `node()` + The string between the parentheses in the prompt is the name + of your node. We can retrieve it by calling the `node/0` function: iex(foo@HOST)1> node() @@ -91,7 +103,7 @@ defmodule IEx do as it was defined only in the other shell: iex(bar@HOST)1> Hello.world - ** (UndefinedFunctionError) undefined function: Hello.world/0 + ** (UndefinedFunctionError) undefined function Hello.world/0 However, we can connect to the other shell remotely. Open up the User Switch prompt (Ctrl+G) and type: @@ -113,38 +125,41 @@ defmodule IEx do Where "remsh" means "remote shell". In general, Elixir supports: - * remsh from an elixir node to an elixir node - * remsh from a plain erlang node to an elixir node (through the ^G menu) - * remsh from an elixir node to a plain erlang node (and get an erl shell there) + * remsh from an Elixir node to an Elixir node + * remsh from a plain Erlang node to an Elixir node (through the ^G menu) + * remsh from an Elixir node to a plain Erlang node (and get an `erl` shell there) Connecting an Elixir shell to a remote node without Elixir is **not** supported. ## The .iex.exs file - When starting IEx, it will look for a local `.iex.exs` file (located in the current - working directory), then a global one (located at `~/.iex.exs`) and will load the - first one it finds (if any). The code in the chosen .iex file will be + When starting, IEx looks for a local `.iex.exs` file (located in the current + working directory), then a global one (located at `~/.iex.exs`) and loads the + first one it finds (if any). The code in the loaded `.iex.exs` file is evaluated in the shell's context. So, for instance, any modules that are - loaded or variables that are bound in the .iex file will be available in the + loaded or variables that are bound in the `.iex.exs` file will be available in the shell after it has booted. - Sample contents of a local .iex file: + For example, take the following `.iex.exs` file: - # source another `.iex` file + # Load another ".iex.exs" file import_file "~/.iex.exs" - # print something before the shell starts + # Import some module from lib that may not yet have been defined + import_if_available MyApp.Mod + + # Print something before the shell starts IO.puts "hello world" - # bind a variable that'll be accessible in the shell + # Bind a variable that'll be accessible in the shell value = 13 - Running the shell in the directory where the above .iex file is located + Running IEx in the directory where the above `.iex.exs` file is located results in: $ iex - Erlang 17 [...] + Erlang 19 [...] hello world Interactive Elixir - press Ctrl+C to exit (type h() ENTER for help) @@ -152,27 +167,27 @@ defmodule IEx do 13 It is possible to load another file by supplying the `--dot-iex` - option to iex. See `iex --help`. + option to IEx. See `iex --help`. ## Configuring the shell - There are a number of customization options provided by the shell. 
Take a look + There are a number of customization options provided by IEx. Take a look at the docs for the `IEx.configure/1` function by typing `h IEx.configure/1`. Those options can be configured in your project configuration file or globally - by calling `IEx.configure/1` from your `~/.iex.exs` file like this: + by calling `IEx.configure/1` from your `~/.iex.exs` file. For example: - # .iex + # .iex.exs IEx.configure(inspect: [limit: 3]) - ### now run the shell ### + Now run the shell: $ iex - Erlang 17 (erts-5.10.1) [...] + Erlang 19 [...] Interactive Elixir - press Ctrl+C to exit (type h() ENTER for help) iex(1)> [1, 2, 3, 4, 5] - [1,2,3,...] + [1, 2, 3, ...] ## Expressions in IEx @@ -207,13 +222,30 @@ defmodule IEx do ...(1)> #iex:break ** (TokenMissingError) iex:1: incomplete expression + ## Exiting the shell + + There are a few ways to quit the IEx shell: + + * via the `BREAK` menu (available via `Ctrl+C`) by typing `q`, `Enter` + * by hitting `Ctrl+C`, `Ctrl+C` + * by hitting `Ctrl+\` + + If you are connected to remote shell, it remains alive after disconnection. """ @doc """ Configures IEx. - The supported options are: `:colors`, `:inspect`, - `:default_prompt`, `:alive_prompt` and `:history_size`. + The supported options are: + + * `:colors` + * `:inspect` + * `:width` + * `:history_size` + * `:default_prompt` + * `:alive_prompt` + + They are discussed individually in the sections below. ## Colors @@ -221,35 +253,65 @@ defmodule IEx do shell. See documentation for the `IO.ANSI` module for the list of supported colors and attributes. - The value is a keyword list. List of supported keys: + List of supported keys in the keyword list: * `:enabled` - boolean value that allows for switching the coloring on and off * `:eval_result` - color for an expression's resulting value - * `:eval_info` - … various informational messages - * `:eval_error` - … error messages - * `:stack_app` - … the app in stack traces - * `:stack_info` - … the remaining info in stacktraces - * `:ls_directory` - … for directory entries (ls helper) - * `:ls_device` - … device entries (ls helper) - - When printing documentation, IEx will convert the markdown - documentation to ANSI as well. Those can be configured via: - - * `:doc_code` — the attributes for code blocks (cyan, bright) + * `:eval_info` - ... various informational messages + * `:eval_error` - ... error messages + * `:stack_info` - ... the stacktrace color + * `:blame_same` - ... when blaming source with valid match + * `:blame_diff` - ... when blaming source with no match + * `:ls_directory` - ... for directory entries (ls helper) + * `:ls_device` - ... device entries (ls helper) + + When printing documentation, IEx will convert the Markdown + documentation to ANSI as well. Colors for this can be configured + via: + + * `:doc_code` - the attributes for code blocks (cyan, bright) * `:doc_inline_code` - inline code (cyan) * `:doc_headings` - h1 and h2 (yellow, bright) - * `:doc_title` — the overall heading for the output (reverse,yellow,bright) + * `:doc_title` - the overall heading for the output (reverse, yellow, bright) * `:doc_bold` - (bright) * `:doc_underline` - (underline) + IEx will also color inspected expressions using the `:syntax_colors` + option. 
Such can be disabled with: + + IEx.configure [colors: [syntax_colors: false]] + + You can also configure the syntax colors, however, as desired: + + IEx.configure [colors: [syntax_colors: [atom: :red]]] + + Configuration for most built-in data types are supported: `:atom`, + `:string`, `:binary`, `:list`, `:number`, `:boolean`, `:nil`, etc. + The default is: + + [number: :magenta, atom: :cyan, string: :green, + boolean: :magenta, nil: :magenta] + ## Inspect A keyword list containing inspect options used by the shell - when printing results of expression evaluation. Defailt to + when printing results of expression evaluation. Default to pretty formatting with a limit of 50 entries. + To show all entries, configure the limit to `:infinity`: + + IEx.configure [inspect: [limit: :infinity]] + See `Inspect.Opts` for the full list of options. + ## Width + + An integer indicating the maximum number of columns to use in output. + The default value is 80 columns. The actual output width is the minimum + of this number and result of `:io.columns`. This way you can configure IEx + to be your largest screen size and it should always take up the full width + of your current terminal screen. + ## History size Number of expressions and their results to keep in the history. @@ -260,12 +322,12 @@ defmodule IEx do This is an option determining the prompt displayed to the user when awaiting input. - The value is a keyword list. Two prompt types: + The value is a keyword list with two possible keys representing prompt types: - * `:default_prompt` - used when `Node.alive?` returns false - * `:alive_prompt` - used when `Node.alive?` returns true + * `:default_prompt` - used when `Node.alive?/0` returns `false` + * `:alive_prompt` - used when `Node.alive?/0` returns `true` - The part of the listed in the following of the prompt string is replaced. + The following values in the prompt string will be replaced appropriately: * `%counter` - the index of the history * `%prefix` - a prefix given by `IEx.Server` @@ -273,54 +335,35 @@ defmodule IEx do """ def configure(options) do - Enum.each options, fn {k, v} -> - Application.put_env(:iex, k, configure(k, v)) - end - end - - defp configure(k, v) when k in [:colors, :inspect] and is_list(v) do - Keyword.merge(Application.get_env(:iex, k), v) - end - - defp configure(:history_size, v) when is_integer(v) do - v - end - - defp configure(k, v) when k in [:default_prompt, :alive_prompt] and is_binary(v) do - v - end - - defp configure(k, v) do - raise ArgumentError, "invalid value #{inspect v} for configuration #{inspect k}" + IEx.Config.configure(options) end @doc """ Returns IEx configuration. """ def configuration do - Application.get_all_env(:iex) + IEx.Config.configuration() end @doc """ Registers a function to be invoked after the IEx process is spawned. """ def after_spawn(fun) when is_function(fun) do - Application.put_env(:iex, :after_spawn, [fun|after_spawn]) + IEx.Config.after_spawn(fun) end @doc """ Returns registered `after_spawn` callbacks. """ def after_spawn do - {:ok, list} = Application.fetch_env(:iex, :after_spawn) - list + IEx.Config.after_spawn() end @doc """ Returns `true` if IEx was started. """ def started? do - Application.get_env(:iex, :started, false) + IEx.Config.started?() end @doc """ @@ -329,35 +372,28 @@ defmodule IEx do ANSI escapes in `string` are not processed in any way. 
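      For instance (a sketch, assuming colors are enabled in the current shell;
      with colors disabled the string is returned as-is):

          IEx.color(:eval_error, "oops")
          #=> "oops" wrapped in the ANSI sequences configured for :eval_error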
""" def color(color, string) do - colors = Application.get_env(:iex, :colors) - - if colors[:enabled] do - ansi = Keyword.get(colors, color, default_color(color)) - IO.ANSI.escape_fragment("%{#{ansi}}", true) <> string <> - IO.ANSI.escape_fragment("%{reset}", true) - else - string + case IEx.Config.color(color) do + nil -> + string + ansi -> + [ansi | string] |> IO.ANSI.format(true) |> IO.iodata_to_binary() end end @doc """ Gets the IEx width for printing. - Used by helpers and it has a maximum cap of 80 chars. + Used by helpers and it has a default maximum cap of 80 chars. """ def width do - case :io.columns() do - {:ok, width} -> min(width, 80) - {:error, _} -> 80 - end + IEx.Config.width() end @doc """ Gets the options used for inspecting. """ def inspect_opts do - Application.get_env(:iex, :inspect) ++ - [width: width(), pretty: true] + IEx.Config.inspect_opts() end @doc """ @@ -366,9 +402,9 @@ defmodule IEx do This is useful for debugging a particular chunk of code and inspect the state of a particular process. The process is temporarily changed to trap exits (i.e. the process flag - `:trap_exit` is set to true) and has the `group_leader` changed + `:trap_exit` is set to `true`) and has the `group_leader` changed to support ANSI escape codes. Those values are reverted by - calling `respawn`, which starts a new IEx shell, freeing up + calling `respawn/0`, which starts a new IEx shell, freeing up the pried one. When a process is pried, all code runs inside IEx and, as @@ -379,7 +415,7 @@ defmodule IEx do ## Examples Let's suppose you want to investigate what is happening - with some particular function. By invoking `IEx.pry` from + with some particular function. By invoking `IEx.pry/1` from the function, IEx will allow you to access its binding (variables), verify its lexical information and access the process information. Let's see an example: @@ -399,43 +435,72 @@ defmodule IEx do the shell will be reset and you gain access to all variables and the lexical scope from above: - pry(1)> map([a,b,c], &IO.inspect(&1)) + pry(1)> map([a, b, c], &IO.inspect(&1)) 1 2 3 - Keep in mind that `IEx.pry` runs in the caller process, + Keep in mind that `IEx.pry/1` runs in the caller process, blocking the caller during the evaluation cycle. The caller - process can be freed by calling `respawn`, which starts a + process can be freed by calling `respawn/0`, which starts a new IEx evaluation cycle, letting this one go: - pry(2)> respawn + pry(2)> respawn() true Interactive Elixir - press Ctrl+C to exit (type h() ENTER for help) Setting variables or importing modules in IEx does not - affect the caller the environment (hence it is called `pry`). + affect the caller's environment (hence it is called `pry`). """ - defmacro pry(timeout \\ 1000) do + defmacro pry(timeout \\ 5000) do quote do - env = __ENV__ - meta = "#{inspect self} at #{Path.relative_to_cwd(env.file)}:#{env.line}" - opts = [binding: binding, dot_iex_path: "", env: env, prefix: "pry"] - res = IEx.Server.take_over("Request to pry #{meta}", opts, unquote(timeout)) - - # We cannot use colors because IEx may be off. - case res do - {:error, :self} = err -> - IO.puts :stdio, "IEx cannot pry itself." - {:error, :no_iex} = err -> - IO.puts :stdio, "Cannot pry #{meta}. Is an IEx shell running?" - _ -> - :ok + IEx.pry(binding(), __ENV__, unquote(timeout)) + end + end + + @doc """ + Callback for `IEx.pry/1`. + + You can invoke this function directly when you are not able to invoke + `IEx.pry/1` as a macro. 
This function expects the binding (from + `Kernel.binding/0`), the environment (from `__ENV__/0`) and the timeout + (a sensible default is 5000). + """ + def pry(binding, env, timeout) do + opts = [binding: binding, dot_iex_path: "", env: env, prefix: "pry"] + meta = "#{inspect self()} at #{Path.relative_to_cwd(env.file)}:#{env.line}" + desc = + if File.regular?(env.file) do + parse_file(env) + else + "" end - res + res = IEx.Server.take_over("Request to pry #{meta}#{desc}", opts, timeout) + + # We cannot use colors because IEx may be off. + case res do + {:error, :no_iex} -> + extra = + case :os.type do + {:win32, _} -> " If you are Windows, you may need to start IEx with the --werl flag." + _ -> "" + end + IO.puts :stdio, "Cannot pry #{meta}. Is an IEx shell running?" <> extra + _ -> + :ok end + + res + end + + defp parse_file(env) do + lines = + env.file + |> File.stream! + |> Enum.slice(max(env.line - 3, 0), 5) + Enum.intersperse(["\n\n" | lines], " ") end ## Callbacks @@ -450,9 +515,9 @@ defmodule IEx do _ -> :init.wait_until_started() end - start_iex() - set_expand_fun() - run_after_spawn() + :ok = start_iex() + :ok = set_expand_fun() + :ok = run_after_spawn() IEx.Server.start(opts, mfa) end end @@ -463,29 +528,27 @@ defmodule IEx do ## Helpers defp start_iex() do - unless started? do - Application.start(:elixir) - Application.start(:iex) - Application.put_env(:iex, :started, true) - - colors = [enabled: IO.ANSI.terminal?] ++ - Application.get_env(:iex, :colors) - Application.put_env(:iex, :colors, colors) - end + {:ok, _} = Application.ensure_all_started(:iex) + :ok end defp set_expand_fun do gl = Process.group_leader glnode = node gl - if glnode != node do - ensure_module_exists glnode, IEx.Remsh - expand_fun = IEx.Remsh.expand node - else - expand_fun = &IEx.Autocomplete.expand(&1) - end + expand_fun = + if glnode != node() do + _ = ensure_module_exists glnode, IEx.Remsh + IEx.Remsh.expand node() + else + &IEx.Autocomplete.expand(&1) + end - :io.setopts gl, [expand_fun: expand_fun, binary: true, encoding: :unicode] + # expand_fun is not supported by a shell variant + # on Windows, so we do two IO calls, not caring + # about the result of the expand_fun one. 
+ _ = :io.setopts(gl, expand_fun: expand_fun) + :io.setopts(gl, binary: true, encoding: :unicode) end defp ensure_module_exists(node, mod) do @@ -496,26 +559,7 @@ defmodule IEx do end defp run_after_spawn do - for fun <- Enum.reverse(after_spawn), do: fun.() + _ = for fun <- Enum.reverse(after_spawn()), do: fun.() + :ok end - - # Used by default on evaluation cycle - defp default_color(:eval_interrupt), do: "yellow" - defp default_color(:eval_result), do: "yellow" - defp default_color(:eval_error), do: "red" - defp default_color(:eval_info), do: "normal" - defp default_color(:stack_app), do: "red,bright" - defp default_color(:stack_info), do: "red" - - # Used by ls - defp default_color(:ls_directory), do: "blue" - defp default_color(:ls_device), do: "green" - - # Used by ansi docs - defp default_color(:doc_bold), do: "bright" - defp default_color(:doc_code), do: "cyan,bright" - defp default_color(:doc_headings), do: "yellow,bright" - defp default_color(:doc_inline_code), do: "cyan" - defp default_color(:doc_underline), do: "underline" - defp default_color(:doc_title), do: "reverse,yellow,bright" end diff --git a/lib/iex/lib/iex/app.ex b/lib/iex/lib/iex/app.ex new file mode 100644 index 00000000000..baf398702bd --- /dev/null +++ b/lib/iex/lib/iex/app.ex @@ -0,0 +1,25 @@ +defmodule IEx.App do + @moduledoc false + + use Application + + def start(_type, _args) do + import Supervisor.Spec + + children = [worker(IEx.Config, [])] + options = [strategy: :one_for_one, name: IEx.Supervisor] + + tab = IEx.Config.new() + case Supervisor.start_link(children, options) do + {:ok, pid} -> + {:ok, pid, tab} + {:error, _} = error -> + IEx.Config.delete(tab) + error + end + end + + def stop(tab) do + IEx.Config.delete(tab) + end +end diff --git a/lib/iex/lib/iex/autocomplete.ex b/lib/iex/lib/iex/autocomplete.ex index 72e94044938..afa5ce786e0 100644 --- a/lib/iex/lib/iex/autocomplete.ex +++ b/lib/iex/lib/iex/autocomplete.ex @@ -1,77 +1,90 @@ defmodule IEx.Autocomplete do @moduledoc false - def expand([]) do - funs = module_funs(IEx.Helpers) ++ module_funs(Kernel) - mods = [%{name: "Elixir", type: :elixir, kind: :module}] - format_expansion mods ++ funs + def expand(expr, server \\ IEx.Server) + + def expand('', server) do + expand_variable_or_import("", server) end - def expand([h|t]=expr) do + def expand([h | t] = expr, server) do cond do - h === ?. and t != []-> - expand_dot reduce(t) - h === ?: -> - expand_erlang_modules + h === ?. and t != [] -> + expand_dot(reduce(t), server) + h === ?: and t == [] -> + expand_erlang_modules() identifier?(h) -> - expand_expr reduce(expr) + expand_expr(reduce(expr), server) (h == ?/) and t != [] and identifier?(hd(t)) -> - expand_expr reduce(t) - h in '(+[' -> - expand '' + expand_expr(reduce(t), server) + h in '([{' -> + expand('') true -> no() end end + @doc false + def exports(mod) do + if function_exported?(mod, :__info__, 1) do + mod.__info__(:macros) ++ (mod.__info__(:functions) -- [__info__: 1]) + else + mod.module_info(:exports) + end + end + defp identifier?(h) do (h in ?a..?z) or (h in ?A..?Z) or (h in ?0..?9) or h in [?_, ??, ?!] 
end - defp expand_dot(expr) do + defp expand_dot(expr, server) do case Code.string_to_quoted expr do {:ok, atom} when is_atom(atom) -> - expand_call atom, "" + expand_call(atom, "", server) {:ok, {:__aliases__, _, list}} -> - expand_elixir_modules list + expand_elixir_modules(list, "", server) + {:ok, {_, _, _} = ast_node} -> + expand_call(ast_node, "", server) _ -> no() end end - defp expand_expr(expr) do + defp expand_expr(expr, server) do case Code.string_to_quoted expr do {:ok, atom} when is_atom(atom) -> - expand_erlang_modules Atom.to_string(atom) + expand_erlang_modules(Atom.to_string(atom)) {:ok, {atom, _, nil}} when is_atom(atom) -> - expand_call Kernel, Atom.to_string(atom) + expand_variable_or_import(Atom.to_string(atom), server) {:ok, {:__aliases__, _, [root]}} -> - expand_elixir_modules [], Atom.to_string(root) - {:ok, {:__aliases__, _, [h|_] = list}} when is_atom(h) -> + expand_elixir_modules([], Atom.to_string(root), server) + {:ok, {:__aliases__, _, [h | _] = list}} when is_atom(h) -> hint = Atom.to_string(List.last(list)) list = Enum.take(list, length(list) - 1) - expand_elixir_modules list, hint - {:ok, {{:., _, [mod, fun]}, _, []}} when is_atom(fun) -> - expand_call mod, Atom.to_string(fun) + expand_elixir_modules(list, hint, server) + {:ok, {{:., _, [ast_node, fun]}, _, []}} when is_atom(fun) -> + expand_call(ast_node, Atom.to_string(fun), server) _ -> no() end end defp reduce(expr) do - last_token(Enum.reverse(expr), [' ', '(', '[', '+', '-']) + Enum.reduce(' ([{', expr, fn token, acc -> + hd(:string.tokens(acc, [token])) + end) + |> Enum.reverse() + |> trim_leading(?&) + |> trim_leading(?%) end - defp last_token(s, []) do - s - end - - defp last_token(s, [h|t]) do - last_token(List.last(:string.tokens(s, h)), t) - end + defp trim_leading([char | rest], char), + do: rest + defp trim_leading(expr, _char), + do: expr defp yes(hint, entries) do - {:yes, String.to_char_list(hint), Enum.map(entries, &String.to_char_list/1)} + {:yes, String.to_charlist(hint), Enum.map(entries, &String.to_charlist/1)} end defp no do @@ -80,67 +93,77 @@ defmodule IEx.Autocomplete do ## Formatting - defp format_expansion(list, hint \\ "") - defp format_expansion([], _) do no() end defp format_expansion([uniq], hint) do - hint = to_hint(uniq, hint) - uniq = if hint == "", do: to_uniq_entries(uniq), else: [] - yes(hint, uniq) + case to_hint(uniq, hint) do + "" -> yes("", to_uniq_entries(uniq)) + hint -> yes(hint, []) + end end - defp format_expansion([first|_]=entries, hint) do + defp format_expansion([first | _] = entries, hint) do binary = Enum.map(entries, &(&1.name)) length = byte_size(hint) prefix = :binary.longest_common_prefix(binary) if prefix in [0, length] do - entries = Enum.reduce(entries, [], fn e, acc -> to_entries(e) ++ acc end) - yes("", entries) + yes("", Enum.flat_map(entries, &to_entries/1)) else - yes(:binary.part(first.name, prefix, length-prefix), []) - end - end - - ## Root Modules - - defp root_modules do - Enum.reduce :code.all_loaded, [], fn {m, _}, acc -> - mod = Atom.to_string(m) - case mod do - "Elixir" <> _ -> - tokens = String.split(mod, ".") - if length(tokens) == 2 do - [%{kind: :module, name: List.last(tokens), type: :elixir}|acc] - else - acc - end - _ -> - [%{kind: :module, name: mod, type: :erlang}|acc] - end + yes(:binary.part(first.name, prefix, length - prefix), []) end end ## Expand calls # :atom.fun - defp expand_call(mod, hint) when is_atom(mod) do - expand_module_funs mod, hint + defp expand_call(mod, hint, _server) when is_atom(mod) do + 
expand_require(mod, hint) end # Elixir.fun - defp expand_call({:__aliases__, _, list}, hint) do - expand_module_funs Module.concat(list), hint + defp expand_call({:__aliases__, _, list}, hint, server) do + case expand_alias(list, server) do + {:ok, alias} -> expand_require(alias, hint) + :error -> no() + end + end + + # variable.fun_or_key + defp expand_call({_, _, _} = ast_node, hint, server) do + case value_from_binding(ast_node, server) do + {:ok, mod} when is_atom(mod) -> expand_call(mod, hint, server) + {:ok, map} when is_map(map) -> expand_map_field_access(map, hint) + _otherwise -> no() + end end - defp expand_call(_, _) do + defp expand_call(_, _, _) do no() end - defp expand_module_funs(mod, hint) do - format_expansion module_funs(mod, hint), hint + defp expand_map_field_access(map, hint) do + case match_map_fields(map, hint) do + [%{kind: :map_key, name: ^hint, value_is_map: false}] -> no() + map_fields when is_list(map_fields) -> format_expansion(map_fields, hint) + end + end + + defp expand_require(mod, hint) do + format_expansion match_module_funs(get_module_funs(mod), hint), hint + end + + defp expand_variable_or_import(hint, server) do + variables = expand_variable(hint, server) + funs = match_module_funs(imports_from_env(server) ++ get_module_funs(Kernel.SpecialForms), hint) + format_expansion(variables ++ funs, hint) + end + + defp expand_variable(hint, server) do + variables_from_binding(hint, server) + |> Enum.sort() + |> Enum.map(&%{kind: :variable, name: &1}) end ## Erlang modules @@ -149,99 +172,201 @@ defmodule IEx.Autocomplete do format_expansion match_erlang_modules(hint), hint end - defp match_erlang_modules("") do - Enum.filter root_modules, fn m -> m.type === :erlang end - end - defp match_erlang_modules(hint) do - Enum.filter root_modules, fn m -> String.starts_with?(m.name, hint) end + for mod <- match_modules(hint, true), + usable_as_unquoted_module?(mod) do + %{kind: :module, name: mod, type: :erlang} + end end ## Elixir modules - defp expand_elixir_modules(list, hint \\ "") do - mod = Module.concat(list) - format_expansion elixir_submodules(mod, hint, list == []) ++ module_funs(mod, hint), hint - end - - defp elixir_submodules(mod, hint, root) do - modname = Atom.to_string(mod) - depth = length(String.split(modname, ".")) + 1 - base = modname <> "." 
<> hint - - Enum.reduce modules_as_lists(root), [], fn(m, acc) -> - if String.starts_with?(m, base) do - tokens = String.split(m, ".") - if length(tokens) == depth do - name = List.last(tokens) - [%{kind: :module, type: :elixir, name: name}|acc] - else - acc - end - else - acc - end + defp expand_elixir_modules([], hint, server) do + aliases = match_aliases(hint, server) + expand_elixir_modules_from_aliases(Elixir, hint, aliases) + end + + defp expand_elixir_modules(list, hint, server) do + case expand_alias(list, server) do + {:ok, alias} -> expand_elixir_modules_from_aliases(alias, hint, []) + :error -> no() end end - defp modules_as_lists(true) do - ["Elixir.Elixir"] ++ modules_as_lists(false) + defp expand_elixir_modules_from_aliases(mod, hint, aliases) do + aliases + |> Kernel.++(match_elixir_modules(mod, hint)) + |> Kernel.++(match_module_funs(get_module_funs(mod), hint)) + |> format_expansion(hint) end - defp modules_as_lists(false) do - Enum.map(:code.all_loaded, fn({m, _}) -> Atom.to_string(m) end) + defp expand_alias([name | rest], server) when is_atom(name) do + case Keyword.fetch(aliases_from_env(server), Module.concat(Elixir, name)) do + {:ok, name} when rest == [] -> {:ok, name} + {:ok, name} -> {:ok, Module.concat([name | rest])} + :error -> {:ok, Module.concat([name | rest])} + end end + defp expand_alias([_ | _], _) do + :error + end + + defp match_aliases(hint, server) do + for {alias, _mod} <- aliases_from_env(server), + [name] = Module.split(alias), + String.starts_with?(name, hint) do + %{kind: :module, type: :alias, name: name} + end + end + + defp match_elixir_modules(module, hint) do + name = Atom.to_string(module) + depth = length(String.split(name, ".")) + 1 + base = name <> "." <> hint + + for mod <- match_modules(base, module === Elixir), + parts = String.split(mod, "."), + depth <= length(parts), + name = Enum.at(parts, depth - 1), + valid_alias_piece?("." <> name) do + %{kind: :module, type: :elixir, name: name} + end + |> Enum.uniq + + defp valid_alias_piece?(<<char, rest::binary>>) when char in ?A..?Z, + do: valid_alias_rest?(rest) + defp valid_alias_piece?(_), + do: false + + defp valid_alias_rest?(<<char, rest::binary>>) + when char in ?A..?Z + when char in ?a..?z + when char in ?0..?9 + when char == ?_, + do: valid_alias_rest?(rest) + defp valid_alias_rest?(<<>>), + do: true + defp valid_alias_rest?(rest), + do: valid_alias_piece?(rest) ## Helpers - defp module_funs(mod, hint \\ "") do - case ensure_loaded(mod) do - {:module, _} -> - falist = get_funs(mod) - - list = Enum.reduce falist, [], fn {f, a}, acc -> - case :lists.keyfind(f, 1, acc) do - {f, aa} -> :lists.keyreplace(f, 1, acc, {f, [a|aa]}) - false -> [{f, [a]}|acc] - end - end - - for {fun, arities} <- list, - name = Atom.to_string(fun), - String.starts_with?(name, hint) do - %{kind: :function, name: name, arities: arities} - end - _ -> + defp usable_as_unquoted_module?(name) do + # Conversion to atom is not a problem because + # it is only called with existing module names.
+ Macro.classify_identifier(String.to_atom(name)) != :other + end + + defp match_modules(hint, root) do + get_modules(root) + |> :lists.usort() + |> Enum.drop_while(& not String.starts_with?(&1, hint)) + |> Enum.take_while(& String.starts_with?(&1, hint)) + end + + defp get_modules(true) do + ["Elixir.Elixir"] ++ get_modules(false) + end + + defp get_modules(false) do + modules = Enum.map(:code.all_loaded(), &Atom.to_string(elem(&1, 0))) + case :code.get_mode() do + :interactive -> modules ++ get_modules_from_applications() + _otherwise -> modules + end + end + + defp get_modules_from_applications do + for [app] <- loaded_applications(), + {:ok, modules} = :application.get_key(app, :modules), + module <- modules do + Atom.to_string(module) + end + end + + defp loaded_applications do + # If we invoke :application.loaded_applications/0, + # it can error if we don't call safe_fixtable before. + # Since in both cases we are reaching over the + # application controller internals, we choose to match + # for performance. + :ets.match(:ac_tab, {{:loaded, :"$1"}, :_}) + end + + defp match_module_funs(funs, hint) do + for({fun, arity} <- funs, + name = Atom.to_string(fun), + String.starts_with?(name, hint), + do: %{kind: :function, name: name, arity: arity}) + |> Enum.sort_by(&{&1.name, &1.arity}) + end + + defp match_map_fields(map, hint) do + for({key, value} when is_atom(key) <- Map.to_list(map), + key = Atom.to_string(key), + String.starts_with?(key, hint), + do: %{kind: :map_key, name: key, value_is_map: is_map(value)}) + |> Enum.sort_by(& &1.name) + end + + defp get_module_funs(mod) do + cond do + not ensure_loaded?(mod) -> [] + docs = Code.get_docs(mod, :docs) -> + exports(mod) + |> Kernel.--(default_arg_functions_with_doc_false(docs)) + |> Enum.reject(&hidden_fun?(&1, docs)) + true -> + exports(mod) end end - defp get_funs(mod) do - if function_exported?(mod, :__info__, 1) do - if docs = Code.get_docs(mod, :docs) do - for {tuple, _line, _kind, _sign, doc} <- docs, doc != false, do: tuple - else - (mod.__info__(:functions) -- [__info__: 1]) ++ mod.__info__(:macros) - end - else - mod.module_info(:exports) + defp default_arg_functions_with_doc_false(docs) do + for {{fun_name, arity}, _, _, args, false} <- docs, + count = count_defaults(args), + count > 0, + new_arity <- (arity-count)..arity, + do: {fun_name, new_arity} + end + + defp count_defaults(args) do + Enum.count(args, &match?({:\\, _, _}, &1)) + end + + defp hidden_fun?(fun, docs) do + case List.keyfind(docs, fun, 0) do + nil -> + underscored_fun?(fun) + {_, _, _, _, false} -> + true + {fun, _, _, _, nil} -> + underscored_fun?(fun) + {_, _, _, _, _} -> + false end end - defp ensure_loaded(Elixir), do: {:error, :nofile} - defp ensure_loaded(mod), do: Code.ensure_compiled(mod) + defp underscored_fun?({name, _}), + do: hd(Atom.to_charlist(name)) == ?_ + + defp ensure_loaded?(Elixir), do: false + defp ensure_loaded?(mod), do: Code.ensure_loaded?(mod) ## Ad-hoc conversions - defp to_entries(%{kind: :module, name: name}) do + defp to_entries(%{kind: kind, name: name}) when + kind in [:map_key, :module, :variable] do [name] end - defp to_entries(%{kind: :function, name: name, arities: arities}) do - for a <- arities, do: "#{name}/#{a}" + defp to_entries(%{kind: :function, name: name, arity: arity}) do + ["#{name}/#{arity}"] end - defp to_uniq_entries(%{kind: :module}) do + defp to_uniq_entries(%{kind: kind}) when + kind in [:map_key, :module, :variable] do [] end @@ -249,11 +374,75 @@ defmodule IEx.Autocomplete do to_entries(fun) end - defp 
to_hint(%{kind: :module, name: name}, hint) do - :binary.part(name, byte_size(hint), byte_size(name) - byte_size(hint)) <> "." + defp to_hint(%{kind: :module, name: name}, hint) when name == hint do + format_hint(name, name) <> "." + end + + defp to_hint(%{kind: :map_key, name: name, value_is_map: true}, hint) when name == hint do + format_hint(name, hint) <> "." + end + + defp to_hint(%{kind: kind, name: name}, hint) when + kind in [:function, :map_key, :module, :variable] do + format_hint(name, hint) + end + + defp format_hint(name, hint) do + hint_size = byte_size(hint) + :binary.part(name, hint_size, byte_size(name) - hint_size) + end + + ## Evaluator interface + + defp imports_from_env(server) do + with evaluator when is_pid(evaluator) <- server.evaluator(), + env_fields = IEx.Evaluator.fields_from_env(evaluator, [:functions, :macros]), + %{functions: funs, macros: macros} <- env_fields do + Enum.flat_map(funs ++ macros, &elem(&1, 1)) + else + _ -> [] + end + end + + defp aliases_from_env(server) do + with evaluator when is_pid(evaluator) <- server.evaluator, + %{aliases: aliases} <- IEx.Evaluator.fields_from_env(evaluator, [:aliases]) do + aliases + else + _ -> [] + end + end + + defp variables_from_binding(hint, server) do + with evaluator when is_pid(evaluator) <- server.evaluator() do + IEx.Evaluator.variables_from_binding(evaluator, hint) + else + _ -> [] + end + end + + defp value_from_binding(ast_node, server) do + with evaluator when is_pid(evaluator) <- server.evaluator(), + {var, map_key_path} <- extract_from_ast(ast_node, []) do + IEx.Evaluator.value_from_binding(evaluator, var, map_key_path) + else + _ -> :error + end + end + + defp extract_from_ast(var_name, acc) when is_atom(var_name) do + {var_name, acc} + end + + defp extract_from_ast({var_name, _, nil}, acc) when is_atom(var_name) do + {var_name, acc} + end + + defp extract_from_ast({{:., _, [ast_node, fun]}, _, []}, acc) when is_atom(fun) do + extract_from_ast(ast_node, [fun | acc]) end - defp to_hint(%{kind: :function, name: name}, hint) do - :binary.part(name, byte_size(hint), byte_size(name) - byte_size(hint)) + defp extract_from_ast(_ast_node, _acc) do + :error end end diff --git a/lib/iex/lib/iex/cli.ex b/lib/iex/lib/iex/cli.ex index c2e24afc122..37edeac1e6b 100644 --- a/lib/iex/lib/iex/cli.ex +++ b/lib/iex/lib/iex/cli.ex @@ -15,7 +15,7 @@ # bug has arisen; # # 2. In some situations, connecting to a remote node via --remsh -# is not possible. This can be tested by starting two iex nodes: +# is not possible. This can be tested by starting two IEx nodes: # # $ iex --sname foo # $ iex --sname bar --remsh foo@localhost @@ -24,14 +24,14 @@ # are processed on the local node and not the remote one. For such, # one can replace the last line above by: # -# $ iex --sname bar --remsh foo@localhost -e IO.inspect node +# $ iex --sname bar --remsh foo@localhost -e 'IO.inspect node()' # # And verify that the local node name is printed. # # 4. Finally, in some other circumstances, printing messages may become # borked. This can be verified with: # -# $ iex -e ":error_logger.info_msg("foo~nbar", [])" +# $ iex -e ':error_logger.info_msg("foo~nbar", [])' # # By the time those instructions have been written, all tests above pass. defmodule IEx.CLI do @@ -50,14 +50,18 @@ defmodule IEx.CLI do a dumb terminal version is started instead. """ def start do - if tty_works? 
do - :user_drv.start([:"tty_sl -c -e", tty_args]) + if tty_works?() do + :user_drv.start([:"tty_sl -c -e", tty_args()]) else - :user.start - IO.puts "Warning: could not run smart terminal, falling back to dumb one" + :application.set_env(:stdlib, :shell_prompt_func, + {__MODULE__, :prompt}) + :user.start() local_start() end end + def prompt(_n) do + [] + end # Check if tty works. If it does not, we fall back to the # simple/dumb terminal. This is starting the linked in @@ -79,7 +83,8 @@ defmodule IEx.CLI do {:badrpc, reason} -> abort "Could not contact remote node #{remote}, reason: #{inspect reason}. Aborting..." {:module, IEx} -> - {remote, :erlang, :apply, [remote_start_function, []]} + {mod, fun, args} = remote_start_mfa() + {remote, mod, fun, args} _ -> abort "Could not find IEx on remote node #{remote}. Aborting..." end @@ -87,16 +92,16 @@ defmodule IEx.CLI do abort "In order to use --remsh, you need to name the current node using --name or --sname. Aborting..." end else - {:erlang, :apply, [local_start_function, []]} + {:erlang, :apply, [local_start_function(), []]} end end def local_start do - IEx.start(config(), {:elixir, :start_cli, []}) + IEx.start(options(), {:elixir, :start_cli, []}) end def remote_start(parent, ref) do - send parent, {:begin, ref, self} + send parent, {:begin, ref, self()} receive do: ({:done, ^ref} -> :ok) end @@ -104,9 +109,9 @@ defmodule IEx.CLI do &local_start/0 end - defp remote_start_function do - ref = make_ref - config = config() + defp remote_start_mfa do + ref = make_ref() + opts = options() parent = spawn_link fn -> receive do @@ -116,12 +121,10 @@ defmodule IEx.CLI do end end - fn -> - IEx.start(config, {__MODULE__, :remote_start, [parent, ref]}) - end + {IEx, :start, [opts, {__MODULE__, :remote_start, [parent, ref]}]} end - defp config do + defp options do [dot_iex_path: find_dot_iex(:init.get_plain_arguments)] end @@ -133,11 +136,11 @@ defmodule IEx.CLI do {:erlang, :apply, [function, []]} end - defp find_dot_iex(['--dot-iex', h|_]), do: List.to_string(h) - defp find_dot_iex([_|t]), do: find_dot_iex(t) + defp find_dot_iex(['--dot-iex', h | _]), do: List.to_string(h) + defp find_dot_iex([_ | t]), do: find_dot_iex(t) defp find_dot_iex([]), do: nil - defp get_remsh(['--remsh', h|_]), do: List.to_atom(h) - defp get_remsh([_|t]), do: get_remsh(t) + defp get_remsh(['--remsh', h | _]), do: List.to_atom(h) + defp get_remsh([_ | t]), do: get_remsh(t) defp get_remsh([]), do: nil end diff --git a/lib/iex/lib/iex/config.ex b/lib/iex/lib/iex/config.ex new file mode 100644 index 00000000000..4056dc47069 --- /dev/null +++ b/lib/iex/lib/iex/config.ex @@ -0,0 +1,182 @@ +defmodule IEx.Config do + @moduledoc false + + @table __MODULE__ + @agent __MODULE__ + @keys [:colors, :inspect, :history_size, :default_prompt, :alive_prompt, :width] + + # Read API + + def configuration() do + Application.get_all_env(:iex) |> Keyword.take(@keys) + end + + def width() do + columns = columns() + value = Application.get_env(:iex, :width) || min(columns, 80) + min(value, columns) + end + + defp columns() do + case :io.columns() do + {:ok, width} -> width + {:error, _} -> 80 + end + end + + def started?() do + Process.whereis(@agent) !== nil + end + + def history_size() do + Application.fetch_env!(:iex, :history_size) + end + + def default_prompt() do + Application.fetch_env!(:iex, :default_prompt) + end + + def alive_prompt() do + Application.fetch_env!(:iex, :alive_prompt) + end + + def color(color) do + color(color, Application.get_env(:iex, :colors, [])) + end + + defp 
color(color, colors) do + if colors_enabled?(colors) do + case Keyword.fetch(colors, color) do + {:ok, value} -> + value + :error -> + default_color(color) + end + else + nil + end + end + + defp colors_enabled?(colors) do + case Keyword.fetch(colors, :enabled) do + {:ok, enabled} -> + enabled + :error -> + IO.ANSI.enabled?() + end + end + + # Used by default on evaluation cycle + defp default_color(:eval_interrupt), do: [:yellow] + defp default_color(:eval_result), do: [:yellow] + defp default_color(:eval_error), do: [:red] + defp default_color(:eval_info), do: [:normal] + defp default_color(:stack_info), do: [:red] + defp default_color(:blame_same), do: [:normal] + defp default_color(:blame_diff), do: [:red] + + # Used by ls + defp default_color(:ls_directory), do: [:blue] + defp default_color(:ls_device), do: [:green] + + # Used by inspect + defp default_color(:syntax_colors) do + [atom: :cyan, string: :green, list: :default_color, + boolean: :magenta, nil: :magenta, tuple: :default_color, + binary: :default_color, map: :default_color] + end + + # Used by ansi docs + defp default_color(doc_color) do + IO.ANSI.Docs.default_options() |> Keyword.fetch!(doc_color) + end + + def ansi_docs() do + colors = Application.get_env(:iex, :colors, []) + if enabled = colors_enabled?(colors) do + [width: width(), enabled: enabled] ++ colors + end + end + + def inspect_opts() do + Application.get_env(:iex, :inspect, []) + |> Keyword.put_new_lazy(:width, &width/0) + |> update_syntax_colors() + end + + defp update_syntax_colors(opts) do + colors = Application.get_env(:iex, :colors, []) + + if syntax_colors = color(:syntax_colors, colors) do + reset = [:reset | List.wrap(color(:eval_result, colors))] + syntax_colors = [reset: reset] ++ syntax_colors + Keyword.update(opts, :syntax_colors, syntax_colors, &Keyword.merge(syntax_colors, &1)) + else + opts + end + end + + # Agent API + + def start_link() do + Agent.start_link(__MODULE__, :handle_init, [@table], [name: @agent]) + end + + def new() do + tab = :ets.new(@table, [:named_table, :public]) + true = :ets.insert_new(tab, [after_spawn: []]) + tab + end + + def delete(__MODULE__) do + :ets.delete(__MODULE__) + end + + def after_spawn(fun) do + Agent.update(@agent, __MODULE__, :handle_after_spawn, [fun]) + end + + def after_spawn() do + :ets.lookup_element(@table, :after_spawn, 2) + end + + def configure(options) do + Agent.update(@agent, __MODULE__, :handle_configure, [options]) + end + + # Agent callbacks + + def handle_init(tab) do + :public = :ets.info(tab, :protection) + tab + end + + def handle_after_spawn(tab, fun) do + :ets.update_element(tab, :after_spawn, {2, [fun | after_spawn()]}) + end + + def handle_configure(tab, options) do + options = :lists.ukeysort(1, options) + configuration() + |> Keyword.merge(options, &merge_option/3) + |> update_configuration() + tab + end + + defp update_configuration(config) do + put = fn({key, value}) when key in @keys -> + Application.put_env(:iex, key, value) + end + Enum.each(config, put) + end + + defp merge_option(:colors, old, new) when is_list(new), do: Keyword.merge(old, new) + defp merge_option(:inspect, old, new) when is_list(new), do: Keyword.merge(old, new) + defp merge_option(:history_size, _old, new) when is_integer(new), do: new + defp merge_option(:default_prompt, _old, new) when is_binary(new), do: new + defp merge_option(:alive_prompt, _old, new) when is_binary(new), do: new + defp merge_option(:width, _old, new) when is_integer(new), do: new + defp merge_option(key, _old, new) do + raise 
ArgumentError, "invalid configuration or value for pair #{inspect key} - #{inspect new}" + end +end diff --git a/lib/iex/lib/iex/evaluator.ex b/lib/iex/lib/iex/evaluator.ex index 6d6ce100a3e..cbb616c640e 100644 --- a/lib/iex/lib/iex/evaluator.ex +++ b/lib/iex/lib/iex/evaluator.ex @@ -10,35 +10,126 @@ defmodule IEx.Evaluator do * keeping expression history """ - def start(server, leader) do - IEx.History.init + def init(command, server, leader, opts) do old_leader = Process.group_leader - Process.group_leader(self, leader) + Process.group_leader(self(), leader) + + state = loop_state(opts) + command == :ack && :proc_lib.init_ack(self()) try do - loop(server) + loop(server, IEx.History.init, state) after - IEx.History.reset - Process.group_leader(self, old_leader) + Process.group_leader(self(), old_leader) end end - defp loop(server) do + @doc """ + Gets a value out of the binding, using the provided + variable name and map key path. + """ + @spec value_from_binding(pid, atom, [atom]) :: {:ok, any} | :error + def value_from_binding(evaluator, var_name, map_key_path) do + ref = make_ref() + send evaluator, {:value_from_binding, ref, self(), var_name, map_key_path} + receive do - {:eval, ^server, code, config} -> - send server, {:evaled, self, eval(code, config)} - loop(server) - {:done, ^server} -> - IEx.History.reset - :ok + {^ref, result} -> result + after + 5000 -> :error + end + end + + @doc """ + Gets a list of variables out of the binding that match the passed + variable prefix. + """ + @spec variables_from_binding(pid, String.t) :: [String.t] + def variables_from_binding(evaluator, variable_prefix) do + ref = make_ref() + send evaluator, {:variables_from_binding, ref, self(), variable_prefix} + + receive do + {^ref, result} -> result + after + 5000 -> [] end end @doc """ - Locates and loads an .iex.exs file from one of predefined locations. - Returns the new config. + Returns the named fields from the current session environment. 
""" - def load_dot_iex(config, path \\ nil) do + @spec fields_from_env(pid, [atom]) :: %{atom => term} + def fields_from_env(evaluator, fields) do + ref = make_ref() + send evaluator, {:fields_from_env, ref, self(), fields} + + receive do + {^ref, result} -> result + after + 5000 -> %{} + end + end + + defp loop(server, history, state) do + receive do + {:eval, ^server, code, iex_state} -> + {result, history, state} = eval(code, iex_state, history, state) + send server, {:evaled, self(), result} + loop(server, history, state) + {:fields_from_env, ref, receiver, fields} -> + send receiver, {ref, Map.take(state.env, fields)} + loop(server, history, state) + {:value_from_binding, ref, receiver, var_name, map_key_path} -> + value = traverse_binding(state.binding, var_name, map_key_path) + send receiver, {ref, value} + loop(server, history, state) + {:variables_from_binding, ref, receiver, var_prefix} -> + value = find_matched_variables(state.binding, var_prefix) + send receiver, {ref, value} + loop(server, history, state) + {:done, ^server} -> + :ok + end + end + + defp traverse_binding(binding, var_name, map_key_path) do + accumulator = Keyword.fetch(binding, var_name) + + Enum.reduce map_key_path, accumulator, fn + key, {:ok, map} when is_map(map) -> Map.fetch(map, key) + _key, _acc -> :error + end + end + + defp find_matched_variables(binding, var_prefix) do + for {var_name, _value} <- binding, + is_atom(var_name), + var_name = Atom.to_string(var_name), + String.starts_with?(var_name, var_prefix), + do: var_name + end + + defp loop_state(opts) do + env = + if env = opts[:env] do + :elixir.env_for_eval(env, []) + else + :elixir.env_for_eval(file: "iex") + end + + {_, _, env, scope} = :elixir.eval('import IEx.Helpers', [], env) + + binding = Keyword.get(opts, :binding, []) + state = %{binding: binding, scope: scope, env: env} + + case opts[:dot_iex_path] do + "" -> state + path -> load_dot_iex(state, path) + end + end + + defp load_dot_iex(state, path) do candidates = if path do [path] else @@ -47,94 +138,102 @@ defmodule IEx.Evaluator do path = Enum.find candidates, &File.regular?/1 - if nil?(path) do - config + if is_nil(path) do + state else - eval_dot_iex(config, path) + eval_dot_iex(state, path) end end - defp eval_dot_iex(config, path) do + defp eval_dot_iex(state, path) do try do code = File.read!(path) - env = :elixir.env_for_eval(config.env, file: path, line: 1) + env = :elixir.env_for_eval(state.env, file: path, line: 1) # Evaluate the contents in the same environment server_loop will run in {_result, binding, env, _scope} = - :elixir.eval(String.to_char_list(code), config.binding, env) + :elixir.eval(String.to_charlist(code), state.binding, env) - %{config | binding: binding, env: :elixir.env_for_eval(env, file: "iex", line: 1)} + %{state | binding: binding, env: :elixir.env_for_eval(env, file: "iex", line: 1)} catch kind, error -> + stacktrace = System.stacktrace() io_result "Error while evaluating: #{path}" - print_error(kind, error, System.stacktrace) + print_error(kind, error, stacktrace) System.halt(1) end end - # Instead of doing just `:elixir.eval`, we first parse the expression to see + # Instead of doing just :elixir.eval, we first parse the expression to see # if it's well formed. If parsing succeeds, we evaluate the AST as usual. # # If parsing fails, this might be a TokenMissingError which we treat in # a special way (to allow for continuation of an expression on the next - # line in IEx). In case of any other error, we let :elixir_translator - # to re-raise it. 
+ # line in IEx). # - # Returns updated config. + # Returns updated state. # # The first two clauses provide support for the break-trigger allowing to # break out from a pending incomplete expression. See # https://github.com/elixir-lang/elixir/issues/1089 for discussion. @break_trigger '#iex:break\n' - defp eval(code, config) do + defp eval(code, iex_state, history, state) do try do - do_eval(String.to_char_list(code), config) + do_eval(String.to_charlist(code), iex_state, history, state) catch kind, error -> print_error(kind, error, System.stacktrace) - %{config | cache: ''} + {%{iex_state | cache: ''}, history, state} end end - defp do_eval(@break_trigger, config=%IEx.Config{cache: ''}) do - config - end - - defp do_eval(@break_trigger, config) do - :elixir_errors.parse_error(config.counter, "iex", "incomplete expression", "") - end - - defp do_eval(latest_input, config) do - code = config.cache ++ latest_input - line = config.counter - - case Code.string_to_quoted(code, [line: line, file: "iex"]) do - {:ok, forms} -> - {result, new_binding, env, scope} = - :elixir.eval_forms(forms, config.binding, config.env, config.scope) - unless result == IEx.dont_display_result, do: io_inspect(result) - update_history(line, code, result) - %{config | env: env, - cache: '', - scope: scope, - binding: new_binding, - counter: config.counter + 1} - {:error, {line, error, token}} -> - if token == "" do - # Update config.cache so that IEx continues to add new input to - # the unfinished expression in `code` - %{config | cache: code} - else - # Encountered malformed expression - :elixir_errors.parse_error(line, "iex", error, token) - end - end + defp do_eval(@break_trigger, %IEx.State{cache: ''} = iex_state, history, state) do + {iex_state, history, state} + end + + defp do_eval(@break_trigger, iex_state, _history, _state) do + :elixir_errors.parse_error(iex_state.counter, "iex", "incomplete expression", "") + end + + defp do_eval(latest_input, iex_state, history, state) do + code = iex_state.cache ++ latest_input + line = iex_state.counter + Process.put(:iex_history, history) + handle_eval(Code.string_to_quoted(code, [line: line, file: "iex"]), code, line, iex_state, history, state) + after + Process.delete(:iex_history) + end + + defp handle_eval({:ok, forms}, code, line, iex_state, history, state) do + {result, binding, env, scope} = + :elixir.eval_forms(forms, state.binding, state.env, state.scope) + unless result == IEx.dont_display_result, do: io_inspect(result) + iex_state = + %{iex_state | cache: '', + counter: iex_state.counter + 1} + + state = + %{state | env: env, + scope: scope, + binding: binding} + + {iex_state, update_history(history, line, code, result), state} + end + + defp handle_eval({:error, {_, _, ""}}, code, _line, iex_state, history, state) do + # Update iex_state.cache so that IEx continues to add new input to + # the unfinished expression in "code" + {%{iex_state | cache: code}, history, state} + end + + defp handle_eval({:error, {line, error, token}}, _code, _line, _iex_state, _, _state) do + # Encountered malformed expression + :elixir_errors.parse_error(line, "iex", error, token) end - defp update_history(counter, cache, result) do - IEx.History.append({counter, cache, result}, counter, - Application.get_env(:iex, :history_size)) + defp update_history(history, counter, cache, result) do + IEx.History.append(history, {counter, cache, result}, IEx.Config.history_size) end defp io_inspect(result) do @@ -145,78 +244,64 @@ defmodule IEx.Evaluator do IO.puts :stdio, 
IEx.color(:eval_result, result) end - defp io_error(result) do - IO.puts :stdio, IEx.color(:eval_error, result) - end - ## Error handling defp print_error(kind, reason, stacktrace) do - {reason, stacktrace} = normalize_exception(kind, reason, stacktrace) - - message = Exception.format_banner(kind, reason, stacktrace) - io_error message - io_error (stacktrace |> prune_stacktrace |> format_stacktrace) - end + {blamed, stacktrace} = Exception.blame(kind, reason, stacktrace) + + ansidata = + case blamed do + %FunctionClauseError{} -> + {_, inspect_opts} = pop_in IEx.inspect_opts[:syntax_colors][:reset] + banner = Exception.format_banner(kind, reason, stacktrace) + blame = FunctionClauseError.blame(blamed, &inspect(&1, inspect_opts), &blame_match/2) + [IEx.color(:eval_error, banner), pad(blame)] + _ -> + [IEx.color(:eval_error, Exception.format_banner(kind, blamed, stacktrace))] + end - defp normalize_exception(:error, :undef, [{IEx.Helpers, fun, arity, _}|t]) do - {%RuntimeError{message: "undefined function: #{format_function(fun, arity)}"}, t} + stackdata = Exception.format_stacktrace(prune_stacktrace(stacktrace)) + IO.write :stdio, [ansidata, ?\n, IEx.color(:stack_info, stackdata)] end - defp normalize_exception(_kind, reason, stacktrace) do - {reason, stacktrace} + defp pad(string) do + " " <> String.replace(string, "\n", "\n ") end - defp format_function(fun, arity) do - cond do - is_list(arity) -> - "#{fun}/#{length(arity)}" - true -> - "#{fun}/#{arity}" + defp blame_match(%{match?: true, node: node}, _), + do: blame_ansi(:blame_same, "+", node) + defp blame_match(%{match?: false, node: node}, _), + do: blame_ansi(:blame_diff, "-", node) + defp blame_match(_, string), + do: string + + defp blame_ansi(color, no_ansi, node) do + case IEx.Config.color(color) do + nil -> + no_ansi <> Macro.to_string(node) <> no_ansi + ansi -> + [ansi | Macro.to_string(node)] + |> IO.ANSI.format(true) + |> IO.iodata_to_binary() end end + @elixir_internals [:elixir, :elixir_expand, :elixir_compiler, :elixir_module, + :elixir_clauses, :elixir_lexical, :elixir_def, :elixir_map, + :elixir_erl, :elixir_erl_clauses, :elixir_erl_pass] + defp prune_stacktrace(stacktrace) do # The order in which each drop_while is listed is important. - # For example, the user my call Code.eval_string/2 in IEx + # For example, the user may call Code.eval_string/2 in IEx # and if there is an error we should not remove erl_eval # and eval_bits information from the user stacktrace. 
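# Illustrative sketch, not part of the original source: MyApp below is a
# hypothetical user module and the frames are example stacktrace entries.
#
#     [{MyApp, :run, 1, []},
#      {:erl_eval, :do_apply, 6, []},
#      {:elixir, :eval_forms, 4, []},
#      {IEx.Evaluator, :loop, 3, []},
#      {:proc_lib, :init_p_do_apply, 3, []}]
#
# In the pipeline below, the reversed drop_while calls strip the trailing
# :proc_lib, IEx.Evaluator, :elixir and :erl_eval/:eval_bits frames, and the
# final reject removes any remaining @elixir_internals frames, so only
# {MyApp, :run, 1, []} is shown to the user.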
stacktrace |> Enum.reverse() + |> Enum.drop_while(&(elem(&1, 0) == :proc_lib)) |> Enum.drop_while(&(elem(&1, 0) == __MODULE__)) |> Enum.drop_while(&(elem(&1, 0) == :elixir)) |> Enum.drop_while(&(elem(&1, 0) in [:erl_eval, :eval_bits])) |> Enum.reverse() - end - - @doc false - def format_stacktrace(trace) do - entries = - for entry <- trace do - split_entry(Exception.format_stacktrace_entry(entry)) - end - - width = Enum.reduce entries, 0, fn {app, _}, acc -> - max(String.length(app), acc) - end - - " " <> Enum.map_join(entries, "\n ", &format_entry(&1, width)) - end - - defp split_entry(entry) do - case entry do - "(" <> _ -> - case :binary.split(entry, ") ") do - [left, right] -> {left <> ") ", right} - _ -> {"", entry} - end - _ -> - {"", entry} - end - end - - defp format_entry({app, info}, width) do - app = String.rjust(app, width) - "#{IEx.color(:stack_app, app)}#{IEx.color(:stack_info, info)}" + |> Enum.reject(&(elem(&1, 0) in @elixir_internals)) end end diff --git a/lib/iex/lib/iex/helpers.ex b/lib/iex/lib/iex/helpers.ex index 60e8a5580d8..471fab6321b 100644 --- a/lib/iex/lib/iex/helpers.ex +++ b/lib/iex/lib/iex/helpers.ex @@ -5,96 +5,192 @@ defmodule IEx.Helpers do which provides many helpers to make Elixir's shell more joyful to work with. - This message was triggered by invoking the helper - `h()`, usually referred to as `h/0` (since it expects 0 - arguments). - - There are many other helpers available: - - * `c/2` — compiles a file at the given path - * `cd/1` — changes the current directory - * `clear/0` — clears the screen - * `flush/0` — flushes all messages sent to the shell - * `h/0` — prints this help message - * `h/1` — prints help for the given module, function or macro - * `l/1` — loads the given module's beam code and purges the current version - * `ls/0` — lists the contents of the current directory - * `ls/1` — lists the contents of the specified directory - * `pwd/0` — prints the current working directory - * `r/1` — recompiles and reloads the given module's source file - * `respawn/0` — respawns the current shell - * `s/1` — prints spec information - * `t/1` — prints type information - * `v/0` — prints the history of commands evaluated in the session - * `v/1` — retrieves the nth value from the history - * `import_file/1` - — evaluates the given file in the shell's context - - Help for functions in this module can be consulted - directly from the command line, as an example, try: - - h(c/2) - - You can also retrieve the documentation for any module - or function. Try these: - - h(Enum) - h(Enum.reverse/1) - - To learn more about IEx as a whole, just type `h(IEx)`. + This message was triggered by invoking the helper `h()`, + usually referred to as `h/0` (since it expects 0 arguments). 
+ + You can use the `h/1` function to invoke the documentation + for any Elixir module or function: + + iex> h(Enum) + iex> h(Enum.map) + iex> h(Enum.reverse/1) + + You can also use the `i/1` function to introspect any value + you have in the shell: + + iex> i("hello") + + There are many other helpers available, here are some examples: + + * `b/1` - prints callbacks info and docs for a given module + * `c/1` - compiles a file into the current directory + * `c/2` - compiles a file to the given path + * `cd/1` - changes the current directory + * `clear/0` - clears the screen + * `e/1` - shows all exports (functions + macros) in a module + * `flush/0` - flushes all messages sent to the shell + * `h/0` - prints this help message + * `h/1` - prints help for the given module, function or macro + * `i/0` - prints information about the last value + * `i/1` - prints information about the given term + * `ls/0` - lists the contents of the current directory + * `ls/1` - lists the contents of the specified directory + * `pid/1` - creates a PID from a string + * `pid/3` - creates a PID with the 3 integer arguments passed + * `pwd/0` - prints the current working directory + * `r/1` - recompiles the given module's source file + * `recompile/0` - recompiles the current project + * `respawn/0` - respawns the current shell + * `v/0` - retrieves the last value from the history + * `v/1` - retrieves the nth value from the history + + Help for all of those functions can be consulted directly from + the command line using the `h/1` helper itself. Try: + + iex> h(v/0) + + To list all IEx helpers available, which is effectively all + exports (functions and macros) in the `IEx.Helpers` module: + + iex> e(IEx.Helpers) + + To learn more about IEx as a whole, type `h(IEx)`. """ import IEx, only: [dont_display_result: 0] @doc """ - Expects a list of files to compile and a path - to write their object code to. It returns the name - of the compiled modules. + Recompiles the current Mix application. - When compiling one file, there is no need to wrap it in a list. + This helper only works when IEx is started with a Mix + project, for example, `iex -S mix`. The application is + not restarted after compilation, which means any long + running process may crash as any changed module will be + temporarily removed and recompiled, without going through + the proper code changes callback. + + If you want to reload a single module, consider using + `r(ModuleName)` instead. + + This function is meant to be used for development and + debugging purposes. Do not depend on it in production code. + """ + def recompile do + if mix_started?() do + config = Mix.Project.config + consolidation = Mix.Project.consolidation_path(config) + reenable_tasks(config) + + # No longer allow consolidations to be accessed. + Code.delete_path(consolidation) + purge_protocols(consolidation) + + result = Mix.Task.run("compile") + + # Reenable consolidation and allow them to be loaded. + Code.prepend_path(consolidation) + purge_protocols(consolidation) + + result + else + IO.puts IEx.color(:eval_error, "Mix is not running. Please start IEx with: iex -S mix") + :error + end + end + + defp mix_started? 
do + List.keyfind(Application.started_applications, :mix, 0) != nil + end + + defp reenable_tasks(config) do + Mix.Task.reenable("compile") + Mix.Task.reenable("compile.all") + Mix.Task.reenable("compile.protocols") + compilers = config[:compilers] || Mix.compilers + Enum.each compilers, &Mix.Task.reenable("compile.#{&1}") + end + + defp purge_protocols(path) do + case File.ls(path) do + {:ok, beams} -> + for beam <- beams do + module = beam |> Path.rootname |> String.to_atom + :code.purge(module) + :code.delete(module) + end + :ok + {:error, _} -> + :ok + end + end + + @doc """ + Compiles the given files. + + It expects a list of files to compile and an optional path to write + the compiled code to (defaults to the current directory). When compiling + one file, there is no need to wrap it in a list. + + It returns the names of the compiled modules. + + If you want to recompile an existing module, check `r/1` instead. ## Examples - c ["foo.ex", "bar.ex"], "ebin" - #=> [Foo,Bar] + iex> c(["foo.ex", "bar.ex"], "ebin") + [Foo, Bar] + + iex> c("baz.ex") + [Baz] - c "baz.ex" - #=> [Baz] """ - def c(files, path \\ ".") when is_binary(path) do + def c(files, path \\ :in_memory) when is_binary(path) or path == :in_memory do files = List.wrap(files) unless Enum.all?(files, &is_binary/1) do raise ArgumentError, "expected a binary or a list of binaries as argument" end - {found, not_found} = - files - |> Enum.map(&Path.expand(&1, path)) - |> Enum.partition(&File.exists?/1) + {found, not_found} = Enum.split_with(files, &File.exists?/1) unless Enum.empty?(not_found) do raise ArgumentError, "could not find files #{Enum.join(not_found, ", ")}" end - {erls, exs} = Enum.partition(found, &String.ends_with?(&1, ".erl")) + {erls, exs} = Enum.split_with(found, &String.ends_with?(&1, ".erl")) - modules = Enum.map(erls, fn(source) -> + erl_modules = Enum.map(erls, fn(source) -> {module, binary} = compile_erlang(source) - base = source |> Path.basename |> Path.rootname - File.write!(Path.join(path, base <> ".beam"), binary) + unless path == :in_memory do + base = source |> Path.basename |> Path.rootname + File.write!(Path.join(path, base <> ".beam"), binary) + end module end) - modules ++ Kernel.ParallelCompiler.files_to_path(exs, path) + ex_modules = try do + compile_elixir(exs, path) + catch + _, _ -> raise CompileError + end + + erl_modules ++ ex_modules end @doc """ - Clear the console screen. + Clears the console screen. + + This function only works if ANSI escape codes are enabled + on the shell, which means this function is by default + unavailable on Windows machines. """ - def clear do - IO.write [IO.ANSI.home, IO.ANSI.clear] - dont_display_result + def clear() do + if IO.ANSI.enabled? do + IO.write [IO.ANSI.home, IO.ANSI.clear] + else + IO.puts "Cannot clear the screen because ANSI escape codes are not enabled on this shell" + end + dont_display_result() end @doc """ @@ -102,7 +198,7 @@ defmodule IEx.Helpers do """ def h() do IEx.Introspection.h(IEx.Helpers) - dont_display_result + dont_display_result() end @doc """ @@ -111,20 +207,20 @@ defmodule IEx.Helpers do ## Examples - h(Enum) - #=> Prints documentation for Enum + iex> h(Enum) It also accepts functions in the format `fun/arity` and `module.fun/arity`, for example: - h receive/1 - h Enum.all?/2 - h Enum.all? + iex> h receive/1 + iex> h Enum.all?/2 + iex> h Enum.all? 
""" @h_modules [__MODULE__, Kernel, Kernel.SpecialForms] - defmacro h({:/, _, [call, arity]} = other) do + defmacro h(term) + defmacro h({:/, _, [call, arity]} = term) do args = case Macro.decompose_call(call) do {_mod, :__info__, []} when arity == 1 -> @@ -134,7 +230,7 @@ defmodule IEx.Helpers do {fun, []} -> [@h_modules, fun, arity] _ -> - [other] + [term] end quote do @@ -161,17 +257,55 @@ defmodule IEx.Helpers do end @doc """ - When given a module, prints specifications (or simply specs) for all the - types defined in it. + Prints the documentation for the given callback function. + + It also accepts single module argument to list + all available behaviour callbacks. + + ## Examples + + iex> b(Mix.Task.run/1) + iex> b(Mix.Task.run) + iex> b(GenServer) + """ + defmacro b(term) + defmacro b({:/, _, [{{:., _, [mod, fun]}, _, []}, arity]}) do + quote do + IEx.Introspection.b(unquote(mod), unquote(fun), unquote(arity)) + end + end + + defmacro b({{:., _, [mod, fun]}, _, []}) do + quote do + IEx.Introspection.b(unquote(mod), unquote(fun)) + end + end - When given a particular type name, prints its spec. + defmacro b(module) do + quote do + IEx.Introspection.b(unquote(module)) + end + end + + @doc """ + Prints the types for the given module or for the given function/arity pair. ## Examples - t(Enum) - t(Enum.t/0) - t(Enum.t) + iex> t(Enum) + @type t() :: Enumerable.t() + @type element() :: any() + @type index() :: integer() + @type default() :: any() + + iex> t(Enum.t/0) + @type t() :: Enumerable.t() + + iex> t(Enum.t) + @type t() :: Enumerable.t() + """ + defmacro t(term) defmacro t({:/, _, [{{:., _, [mod, fun]}, _, []}, arity]}) do quote do IEx.Introspection.t(unquote(mod), unquote(fun), unquote(arity)) @@ -191,27 +325,24 @@ defmodule IEx.Helpers do end @doc """ - Similar to `t/1`, only for specs. - - When given a module, prints the list of all specs defined in the module. - - When given a particular spec name (with optional arity), prints its spec. + Prints the specs for the given module or for the given function/arity pair. ## Examples - s(Enum) - s(Enum.all?) - s(Enum.all?/2) - s(is_atom) - s(is_atom/1) + iex> s(Enum) + iex> s(Enum.all?) + iex> s(Enum.all?/2) + iex> s(is_atom) + iex> s(is_atom/1) """ - defmacro s({:/, _, [call, arity]} = other) do + defmacro s(term) + defmacro s({:/, _, [call, arity]} = term) do args = case Macro.decompose_call(call) do {mod, fun, []} -> [mod, fun, arity] {fun, []} -> [Kernel, fun, arity] - _ -> [other] + _ -> [term] end quote do @@ -233,34 +364,52 @@ defmodule IEx.Helpers do end @doc """ - Prints the history of expressions evaluated during the session along with - their results. - """ - def v do - inspect_opts = IEx.inspect_opts - IEx.History.each(&print_history_entry(&1, inspect_opts)) - end + Returns the value of the `n`th expression in the history. - defp print_history_entry({counter, cache, result}, inspect_opts) do - IO.write IEx.color(:eval_info, "#{counter}: #{cache}#=> ") - IO.puts IEx.color(:eval_result, "#{inspect result, inspect_opts}\n") - end + `n` can be a negative value: if it is, the corresponding expression value + relative to the current one is returned. For example, `v(-2)` returns the + value of the expression evaluated before the last evaluated expression. In + particular, `v(-1)` returns the result of the last evaluated expression and + `v()` does the same. - @doc """ - Retrieves the nth expression's value from the history. 
+ ## Examples + + iex(1)> "hello" <> " world" + "hello world" + iex(2)> 40 + 2 + 42 + iex(3)> v(-2) + "hello world" + iex(4)> v(2) + 42 + iex(5)> v() + 42 - Use negative values to lookup expression values relative to the current one. - For instance, v(-1) returns the result of the last evaluated expression. """ - def v(n) do - IEx.History.nth(n) |> elem(2) + def v(n \\ -1) do + IEx.History.nth(history(), n) |> elem(2) end @doc """ - Recompiles and reloads the specified module's source file. + Recompiles and reloads the given `module`. + + Please note that all the modules defined in the same + file as `module` are recompiled and reloaded. + + This function is meant to be used for development and + debugging purposes. Do not depend on it in production code. - Please note that all the modules defined in the same file as `module` - are recompiled and reloaded. + ## In-memory reloading + + When we reload the module in IEx, we recompile the module source + code, updating its contents in memory. The original `.beam` file + in disk, probably the one where the first definition of the module + came from, does not change at all. + + Since typespecs and docs are loaded from the .beam file (they + are not loaded in memory with the module because there is no need + for them to be in memory), they are not reloaded when you reload + the module. """ def r(module) when is_atom(module) do {:reloaded, module, do_r(module)} @@ -288,14 +437,66 @@ defmodule IEx.Helpers do end @doc """ - Load the given module's beam code (and ensures any previous + Loads the given module's BEAM code (and ensures any previous old version was properly purged before). + + This function is useful when you know the bytecode for module + has been updated in the filesystem and you want to tell the VM + to load it. """ def l(module) when is_atom(module) do :code.purge(module) :code.load_file(module) end + @doc """ + Prints information about the data type of any given term. + + If no argument is given, the value of the previous expression + is used. + + ## Examples + + iex> i(1..5) + + Will print: + + Term + 1..5 + Data type + Range + Description + This is a struct. Structs are maps with a __struct__ key. + Reference modules + Range, Map + + """ + def i(term \\ v(-1)) do + info = + ["Term": inspect(term)] ++ + IEx.Info.info(term) ++ + ["Implemented protocols": all_implemented_protocols_for_term(term)] + + for {subject, info} <- info do + info = info |> to_string() |> String.trim() |> String.replace("\n", "\n ") + IO.puts IEx.color(:eval_result, to_string(subject)) + IO.puts IEx.color(:eval_info, " #{info}") + end + + dont_display_result() + end + + # Given any "term", this function returns all the protocols in + # :code.get_path() implemented by the data structure of such term, in the form + # of a binary like "Protocol1, Protocol2, Protocol3". + defp all_implemented_protocols_for_term(term) do + :code.get_path() + |> Protocol.extract_protocols() + |> Enum.uniq() + |> Enum.reject(fn(protocol) -> is_nil(protocol.impl_for(term)) end) + |> Enum.map_join(", ", &inspect/1) + end + @doc """ Flushes all messages sent to the shell and prints them out. """ @@ -327,6 +528,7 @@ defmodule IEx.Helpers do """ def pwd do IO.puts IEx.color(:eval_info, System.cwd!) 
+ + dont_display_result() + end + + @doc """ @@ -334,14 +536,29 @@ defmodule IEx.Helpers do """ def cd(directory) when is_binary(directory) do case File.cd(expand_home(directory)) do - :ok -> pwd + :ok -> pwd() {:error, :enoent} -> IO.puts IEx.color(:eval_error, "No directory #{directory}") end + dont_display_result() end @doc """ - Produces a simple list of a directory's contents. + Prints a list of all the functions and macros exported by the given module. + """ + def e(module \\ Kernel) do + IEx.Autocomplete.exports(module) |> print_exports() + dont_display_result() + end + + defp print_exports(functions) do + list = Enum.map(functions, fn({name, arity}) -> Atom.to_string(name) <> "/" <> Integer.to_string(arity) end) + print_table(list) + end + + @doc """ + Prints a list of the given directory's contents. + If `path` points to a file, prints its full path. """ def ls(path \\ ".") when is_binary(path) do @@ -349,7 +566,10 @@ defmodule IEx.Helpers do case File.ls(path) do {:ok, items} -> sorted_items = Enum.sort(items) - ls_print(path, sorted_items) + printer = fn(item, width) -> + format_item(Path.join(path, item), String.pad_trailing(item, width)) + end + print_table(sorted_items, printer) {:error, :enoent} -> IO.puts IEx.color(:eval_error, "No such file or directory #{path}") @@ -357,35 +577,41 @@ defmodule IEx.Helpers do {:error, :enotdir} -> IO.puts IEx.color(:eval_info, Path.absname(path)) end + dont_display_result() end - defp expand_home(<<?~, rest :: binary>>) do + defp expand_home(<<?~, rest::binary>>) do System.user_home! <> rest end defp expand_home(other), do: other - defp ls_print(_, []) do + defp print_table(list, printer \\ &String.pad_trailing/2) + defp print_table([], _printer) do :ok end - defp ls_print(path, list) do + defp print_table(list, printer) do # print items in multiple columns (2 columns in the worst case) lengths = Enum.map(list, &String.length(&1)) maxlen = maxlength(lengths) - width = min(maxlen, 30) + 5 - ls_print(path, list, width) + offset = min(maxlen, 30) + 5 + print_table(list, printer, offset) end - defp ls_print(path, list, width) do + defp print_table(list, printer, offset) do Enum.reduce(list, 0, fn(item, len) -> - if len >= 80 do - IO.puts "" - len = 0 - end - IO.write format_item(Path.join(path, item), String.ljust(item, width)) - len+width + len = + if len >= 80 do + IO.puts "" + 0 + else + len + end + IO.write printer.(item, offset) + len + offset end) + IO.puts "" end @@ -405,21 +631,55 @@ defmodule IEx.Helpers do end @doc """ - Respawns the current shell by starting a new - process and a new scope. Returns true if it worked. + Respawns the current shell by starting a new shell process. + + Returns `true` if it worked. """ def respawn do if whereis = IEx.Server.whereis do - send whereis, {:respawn, self} - dont_display_result + send whereis, {:respawn, self()} + dont_display_result() + end + end + + @doc """ + Similar to `import_file` but only imports the file if it is available. + + By default, `import_file/1` fails when the given file does not exist. + However, since `import_file/1` is expanded at compile-time, it's not + possible to conditionally import a file since the macro is always + expanded: + + # This raises a File.Error if ~/.iex.exs doesn't exist. + if ("~/.iex.exs" |> Path.expand |> File.exists?) do + import_file "~/.iex.exs" + end + + This macro addresses this issue by checking if the file exists or not + on behalf of the user.
+ """ + defmacro import_file_if_available(path) when is_binary(path) do + import_file_if_available(path, true) + end + + defmacro import_file_if_available(_) do + raise ArgumentError, "import_file_if_available/1 expects a literal binary as its argument" + end + + defp import_file_if_available(path, optional?) when is_binary(path) do + path = Path.expand(path) + + if not optional? or File.exists?(path) do + path |> File.read! |> Code.string_to_quoted!(file: path) end end @doc """ Evaluates the contents of the file at `path` as if it were directly typed into - the shell. `path` has to be a literal binary. + the shell. - A leading `~` in `path` is automatically expanded. + `path` has to be a literal string. `path` is automatically expanded via + `Path.expand/1`. ## Examples @@ -431,19 +691,50 @@ defmodule IEx.Helpers do 13 iex(2)> value 13 + """ defmacro import_file(path) when is_binary(path) do - path = Path.expand(path) - Code.string_to_quoted! File.read!(path), file: path + import_file_if_available(path, false) end defmacro import_file(_) do raise ArgumentError, "import_file/1 expects a literal binary as its argument" end - # Compiles and loads an erlang source file, returns {module, binary} + @doc false + defmacro import_file(path, opts) when is_binary(path) and is_list(opts) do + IO.warn "import_file/2 is deprecated, please use import_file_if_available/1 instead" + import_file_if_available(path, Keyword.get(opts, :optional, false)) + end + + @doc """ + Calls `import/2` with the given arguments, but only if the module is available. + + This lets you put imports in `.iex.exs` files (including `~/.iex.exs`) without + getting compile errors if you open a console where the module is not available. + + ## Example + + # In ~/.iex.exs + import_if_available Ecto.Query + + """ + defmacro import_if_available(quoted_module, opts \\ []) do + module = Macro.expand(quoted_module, __CALLER__) + + if Code.ensure_loaded?(module) do + quote do + import unquote(quoted_module), unquote(opts) + end + end + end + + defp compile_elixir(exs, :in_memory), do: Kernel.ParallelCompiler.files(exs) + defp compile_elixir(exs, path), do: Kernel.ParallelCompiler.files_to_path(exs, path) + + # Compiles and loads an Erlang source file, returns {module, binary} defp compile_erlang(source) do - source = Path.relative_to_cwd(source) |> String.to_char_list + source = Path.relative_to_cwd(source) |> String.to_charlist case :compile.file(source, [:binary, :report]) do {:ok, module, binary} -> :code.purge(module) @@ -453,4 +744,79 @@ defmodule IEx.Helpers do raise CompileError end end + + defp history, do: Process.get(:iex_history) + + @doc """ + Creates a PID from `string`. + + ## Examples + + iex> pid("0.21.32") + #PID<0.21.32> + + """ + def pid(string) when is_binary(string) do + :erlang.list_to_pid('<#{string}>') + end + + @doc """ + Creates a PID with 3 non-negative integers passed as arguments + to the function. + + ## Examples + + iex> pid(0, 21, 32) + #PID<0.21.32> + iex> pid(0, 64, 2048) + #PID<0.64.2048> + + """ + def pid(x, y, z) when is_integer(x) and x >= 0 and + is_integer(y) and y >= 0 and + is_integer(z) and z >= 0 do + :erlang.list_to_pid( + '<' ++ Integer.to_charlist(x) ++ '.' ++ + Integer.to_charlist(y) ++ '.' ++ + Integer.to_charlist(z) ++ '>' + ) + end + + @doc """ + Deploys a given module's BEAM code to a list of nodes. + + This function is useful for development and debugging when you have code that + has been compiled or updated locally that you want to run on other nodes. 
+ + The node list defaults to a list of all connected nodes. + + Returns `{:error, :nofile}` if the object code (i.e. ".beam" file) for the module + could not be found locally. + + ## Examples + + iex> nl(HelloWorld) + {:ok, [{:node1@easthost, :loaded, HelloWorld}, + {:node1@westhost, :loaded, HelloWorld}]} + + iex> nl(NoSuchModuleExists) + {:error, :nofile} + + """ + def nl(nodes \\ Node.list, module) when is_list(nodes) and is_atom(module) do + case :code.get_object_code(module) do + {^module, bin, beam_path} -> + results = + for node <- nodes do + case :rpc.call(node, :code, :load_binary, [module, beam_path, bin]) do + {:module, _} -> {node, :loaded, module} + {:badrpc, message} -> {node, :badrpc, message} + {:error, message} -> {node, :error, message} + unexpected -> {node, :error, unexpected} + end + end + {:ok, results} + _otherwise -> {:error, :nofile} + end + end end diff --git a/lib/iex/lib/iex/history.ex b/lib/iex/lib/iex/history.ex index 2e800b4d311..385263f11e3 100644 --- a/lib/iex/lib/iex/history.ex +++ b/lib/iex/lib/iex/history.ex @@ -1,140 +1,112 @@ - defmodule IEx.History do +defmodule IEx.History.State do @moduledoc false - @doc """ - Initializes IEx process variables. All history - information is kept in the process dictionary. - """ - def init do - Process.put(:iex_history_start_counter, 1) - Process.put(:iex_history_counter, 1) - end + defstruct queue: :queue.new, size: 0, start: 1 - @doc """ - Appends one entry to the history with the given counter. - """ - def append(entry, counter, limit) do - Process.put({:iex_history, counter}, entry) - Process.put(:iex_history_counter, counter+1) - - start_counter = Process.get(:iex_history_start_counter) - should_collect = limit_history(start_counter, counter, limit, false) - if should_collect do - collect_garbage() - end + def append(%{queue: q, size: size} = state, item) do + %{state | queue: :queue.in(item, q), size: size + 1} end - defp limit_history(_, _, limit, _) when limit < 0 do - false - end + def to_list(%{queue: q}), + do: :queue.to_list(q) - defp limit_history(counter, max_counter, limit, should_collect) when max_counter - counter < limit do - Process.put(:iex_history_start_counter, counter) - should_collect - end + # Traverses the queue front-to-back if the index is positive. + def nth(%{queue: q, size: size, start: start}, n) + when n - start >= 0 and n - start < size, + do: get_nth(q, n - start) - defp limit_history(counter, max_counter, limit, should_collect) do - if not should_collect do - entry = Process.delete({:iex_history, counter}) - should_collect = has_binary(entry) - else - Process.delete({:iex_history, counter}) - end - limit_history(counter+1, max_counter, limit, should_collect) - end + # Traverses the queue back-to-front if the index is negative. + def nth(%{queue: q, size: size, start: start}, n) + when n < 0 and size + n >= start - 1, + do: get_nth(:queue.reverse(q), abs(n) - 1) - @doc """ - Removes all entries from the history and forces a garbage collection cycle. - """ - def reset() do - each_pair(fn(key, _) -> - Process.delete(key) - end) + def nth(_, _), do: nil + + defp get_nth(q, 0), do: :queue.head(q) + defp get_nth(q, n) when n > 0, + do: get_nth(:queue.tail(q), n - 1) - counter = Process.get(:iex_history_counter) - Process.put(:iex_history_start_counter, counter) + # Traverses the queue front-to-back, dropping items as we go + # until its size is within the specified limit. + # + # The "start" value contains the index of the expression at the head + # of the queue. 
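# Illustrative walk-through, not part of the original source (e1..e5 stand
# for history entries of the form {counter, code, result}):
#
#     state = %IEx.History.State{queue: :queue.from_list([e1, e2, e3, e4, e5]),
#                                size: 5, start: 1}
#     {collect?, pruned} = IEx.History.State.prune(state, 3)
#     pruned.start #=> 3   (e1 and e2 dropped, e3..e5 remain queued)
#
# collect? is true only when a dropped entry carried a binary larger than
# 64 bytes, which is what makes IEx.History.append/3 trigger a garbage
# collection pass after pruning.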
+ def prune(%{start: start} = state, limit), + do: prune(state, start, limit, false) - collect_garbage() + defp prune(state, _, limit, _) when limit < 0, + do: {false, state} + + defp prune(%{size: size} = state, counter, limit, collect?) + when size - counter < limit, + do: {collect?, %{state | start: counter}} + + defp prune(%{queue: q} = state, counter, limit, collect?) do + {{:value, entry}, q} = :queue.out(q) + collect? = collect? || has_binary(entry) + prune(%{state | queue: q}, counter + 1, limit, collect?) end # Checks val and each of its elements (if it is a list or a tuple) - # recursively to see if it has any binaries + # recursively to see if it has any large binaries (outside of the heap). defp has_binary(val) do try do has_bin(val) catch - :throw, true -> true + :throw, :found -> true end end - # Worker function used by has_binary. Throws when the first binary of the - # minimum specified size is found - defp has_bin(val) when is_tuple(val) do - has_bin(val, tuple_size(val)-1) - end + defp has_bin(val) when is_tuple(val), + do: has_bin(val, tuple_size(val) - 1) - defp has_bin([h|t]) do - has_bin(h) - has_bin(t) + defp has_bin([head | tail]) do + has_bin(head) + has_bin(tail) end - defp has_bin(val) when byte_size(val) > 64 do - throw true - end + defp has_bin(val) when byte_size(val) > 64, + do: throw(:found) - defp has_bin(_) do - false - end + defp has_bin(_), do: false - defp has_bin(_, -1) do - false - end + defp has_bin(_, -1), do: false defp has_bin(tuple, index) do has_bin(elem(tuple, index)) - has_bin(tuple, index-1) + has_bin(tuple, index - 1) end +end - # Based on https://github.com/erlang/otp/blob/7dcccee4371477e983f026db9e243cb66900b1ef/lib/stdlib/src/shell.erl#L1401 - defp collect_garbage do - :erlang.garbage_collect(self()) - collect_garbage Process.whereis(:user) - collect_garbage Process.group_leader() - :erlang.garbage_collect() - end +defmodule IEx.History do + @moduledoc false - defp collect_garbage(process) do - try do - :erlang.garbage_collect(process) - catch - _, _ -> nil - end - end + alias IEx.History.State @doc """ - Enumerates over all items in the history starting from the oldest one and - applies `fun` to each one in turn. + Initializes IEx history state. """ - def each(fun) do - each_pair(fn _, item -> fun.(item) end) - end + def init(), do: %State{} - # Private helper that invokes fun with both key and item. - defp each_pair(fun) do - each_pair(Process.get(:iex_history_start_counter), - Process.get(:iex_history_counter), - fun) - end + @doc """ + Appends one entry to the history. + """ + def append(%State{} = state, entry, limit) do + {collect?, state} = + State.append(state, entry) + |> State.prune(limit) - defp each_pair(counter, max_counter, fun) when counter < max_counter do - key = {:iex_history, counter} - entry = Process.get(key) - fun.(key, entry) - each_pair(counter+1, max_counter, fun) + if collect?, do: collect_garbage() + state end - defp each_pair(_, _, _) do - :ok + @doc """ + Enumerates over all items in the history starting from the oldest one and + applies `fun` to each one in turn. + """ + def each(%State{} = state, fun) do + State.to_list(state) + |> Enum.each(fun) end @doc """ @@ -142,17 +114,25 @@ If `n` < 0, the count starts from the most recent item and goes back in time. 
""" - def nth(n) do - entry = case n do - x when x >= 0 -> - Process.get({:iex_history, n}) - x when x < 0 -> - counter = Process.get(:iex_history_counter) - Process.get({:iex_history, counter + n}) + def nth(%State{} = state, n) do + case State.nth(state, n) do + nil -> raise "v(#{n}) is out of bounds" + entry -> entry end - if nil?(entry) do - raise "v(#{n}) is out of bounds" + end + + # Based on https://github.com/erlang/otp/blob/7dcccee4371477e983f026db9e243cb66900b1ef/lib/stdlib/src/shell.erl#L1401 + defp collect_garbage() do + collect_proc_garbage Process.whereis(:user) + collect_proc_garbage Process.group_leader() + :erlang.garbage_collect() + end + + defp collect_proc_garbage(process) do + try do + :erlang.garbage_collect(process) + catch + _, _ -> nil end - entry end end diff --git a/lib/iex/lib/iex/info.ex b/lib/iex/lib/iex/info.ex new file mode 100644 index 00000000000..d5f2b28c71a --- /dev/null +++ b/lib/iex/lib/iex/info.ex @@ -0,0 +1,336 @@ +defprotocol IEx.Info do + @fallback_to_any true + + @spec info(term) :: [{atom, String.t}] + def info(term) +end + +defimpl IEx.Info, for: Tuple do + def info(_tuple) do + ["Data type": "Tuple", + "Reference modules": "Tuple"] + end +end + +defimpl IEx.Info, for: Atom do + def info(atom) do + specific_info = + cond do + Code.ensure_loaded?(atom) -> + info_module(atom) + match?("Elixir." <> _, Atom.to_string(atom)) -> + info_module_like_atom(atom) + true -> + info_atom(atom) + end + + description = + if atom == IEx.dont_display_result() do + ["Description": """ + This atom is returned by IEx when a function that should not print its + return value on screen is executed. + """] + else + [] + end + ["Data type": "Atom"] ++ description ++ specific_info + end + + defp info_module(mod) do + extra = + if Code.get_docs(mod, :moduledoc) do + "Use h(#{inspect mod}) to access its documentation.\n" + else + "" + end + + mod_info = mod.module_info() + generic_info = + ["Module bytecode": module_object_file(mod), + "Source": module_source_file(mod_info), + "Version": module_version(mod_info), + "Compile options": module_compile_options(mod_info), + "Description": "#{extra}Call #{inspect mod}.module_info() to access metadata."] + + specific_info = + if function_exported?(mod, :__protocol__, 1) do + impls = + mod + |> Protocol.extract_impls(:code.get_path()) + |> Enum.map_join(", ", &inspect/1) + ["Protocol": "This module is a protocol. 
These data structures implement it:\n #{impls}"] + else + [] + end + + generic_info ++ specific_info ++ + ["Raw representation": ":" <> inspect(Atom.to_string(mod)), + "Reference modules": "Module, Atom"] + end + + defp info_module_like_atom(atom) do + ["Raw representation": ":" <> inspect(Atom.to_string(atom)), + "Reference modules": "Atom"] + end + + defp info_atom(_atom) do + ["Reference modules": "Atom"] + end + + defp module_object_file(mod) do + default_or_apply :code.which(mod), fn + atom when is_atom(atom) -> inspect(atom) + path -> Path.relative_to_cwd(path) + end + end + + defp module_version(mod_info) do + default_or_apply(mod_info[:attributes][:vsn], &inspect/1) + end + + defp module_source_file(mod_info) do + default_or_apply(mod_info[:compile][:source], &Path.relative_to_cwd/1) + end + + defp module_compile_options(mod_info) do + default_or_apply(mod_info[:compile][:options], &inspect/1) + end + + defp default_or_apply(nil, _), do: "no value found" + defp default_or_apply(data, fun), do: fun.(data) +end + +defimpl IEx.Info, for: List do + def info(list) do + specific_info = + cond do + list == [] -> info_list(list) + Inspect.List.printable?(list) -> info_charlist(list) + Keyword.keyword?(list) -> info_kw_list(list) + true -> info_list(list) + end + + ["Data type": "List"] ++ specific_info + end + + defp info_charlist(charlist) do + desc = """ + This is a list of integers that is printed as a sequence of characters + delimited by single quotes because all the integers in it represent valid + ASCII characters. Conventionally, such lists of integers are referred to as + "charlists" (more precisely, a charlist is a list of Unicode codepoints, + and ASCII is a subset of Unicode). + """ + + ["Description": desc, + "Raw representation": inspect(charlist, charlists: :as_lists), + "Reference modules": "List"] + end + + defp info_kw_list(_kw_list) do + desc = """ + This is what is referred to as a "keyword list". A keyword list is a list + of two-element tuples where the first element of each tuple is an atom. + """ + + ["Description": desc, + "Reference modules": "Keyword, List"] + end + + defp info_list(_list) do + ["Reference modules": "List"] + end +end + +defimpl IEx.Info, for: BitString do + def info(bitstring) do + specific_info = + cond do + is_binary(bitstring) and String.printable?(bitstring) -> info_string(bitstring) + is_binary(bitstring) and String.valid?(bitstring) -> info_non_printable_string(bitstring) + is_binary(bitstring) -> info_binary(bitstring) + is_bitstring(bitstring) -> info_bitstring(bitstring) + end + + ["Data type": "BitString"] ++ specific_info + end + + defp info_string(bitstring) do + desc = """ + This is a string: a UTF-8 encoded binary. It's printed surrounded by + "double quotes" because all UTF-8 encoded codepoints in it are printable. + """ + ["Byte size": byte_size(bitstring), + "Description": desc, + "Raw representation": inspect(bitstring, binaries: :as_binaries), + "Reference modules": "String, :binary"] + end + + defp info_non_printable_string(bitstring) do + first_non_printable = + bitstring + |> String.codepoints() + |> Enum.find(fn cp -> not String.printable?(cp) end) + + desc = """ + This is a string: a UTF-8 encoded binary. 
It's printed with the `<<>>` + syntax (as opposed to double quotes) because it contains non-printable + UTF-8 encoded codepoints (the first non-printable codepoint being `#{inspect first_non_printable}`) + """ + ["Byte size": byte_size(bitstring), + "Description": desc, + "Reference modules": "String, :binary"] + end + + defp info_binary(bitstring) do + first_non_valid = + bitstring + |> String.codepoints() + |> Enum.find(fn cp -> not String.valid?(cp) end) + + desc = """ + This is a binary: a collection of bytes. It's printed with the `<<>>` + syntax (as opposed to double quotes) because it is not a + UTF-8 encoded binary (the first invalid byte being `#{inspect first_non_valid}`) + """ + + ["Byte size": byte_size(bitstring), + "Description": desc, + "Reference modules": ":binary"] + end + + defp info_bitstring(bitstring) do + desc = """ + This is a bitstring. It's a chunk of bits that are not divisible by 8 + (the number of bytes isn't whole). + """ + + ["Bits size": bit_size(bitstring), + "Description": desc] + end +end + +defimpl IEx.Info, for: Integer do + def info(_) do + ["Data type": "Integer", + "Reference modules": "Integer"] + end +end + +defimpl IEx.Info, for: Float do + def info(_) do + ["Data type": "Float", + "Reference modules": "Float"] + end +end + +defimpl IEx.Info, for: Function do + def info(fun) do + fun_info = :erlang.fun_info(fun) + + specific_info = + if fun_info[:type] == :external and fun_info[:env] == [] do + info_named_fun(fun_info) + else + info_anon_fun(fun_info) + end + + ["Data type": "Function"] ++ specific_info + end + + defp info_anon_fun(fun_info) do + ["Type": to_string(fun_info[:type]), + "Arity": fun_info[:arity], + "Description": "This is an anonymous function."] + end + + defp info_named_fun(fun_info) do + ["Type": to_string(fun_info[:type]), + "Arity": fun_info[:arity]] + end +end + +defimpl IEx.Info, for: PID do + @keys [:registered_name, :links, :message_queue_len] + + def info(pid) do + extra = + case :rpc.pinfo(pid, @keys) do + [_ | _] = info -> + ["Alive": true, + "Name": process_name(info[:registered_name]), + "Links": links(info[:links]), + "Message queue length": info[:message_queue_len]] + _ -> + ["Alive": false] + end + + ["Data type": "PID"] ++ extra ++ + ["Description": "Use Process.info/1 to get more info about this process", + "Reference modules": "Process, Node"] + end + + defp process_name([]), do: "not registered" + defp process_name(name), do: inspect(name) + + defp links([]), do: "none" + defp links(links), do: Enum.map_join(links, ", ", &inspect/1) +end + +defimpl IEx.Info, for: Map do + def info(_) do + ["Data type": "Map", + "Reference modules": "Map"] + end +end + +defimpl IEx.Info, for: Port do + def info(port) do + connected = :rpc.call(node(port), :erlang, :port_info, [port, :connected]) + + ["Data type": "Port", + "Open": match?({:connected, _}, connected), + "Reference modules": "Port"] + end +end + +defimpl IEx.Info, for: Reference do + def info(_) do + ["Data type": "Reference"] + end +end + +defimpl IEx.Info, for: [Date, Time, NaiveDateTime] do + {sigil, repr} = + case @for do + Date -> {"D", "date"} + Time -> {"T", "time"} + NaiveDateTime -> {"N", ~S{"naive" datetime (that is, a datetime without a timezone)}} + end + + def info(value) do + desc = """ + This is a struct representing a #{unquote(repr)}. It is commonly represented + using the `~#{unquote(sigil)}` sigil syntax, that is defined in the `Kernel.sigil_#{unquote(sigil)}/2` macro. 
+ """ + ["Data type": inspect(@for), + "Description": desc, + "Raw representation": raw_inspect(value), + "Reference modules": inspect(@for) <> ", Calendar, Map"] + end + + defp raw_inspect(value) do + value + |> Inspect.Any.inspect(%Inspect.Opts{}) + |> Inspect.Algebra.format(:infinity) + |> IO.iodata_to_binary + end +end + +defimpl IEx.Info, for: Any do + def info(%{__struct__: mod}) do + ["Data type": inspect(mod), + "Description": "This is a struct. Structs are maps with a __struct__ key.", + "Reference modules": inspect(mod) <> ", Map"] + end +end diff --git a/lib/iex/lib/iex/introspection.ex b/lib/iex/lib/iex/introspection.ex index c5653b39ac2..4ab80b2fe02 100644 --- a/lib/iex/lib/iex/introspection.ex +++ b/lib/iex/lib/iex/introspection.ex @@ -5,9 +5,10 @@ defmodule IEx.Introspection do import IEx, only: [dont_display_result: 0] + alias Kernel.Typespec + @doc """ - Documentation for modules. - It has a fallback clauses + Prints the documentation for the given module. """ def h(module) when is_atom(module) do case Code.ensure_loaded(module) do @@ -15,46 +16,39 @@ defmodule IEx.Introspection do if function_exported?(module, :__info__, 1) do case Code.get_docs(module, :moduledoc) do {_, binary} when is_binary(binary) -> - if opts = ansi_docs() do - IO.ANSI.Docs.print_heading(inspect(module), opts) - IO.ANSI.Docs.print(binary, opts) - else - IO.puts "* #{inspect(module)}\n" - IO.puts binary - end + print_doc(inspect(module), binary) {_, _} -> - nodocs(inspect module) + no_docs(inspect module) _ -> - IO.puts IEx.color(:eval_error, "#{inspect module} was not compiled with docs") + puts_error("#{inspect module} was not compiled with docs") end else - IO.puts IEx.color(:eval_error, "#{inspect module} is an Erlang module and, as such, it does not have Elixir-style docs") + puts_error("#{inspect module} is an Erlang module and, as such, it does not have Elixir-style docs") end {:error, reason} -> - IO.puts IEx.color(:eval_error, "Could not load module #{inspect module}, got: #{reason}") + puts_error("Could not load module #{inspect module}, got: #{reason}") end - dont_display_result + dont_display_result() end def h(_) do - IO.puts IEx.color(:eval_error, "Invalid arguments for h helper") - dont_display_result + puts_error("Invalid arguments for h helper") + dont_display_result() end @doc """ - Docs for the given function, with any arity, in any of the modules. + Prints the documentation for the given function + with any arity in the list of modules. """ def h(modules, function) when is_list(modules) and is_atom(function) do - result = - Enum.reduce modules, :not_found, fn - module, :not_found -> h_mod_fun(module, function) - _module, acc -> acc - end + printed? 
= + Enum.any?(modules, fn module -> + h_mod_fun(module, function) == :ok + end) - unless result == :ok, do: - nodocs(function) + unless printed?, do: no_docs(function) - dont_display_result + dont_display_result() end def h(module, function) when is_atom(module) and is_atom(function) do @@ -62,19 +56,18 @@ defmodule IEx.Introspection do :ok -> :ok :no_docs -> - IO.puts IEx.color(:eval_error, "#{inspect module} was not compiled with docs") + puts_error("#{inspect module} was not compiled with docs") :not_found -> - nodocs("#{inspect module}.#{function}") + no_docs("#{inspect module}.#{function}") end - dont_display_result + dont_display_result() end - defp h_mod_fun(mod, fun) when is_atom(mod) and is_atom(fun) do + defp h_mod_fun(mod, fun) when is_atom(mod) do if docs = Code.get_docs(mod, :docs) do - result = for {{f, arity}, _line, _type, _args, doc} <- docs, fun == f, doc != false do + result = for {{^fun, arity}, _, _, _, _} = doc <- docs, has_content?(doc) do h(mod, fun, arity) - IO.puts "" end if result != [], do: :ok, else: :not_found @@ -84,19 +77,18 @@ defmodule IEx.Introspection do end @doc """ - Documentation for the given function and arity in the list of modules. + Prints the documentation for the given function + and arity in the list of modules. """ def h(modules, function, arity) when is_list(modules) and is_atom(function) and is_integer(arity) do - result = - Enum.reduce modules, :not_found, fn - module, :not_found -> h_mod_fun_arity(module, function, arity) - _module, acc -> acc - end + printed? = + Enum.any?(modules, fn module -> + h_mod_fun_arity(module, function, arity) == :ok + end) - unless result == :ok, do: - nodocs("#{function}/#{arity}") + unless printed?, do: no_docs("#{function}/#{arity}") - dont_display_result + dont_display_result() end def h(module, function, arity) when is_atom(module) and is_atom(function) and is_integer(arity) do @@ -104,25 +96,23 @@ defmodule IEx.Introspection do :ok -> :ok :no_docs -> - IO.puts IEx.color(:eval_error, "#{inspect module} was not compiled with docs") + puts_error("#{inspect module} was not compiled with docs") :not_found -> - nodocs("#{inspect module}.#{function}/#{arity}") + no_docs("#{inspect module}.#{function}/#{arity}") end - dont_display_result + dont_display_result() end - defp h_mod_fun_arity(mod, fun, arity) when is_atom(mod) and is_atom(fun) and is_integer(arity) do + defp h_mod_fun_arity(mod, fun, arity) when is_atom(mod) do if docs = Code.get_docs(mod, :docs) do - doc = - cond do - d = find_doc(docs, fun, arity) -> d - d = find_default_doc(docs, fun, arity) -> d - true -> nil + if doc = find_doc(docs, fun, arity) do + if callback_module = is_nil(elem(doc, 4)) and callback_module(mod, fun, arity) do + filter = &match?({^fun, ^arity}, elem(&1, 0)) + print_callback_docs(callback_module, filter, &print_doc/2) + else + print_doc(doc) end - - if doc do - print_doc(doc) :ok else :not_found @@ -132,33 +122,47 @@ defmodule IEx.Introspection do end end - defp find_doc(docs, function, arity) do - if doc = List.keyfind(docs, {function, arity}, 0) do - case elem(doc, 4) do - false -> nil - _ -> doc - end - end + defp find_doc(docs, fun, arity) do + doc = List.keyfind(docs, {fun, arity}, 0) || find_doc_defaults(docs, fun, arity) + if doc != nil and has_content?(doc), do: doc end - defp find_default_doc(docs, function, min) do - Enum.find docs, fn(doc) -> + defp find_doc_defaults(docs, function, min) do + Enum.find(docs, fn doc -> case elem(doc, 0) do - {^function, max} when max > min -> - defaults = Enum.count elem(doc, 3), 
&match?({:\\, _, _}, &1) - min + defaults >= max + {^function, arity} when arity > min -> + defaults = Enum.count(elem(doc, 3), &match?({:\\, _, _}, &1)) + arity <= (min + defaults) _ -> false end - end + end) + end + + defp has_content?({_, _, _, _, false}), + do: false + defp has_content?({{name, _}, _, _, _, nil}), + do: hd(Atom.to_charlist(name)) != ?_ + defp has_content?({_, _, _, _, _}), + do: true + + defp callback_module(mod, fun, arity) do + predicate = &match?({{^fun, ^arity}, _}, &1) + mod.module_info(:attributes) + |> Keyword.get_values(:behaviour) + |> Stream.concat() + |> Enum.find(&Enum.any?(Typespec.beam_callbacks(&1), predicate)) end defp print_doc({{fun, _}, _line, kind, args, doc}) do - args = Enum.map_join(args, ", ", &print_doc_arg(&1)) - heading = "#{kind} #{fun}(#{args})" - doc = doc || "" + args = Enum.map_join(args, ", ", &format_doc_arg(&1)) - if opts = ansi_docs() do + print_doc("#{kind} #{fun}(#{args})", doc) + end + + defp print_doc(heading, doc) do + doc = doc || "" + if opts = IEx.Config.ansi_docs do IO.ANSI.Docs.print_heading(heading, opts) IO.ANSI.Docs.print(doc, opts) else @@ -167,140 +171,236 @@ defmodule IEx.Introspection do end end - defp print_doc_arg({:\\, _, [left, right]}) do - print_doc_arg(left) <> " \\\\ " <> Macro.to_string(right) + defp format_doc_arg({:\\, _, [left, right]}) do + format_doc_arg(left) <> " \\\\ " <> Macro.to_string(right) end - defp print_doc_arg({var, _, _}) do + defp format_doc_arg({var, _, _}) do Atom.to_string(var) end - defp ansi_docs() do - opts = Application.get_env(:iex, :colors) - if opts[:enabled] do - [width: IEx.width] ++ opts + @doc """ + Prints the list of behaviour callbacks for the given module. + """ + def b(mod) when is_atom(mod) do + printer = fn heading, _doc -> puts_info(heading) end + case print_callback_docs(mod, fn _ -> true end, printer) do + :ok -> :ok + :no_beam -> no_beam(mod) + :no_docs -> puts_error("#{inspect mod} was not compiled with docs") + :not_found -> puts_error("No callbacks for #{inspect mod} were found") + end + + dont_display_result() + end + + @doc """ + Prints documentation for the given callback function with any arity. + """ + def b(mod, fun) when is_atom(mod) and is_atom(fun) do + filter = &match?({^fun, _}, elem(&1, 0)) + case print_callback_docs(mod, filter, &print_doc/2) do + :ok -> :ok + :no_beam -> no_beam(mod) + :no_docs -> puts_error("#{inspect mod} was not compiled with docs") + :not_found -> no_docs("#{inspect mod}.#{fun}") end + + dont_display_result() end @doc """ - Print types in module. + Prints documentation for the given callback function and arity. 
+ """ + def b(mod, fun, arity) when is_atom(mod) and is_atom(fun) and is_integer(arity) do + filter = &match?({^fun, ^arity}, elem(&1, 0)) + case print_callback_docs(mod, filter, &print_doc/2) do + :ok -> :ok + :no_beam -> no_beam(mod) + :no_docs -> puts_error("#{inspect mod} was not compiled with docs") + :not_found -> no_docs("#{inspect mod}.#{fun}/#{arity}") + end + + dont_display_result() + end + + defp print_callback_docs(mod, filter, printer) do + case get_callback_docs(mod) do + {callbacks, docs} -> + docs + |> Enum.filter(filter) + |> Enum.map(fn + {{fun, arity}, _, :macrocallback, doc} -> + print_callback_doc(fun, :macrocallback, doc, {:"MACRO-#{fun}", arity + 1}, callbacks, printer) + {{fun, arity}, _, kind, doc} -> + print_callback_doc(fun, kind, doc, {fun, arity}, callbacks, printer) + end) + |> case do + [] -> :not_found + _ -> :ok + end + + other -> other + end + end + + defp get_callback_docs(mod) do + callbacks = Typespec.beam_callbacks(mod) + docs = Code.get_docs(mod, :callback_docs) + + cond do + is_nil(callbacks) -> :no_beam + is_nil(docs) -> :no_docs + true -> {callbacks, docs} + end + end + + defp print_callback_doc(name, kind, doc, key, callbacks, printer) do + {_, [spec | _]} = List.keyfind(callbacks, key, 0) + + definition = + Typespec.spec_to_ast(name, spec) + |> Macro.prewalk(&drop_macro_env/1) + |> Macro.to_string + + printer.("@#{kind} #{definition}", doc) + end + + defp drop_macro_env({name, meta, [{:::, _, [_, {{:., _, [Macro.Env, :t]}, _, _}]} | args]}), + do: {name, meta, args} + + defp drop_macro_env(other), + do: other + + @doc """ + Prints the types for the given module. """ def t(module) when is_atom(module) do - case Kernel.Typespec.beam_types(module) do - nil -> nobeam(module) - [] -> notypes(inspect module) - types -> for type <- types, do: print_type(type) + _ = case Typespec.beam_types(module) do + nil -> no_beam(module) + [] -> no_types(inspect module) + types -> Enum.each(types, &print_type/1) end - dont_display_result + dont_display_result() end @doc """ - Print the given type in module with any arity. + Prints the given type in module with any arity. """ def t(module, type) when is_atom(module) and is_atom(type) do - case Kernel.Typespec.beam_types(module) do - nil -> nobeam(module) + case Typespec.beam_types(module) do + nil -> no_beam(module) types -> printed = - for {_, {t, _, _args}} = typespec <- types, t == type do + for {_, {^type, _, _args}} = typespec <- types do + print_type_doc(module, type) print_type(typespec) typespec end if printed == [] do - notypes("#{inspect module}.#{type}") + no_types("#{inspect module}.#{type}") end end - dont_display_result + dont_display_result() end @doc """ - Print type in module with given arity. + Prints the type in module with given arity. 
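+
+  For instance, taking `Enum.t/0` as an illustrative type:
+
+      t(Enum, :t, 0)
+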
""" def t(module, type, arity) when is_atom(module) and is_atom(type) and is_integer(arity) do - case Kernel.Typespec.beam_types(module) do - nil -> nobeam(module) + case Typespec.beam_types(module) do + nil -> no_beam(module) types -> printed = - for {_, {t, _, args}} = typespec <- types, t == type, length(args) == arity do + for {_, {^type, _, args}} = typespec <- types, length(args) == arity do + print_type_doc(module, type) print_type(typespec) typespec end if printed == [] do - notypes("#{inspect module}.#{type}") + no_types("#{inspect module}.#{type}") end end - dont_display_result + dont_display_result() + end + + defp print_type_doc(module, type) do + docs = Code.get_docs(module, :type_docs) + {_, _, _, content} = Enum.find(docs, fn({{name, _}, _, _, _}) -> + type == name + end) + if content, do: puts_info(content) end @doc """ - Print specs for given module. + Prints the specs for given module. """ def s(module) when is_atom(module) do case beam_specs(module) do - nil -> nobeam(module) - [] -> nospecs(inspect module) + nil -> no_beam(module) + [] -> no_specs(inspect module) specs -> - printed = for {_kind, {{f, _arity}, _spec}} = spec <- specs, f != :"__info__" do + printed = for {_kind, {{fun, _arity}, _spec}} = spec <- specs, fun != :__info__ do print_spec(spec) end if printed == [] do - nospecs(inspect module) + no_specs(inspect module) end end - dont_display_result + dont_display_result() end @doc """ - Print specs for given module and function. + Prints the specs for given module and function. """ def s(module, function) when is_atom(module) and is_atom(function) do case beam_specs(module) do - nil -> nobeam(module) + nil -> no_beam(module) specs -> printed = - for {_kind, {{f, _arity}, _spec}} = spec <- specs, f == function do + for {_kind, {{^function, _arity}, _spec}} = spec <- specs do print_spec(spec) spec end if printed == [] do - nospecs("#{inspect module}.#{function}") + no_specs("#{inspect module}.#{function}") end end - dont_display_result + dont_display_result() end @doc """ - Print spec in given module, with arity. + Prints the spec in given module, with arity. 
""" def s(module, function, arity) when is_atom(module) and is_atom(function) and is_integer(arity) do case beam_specs(module) do - nil -> nobeam(module) + nil -> no_beam(module) specs -> printed = - for {_kind, {{f, a}, _spec}} = spec <- specs, f == function and a == arity do + for {_kind, {{^function, ^arity}, _spec}} = spec <- specs do print_spec(spec) spec end if printed == [] do - nospecs("#{inspect module}.#{function}") + no_specs("#{inspect module}.#{function}") end end - dont_display_result + dont_display_result() end defp beam_specs(module) do - specs = beam_specs_tag(Kernel.Typespec.beam_specs(module), :spec) - callbacks = beam_specs_tag(Kernel.Typespec.beam_callbacks(module), :callback) + specs = beam_specs_tag(Typespec.beam_specs(module), :spec) + callbacks = beam_specs_tag(Typespec.beam_callbacks(module), :callback) specs && callbacks && Enum.concat(specs, callbacks) end @@ -309,34 +409,48 @@ defmodule IEx.Introspection do Enum.map(specs, &{tag, &1}) end + defp print_type({:opaque, type}) do + {:::, _, [ast, _]} = Typespec.type_to_ast(type) + puts_info("@opaque #{Macro.to_string(ast)}") + true + end + defp print_type({kind, type}) do - ast = Kernel.Typespec.type_to_ast(type) - IO.puts IEx.color(:eval_info, "@#{kind} #{Macro.to_string(ast)}") + ast = Typespec.type_to_ast(type) + puts_info("@#{kind} #{Macro.to_string(ast)}") true end defp print_spec({kind, {{name, _arity}, specs}}) do Enum.each specs, fn(spec) -> - binary = Macro.to_string Kernel.Typespec.spec_to_ast(name, spec) - IO.puts IEx.color(:eval_info, "@#{kind} #{binary}") + binary = Macro.to_string Typespec.spec_to_ast(name, spec) + puts_info("@#{kind} #{binary}") end true end - defp nobeam(module) do + defp no_beam(module) do case Code.ensure_loaded(module) do {:module, _} -> - IO.puts IEx.color(:eval_error, "Beam code not available for #{inspect module} or debug info is missing, cannot load typespecs") + puts_error("Beam code not available for #{inspect module} or debug info is missing, cannot load typespecs") {:error, reason} -> - IO.puts IEx.color(:eval_error, "Could not load module #{inspect module}, got: #{reason}") + puts_error("Could not load module #{inspect module}, got: #{reason}") end end - defp nospecs(for), do: no(for, "specification") - defp notypes(for), do: no(for, "type information") - defp nodocs(for), do: no(for, "documentation") + defp no_specs(for), do: no(for, "specification") + defp no_types(for), do: no(for, "type information") + defp no_docs(for), do: no(for, "documentation") defp no(for, type) do - IO.puts IEx.color(:eval_error, "No #{type} for #{for} was found") + puts_error("No #{type} for #{for} was found") + end + + defp puts_info(string) do + IO.puts IEx.color(:eval_info, string) + end + + defp puts_error(string) do + IO.puts IEx.color(:eval_error, string) end end diff --git a/lib/iex/lib/iex/server.ex b/lib/iex/lib/iex/server.ex index 071a0e61820..8ccc15f51ff 100644 --- a/lib/iex/lib/iex/server.ex +++ b/lib/iex/lib/iex/server.ex @@ -1,16 +1,28 @@ -defmodule IEx.Config do +defmodule IEx.State do @moduledoc false - defstruct binding: nil, cache: '', counter: 1, prefix: "iex", scope: nil, env: nil + defstruct cache: '', counter: 1, prefix: "iex" + @type t :: %__MODULE__{} end defmodule IEx.Server do @moduledoc false @doc """ - Finds where the current IEx server is located. + Finds the IEx server, on this or another node. 
""" @spec whereis :: pid | nil def whereis() do + Enum.find_value([node() | Node.list], fn node -> + server = :rpc.call(node, IEx.Server, :local, []) + if is_pid(server), do: server + end) + end + + @doc """ + Returns the PID of the IEx server on the local node if exists. + """ + @spec local :: pid | nil + def local() do # Locate top group leader, always registered as user # can be implemented by group (normally) or user # (if oldshell or noshell) @@ -30,6 +42,19 @@ defmodule IEx.Server do end end + @doc """ + Returns the PID of the IEx evaluator process if it exists. + """ + @spec evaluator :: pid | nil + def evaluator() do + case IEx.Server.local do + nil -> nil + pid -> + {:dictionary, dictionary} = Process.info(pid, :dictionary) + dictionary[:evaluator] + end + end + @doc """ Requests to take over the given shell from the current process. @@ -38,22 +63,22 @@ defmodule IEx.Server do :ok | {:error, :no_iex} | {:error, :refused} def take_over(identifier, opts, timeout \\ 1000, server \\ whereis()) do cond do - nil?(server) -> + is_nil(server) -> {:error, :no_iex} true -> ref = make_ref() - send server, {:take?, self, ref} + send server, {:take?, self(), ref} receive do ^ref -> - opts = [evaluator: self] ++ opts - send server, {:take, self, identifier, ref, opts} + opts = [evaluator: self()] ++ opts + send server, {:take, self(), identifier, ref, opts} receive do {^ref, nil} -> {:error, :refused} {^ref, leader} -> - IEx.Evaluator.start(server, leader) + IEx.Evaluator.init(:no_ack, server, leader, opts) end after timeout -> @@ -76,8 +101,9 @@ defmodule IEx.Server do we spawn a new server for it without waiting for its conclusion. """ - @spec start(list, mfa) :: :ok + @spec start(list, {module, atom, [any]}) :: :ok def start(opts, {m, f, a}) do + Process.flag(:trap_exit, true) {pid, ref} = spawn_monitor(m, f, a) start_loop(opts, pid, ref) end @@ -97,10 +123,10 @@ defmodule IEx.Server do start_loop(opts, pid, ref) end - {:DOWN, ^ref, :process, ^pid, :normal} -> + {:DOWN, ^ref, :process, ^pid, :normal} -> run(opts) - {:DOWN, ^ref, :process, ^pid, _reason} -> + {:DOWN, ^ref, :process, ^pid, _reason} -> :ok end end @@ -110,11 +136,21 @@ defmodule IEx.Server do defp run(opts) when is_list(opts) do IO.puts "Interactive Elixir (#{System.version}) - press Ctrl+C to exit (type h() ENTER for help)" - self_pid = self + evaluator = start_evaluator(opts) + loop(run_state(opts), evaluator, Process.monitor(evaluator)) + end + + @doc """ + Starts an evaluator using the provided options. + """ + @spec start_evaluator(Keyword.t) :: pid + def start_evaluator(opts) do + self_pid = self() self_leader = Process.group_leader - evaluator = opts[:evaluator] || spawn(fn -> IEx.Evaluator.start(self_pid, self_leader) end) + evaluator = opts[:evaluator] || + :proc_lib.start(IEx.Evaluator, :init, [:ack, self_pid, self_leader, opts]) Process.put(:evaluator, evaluator) - loop(run_config(opts), evaluator, Process.monitor(evaluator)) + evaluator end defp reset_loop(opts, evaluator, evaluator_ref) do @@ -130,47 +166,54 @@ defmodule IEx.Server do Process.delete(:evaluator) Process.demonitor(evaluator_ref, [:flush]) if done? 
do - send(evaluator, {:done, self}) + send(evaluator, {:done, self()}) end :ok end - defp loop(config, evaluator, evaluator_ref) do + defp loop(state, evaluator, evaluator_ref) do self_pid = self() - counter = config.counter - prefix = if config.cache != [], do: "...", else: config.prefix + counter = state.counter + prefix = if state.cache != [], do: "...", else: state.prefix input = spawn(fn -> io_get(self_pid, prefix, counter) end) - wait_input(config, evaluator, evaluator_ref, input) + wait_input(state, evaluator, evaluator_ref, input) end - defp wait_input(config, evaluator, evaluator_ref, input) do + defp wait_input(state, evaluator, evaluator_ref, input) do receive do {:input, ^input, code} when is_binary(code) -> - send evaluator, {:eval, self, code, config} - wait_eval(evaluator, evaluator_ref) + send evaluator, {:eval, self(), code, state} + wait_eval(state, evaluator, evaluator_ref) {:input, ^input, {:error, :interrupted}} -> io_error "** (EXIT) interrupted" - loop(%{config | cache: ''}, evaluator, evaluator_ref) + loop(%{state | cache: ''}, evaluator, evaluator_ref) {:input, ^input, :eof} -> exit_loop(evaluator, evaluator_ref) {:input, ^input, {:error, :terminated}} -> exit_loop(evaluator, evaluator_ref) - msg -> handle_take_over(msg, evaluator, evaluator_ref, input, fn -> - wait_input(config, evaluator, evaluator_ref, input) + wait_input(state, evaluator, evaluator_ref, input) end) end end - defp wait_eval(evaluator, evaluator_ref) do + defp wait_eval(state, evaluator, evaluator_ref) do receive do - {:evaled, ^evaluator, config} -> - loop(config, evaluator, evaluator_ref) + {:evaled, ^evaluator, new_state} -> + loop(new_state, evaluator, evaluator_ref) + {:EXIT, _pid, :interrupt} -> + # User did ^G while the evaluator was busy or stuck + io_error "** (EXIT) interrupted" + Process.delete(:evaluator) + Process.exit(evaluator, :kill) + Process.demonitor(evaluator_ref, [:flush]) + evaluator = start_evaluator([]) + loop(%{state | cache: ''}, evaluator, Process.monitor(evaluator)) msg -> handle_take_over(msg, evaluator, evaluator_ref, nil, - fn -> wait_eval(evaluator, evaluator_ref) end) + fn -> wait_eval(state, evaluator, evaluator_ref) end) end end @@ -182,8 +225,7 @@ defmodule IEx.Server do # re-runs the server OR goes back to the main loop. # # A take process may also happen if the evaluator dies, - # then a new evaluator is created to tackle replace the dead - # one. + # then a new evaluator is created to replace the dead one. defp handle_take_over({:take?, other, ref}, _evaluator, _evaluator_ref, _input, callback) do send(other, ref) callback.() @@ -206,7 +248,7 @@ defmodule IEx.Server do reset_loop([], evaluator, evaluator_ref) end - defp handle_take_over({:DOWN, evaluator_ref, :process, evaluator, reason}, + defp handle_take_over({:DOWN, evaluator_ref, :process, evaluator, reason}, evaluator, evaluator_ref, input, _callback) do try do io_error Exception.format_banner({:EXIT, evaluator}, reason) @@ -226,53 +268,41 @@ defmodule IEx.Server do defp kill_input(input), do: Process.exit(input, :kill) defp allow_take?(identifier) do - message = IEx.color(:eval_interrupt, "#{identifier}. Allow? [Yn] ") - IO.gets(:stdio, message) =~ ~r/^(Y(es)?)?$/i + message = IEx.color(:eval_interrupt, "#{identifier}\nAllow? 
[Yn] ") + yes?(IO.gets(:stdio, message)) end - ## Config - - defp run_config(opts) do - locals = Keyword.get(opts, :delegate_locals_to, IEx.Helpers) - - env = - if env = opts[:env] do - :elixir.env_for_eval(env, delegate_locals_to: locals) - else - :elixir.env_for_eval(file: "iex", delegate_locals_to: locals) - end + defp yes?(string) do + is_binary(string) and String.trim(string) in ["", "y", "Y", "yes", "YES", "Yes"] + end - {_, _, env, scope} = :elixir.eval('require IEx.Helpers', [], env) + ## State - binding = Keyword.get(opts, :binding, []) - prefix = Keyword.get(opts, :prefix, "iex") - config = %IEx.Config{binding: binding, scope: scope, prefix: prefix, env: env} + defp run_state(opts) do + prefix = Keyword.get(opts, :prefix, "iex") - case opts[:dot_iex_path] do - "" -> config - path -> IEx.Evaluator.load_dot_iex(config, path) - end + %IEx.State{prefix: prefix} end ## IO defp io_get(pid, prefix, counter) do prompt = prompt(prefix, counter) - send pid, {:input, self, IO.gets(:stdio, prompt)} + send pid, {:input, self(), IO.gets(:stdio, prompt)} end defp prompt(prefix, counter) do {mode, prefix} = if Node.alive? do - {:alive, prefix || remote_prefix} + {:alive_prompt, prefix || remote_prefix()} else - {:default, prefix || "iex"} + {:default_prompt, prefix || "iex"} end - prompt = Application.get_env(:iex, :"#{mode}_prompt") + prompt = apply(IEx.Config, mode, []) |> String.replace("%counter", to_string(counter)) |> String.replace("%prefix", to_string(prefix)) - |> String.replace("%node", to_string(node)) + |> String.replace("%node", to_string(node())) prompt <> " " end @@ -282,6 +312,6 @@ defmodule IEx.Server do end defp remote_prefix do - if node == node(Process.group_leader), do: "iex", else: "rem" + if node() == node(Process.group_leader), do: "iex", else: "rem" end end diff --git a/lib/iex/mix.exs b/lib/iex/mix.exs index 456337be90e..b050f2c6999 100644 --- a/lib/iex/mix.exs +++ b/lib/iex/mix.exs @@ -8,12 +8,13 @@ defmodule IEx.Mixfile do end def application do - [env: [ - after_spawn: [], - colors: [], - inspect: [], - history_size: 20, - default_prompt: "%prefix(%counter)>", - alive_prompt: "%prefix(%node)%counter>"]] + [registered: [IEx.Supervisor, IEx.Config], + mod: {IEx.App, []}, + env: [ + colors: [], + inspect: [pretty: true], + history_size: 20, + default_prompt: "%prefix(%counter)>", + alive_prompt: "%prefix(%node)%counter>"]] end end diff --git a/lib/iex/test/iex/autocomplete_test.exs b/lib/iex/test/iex/autocomplete_test.exs index ad4f30f6b78..6522969780d 100644 --- a/lib/iex/test/iex/autocomplete_test.exs +++ b/lib/iex/test/iex/autocomplete_test.exs @@ -3,96 +3,343 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule IEx.AutocompleteTest do use ExUnit.Case, async: true - def expand(expr) do - IEx.Autocomplete.expand(Enum.reverse expr) + setup do + evaluator = IEx.Server.start_evaluator([]) + Process.put(:evaluator, evaluator) + :ok end - test :erlang_module_simple_completion do - assert expand(':z') == {:yes, 'lib.', []} + defmodule MyServer do + def evaluator do + Process.get(:evaluator) + end end - test :erlang_module_no_completion do - assert expand(':x') == {:no, '', []} - assert expand('x.Foo') == {:no, '', []} + defp eval(line) do + ExUnit.CaptureIO.capture_io(fn -> + evaluator = MyServer.evaluator + Process.group_leader(evaluator, Process.group_leader) + send evaluator, {:eval, self(), line <> "\n", %IEx.State{}} + assert_receive {:evaled, _, _} + end) + end + + defp expand(expr) do + IEx.Autocomplete.expand(Enum.reverse(expr), MyServer) + end + + test 
"Erlang module completion" do + assert expand(':zl') == {:yes, 'ib', []} end - test :erlang_module_common_prefix_completion do - assert expand(':us') == {:yes, 'er', []} + test "Erlang module no completion" do + assert expand(':unknown') == {:no, '', []} + assert expand('Enum:') == {:no, '', []} end - test :erlang_module_multiple_values_completion do + test "Erlang module multiple values completion" do {:yes, '', list} = expand(':user') - assert length(list) > 1 + assert 'user' in list + assert 'user_drv' in list + end + + test "Erlang root completion" do + {:yes, '', list} = expand(':') + assert is_list(list) + assert 'lists' in list end - test :elixir_simple_completion do + test "Elixir proxy" do + {:yes, '', list} = expand('E') + assert 'Elixir' in list + end + + test "Elixir completion" do assert expand('En') == {:yes, 'um', []} - assert expand('Enumera') == {:yes, 'ble.', []} + assert expand('Enumera') == {:yes, 'ble', []} end - test :elixir_auto_completion_with_self do + test "Elixir completion with self" do assert expand('Enumerable') == {:yes, '.', []} end - test :elixir_no_completion do + test "Elixir completion on modules from load path" do + assert expand('Str') == {:yes, [], ['Stream', 'String', 'StringIO']} + assert expand('Ma') == {:yes, '', ['Macro', 'Map', 'MapSet', 'MatchError']} + assert expand('Dic') == {:yes, 't', []} + assert expand('Ex') == {:yes, [], ['ExUnit', 'Exception']} + end + + test "Elixir no completion for underscored functions with no doc" do + {:module, _, bytecode, _} = + defmodule Elixir.Sample do + def __foo__(), do: 0 + @doc "Bar doc" + def __bar__(), do: 1 + end + File.write!("Elixir.Sample.beam", bytecode) + assert Code.get_docs(Sample, :docs) + assert expand('Sample._') == {:yes, '_bar__', []} + after + File.rm("Elixir.Sample.beam") + :code.purge(Sample) + :code.delete(Sample) + end + + test "Elixir no completion for default argument functions with doc set to false" do + {:yes, '', available} = expand('String.') + refute Enum.member?(available, 'rjust/2') + assert Enum.member?(available, 'replace/3') + + assert expand('String.r') == {:yes, 'e', []} + + {:module, _, bytecode, _} = + defmodule Elixir.DefaultArgumentFunctions do + def foo(a \\ :a, b, c \\ :c), + do: {a, b, c} + + def _do_fizz(a \\ :a, b, c \\ :c), + do: {a, b, c} + + @doc false + def __fizz__(a \\ :a, b, c \\ :c), + do: {a, b, c} + + @doc "bar/0 doc" + def bar(), + do: :bar + @doc false + def bar(a \\ :a, b, c \\ :c, d \\ :d), + do: {a, b, c, d} + @doc false + def bar(a, b, c, d, e), + do: {a, b, c, d, e} + + @doc false + def baz(a \\ :a), + do: {a} + + @doc "biz/3 doc" + def biz(a, b, c \\ :c), + do: {a, b, c} + end + File.write!("Elixir.DefaultArgumentFunctions.beam", bytecode) + assert Code.get_docs(DefaultArgumentFunctions, :docs) + assert expand('DefaultArgumentFunctions.') == + {:yes, '', ['bar/0', 'biz/2', 'biz/3', 'foo/1', 'foo/2', 'foo/3']} + assert expand('DefaultArgumentFunctions.bi') == {:yes, 'z', []} + assert expand('DefaultArgumentFunctions.foo') == {:yes, '', ['foo/1', 'foo/2', 'foo/3']} + after + File.rm("Elixir.DefaultArgumentFunctions.beam") + :code.purge(DefaultArgumentFunctions) + :code.delete(DefaultArgumentFunctions) + end + + test "Elixir no completion" do assert expand('.') == {:no, '', []} assert expand('Xyz') == {:no, '', []} + assert expand('x.Foo') == {:no, '', []} + assert expand('x.Foo.get_by') == {:no, '', []} end - test :elixir_root_submodule_completion do - _ = [foo: 1][:foo] - assert expand('Elixir.Acce') == {:yes, 'ss.', []} + test "Elixir root 
submodule completion" do + assert expand('Elixir.Acce') == {:yes, 'ss', []} end - test :elixir_submodule_completion do - assert expand('String.Cha') == {:yes, 'rs.', []} + test "Elixir submodule completion" do + assert expand('String.Cha') == {:yes, 'rs', []} end - test :elixir_submodule_no_completion do + test "Elixir submodule no completion" do assert expand('IEx.Xyz') == {:no, '', []} end - test :elixir_function_completion do + test "function completion" do assert expand('System.ve') == {:yes, 'rsion', []} assert expand(':ets.fun2') == {:yes, 'ms', []} end - test :elixir_function_completion_with_arity do - assert expand('String.printable?') == {:yes, '', ['printable?/1']} - assert expand('String.printable?/') == {:yes, '', ['printable?/1']} + test "function completion with arity" do + assert expand('String.printable?') == {:yes, '', ['printable?/1', 'printable?/2']} + assert expand('String.printable?/') == {:yes, '', ['printable?/1', 'printable?/2']} + end + + test "function completion using a variable bound to a module" do + eval("mod = String") + assert expand('mod.print') == {:yes, 'able?', []} end - test :elixir_macro_completion do + test "map atom key completion is supported" do + eval("map = %{foo: 1, bar_1: 23, bar_2: 14}") + assert expand('map.f') == {:yes, 'oo', []} + assert expand('map.b') == {:yes, 'ar_', []} + assert expand('map.bar_') == {:yes, '', ['bar_1', 'bar_2']} + assert expand('map.c') == {:no, '', []} + assert expand('map.') == {:yes, '', ['bar_1', 'bar_2', 'foo']} + assert expand('map.foo') == {:no, '', []} + end + + test "nested map atom key completion is supported" do + eval("map = %{nested: %{deeply: %{foo: 1, bar_1: 23, bar_2: 14, mod: String, num: 1}}}") + assert expand('map.nested.deeply.f') == {:yes, 'oo', []} + assert expand('map.nested.deeply.b') == {:yes, 'ar_', []} + assert expand('map.nested.deeply.bar_') == {:yes, '', ['bar_1', 'bar_2']} + assert expand('map.nested.deeply.') == {:yes, '', ['bar_1', 'bar_2', 'foo', 'mod', 'num']} + assert expand('map.nested.deeply.mod.print') == {:yes, 'able?', []} + + assert expand('map.nested') == {:yes, '.', []} + assert expand('map.nested.deeply') == {:yes, '.', []} + assert expand('map.nested.deeply.foo') == {:no, '', []} + + assert expand('map.nested.deeply.c') == {:no, '', []} + assert expand('map.a.b.c.f') == {:no, '', []} + end + + test "map string key completion is not supported" do + eval(~S(map = %{"foo" => 1})) + assert expand('map.f') == {:no, '', []} + end + + test "autocompletion off a bound variable only works for modules and maps" do + eval("num = 5; map = %{nested: %{num: 23}}") + assert expand('num.print') == {:no, '', []} + assert expand('map.nested.num.f') == {:no, '', []} + assert expand('map.nested.num.key.f') == {:no, '', []} + end + + test "autocompletion using access syntax does is not supported" do + eval("map = %{nested: %{deeply: %{num: 23}}}") + assert expand('map[:nested][:deeply].n') == {:no, '', []} + assert expand('map[:nested].deeply.n') == {:no, '', []} + assert expand('map.nested.[:deeply].n') == {:no, '', []} + end + + test "autocompletion off of unbound variables is not supported" do + eval("num = 5") + assert expand('other_var.f') == {:no, '', []} + assert expand('a.b.c.d') == {:no, '', []} + end + + test "macro completion" do {:yes, '', list} = expand('Kernel.is_') assert is_list(list) end - test :elixir_root_completion do + test "imports completion" do {:yes, '', list} = expand('') assert is_list(list) assert 'h/1' in list - assert 'Elixir' in list + assert 'unquote/1' in list 
+ assert 'pwd/0' in list end - test :elixir_kernel_completion do + test "kernel import completion" do assert expand('defstru') == {:yes, 'ct', []} + assert expand('put_') == {:yes, '', ['put_elem/3', 'put_in/2', 'put_in/3']} end - test :elixir_proxy do - {:yes, '', list} = expand('E') - assert 'Elixir' in list + test "variable name completion" do + eval("numeral = 3; number = 3; nothing = nil") + assert expand('numb') == {:yes, 'er', []} + assert expand('num') == {:yes, '', ['number', 'numeral']} + assert expand('no') == {:yes, '', ['nothing', 'node/0', 'node/1', 'not/1']} end - test :elixir_erlang_module_root_completion do - {:yes, '', list} = expand(':') - assert is_list(list) - assert 'lists' in list + test "completion of manually imported functions and macros" do + eval("import Enum; import Supervisor, only: [count_children: 1]; import Protocol") + assert expand('take') == {:yes, '', ['take/2', 'take_every/2', 'take_random/2', 'take_while/2']} + assert expand('count') == {:yes, '', ['count/1', 'count/2', 'count_children/1']} + assert expand('der') == {:yes, 'ive', []} + end + + defmacro define_var do + quote do: var!(my_var_1, Elixir) = 1 + end + + test "ignores quoted variables when performing variable completion" do + eval("require #{__MODULE__}; #{__MODULE__}.define_var(); my_var_2 = 2") + assert expand('my_var') == {:yes, '_2', []} end - test :completion_inside_expression do - assert expand('1+En') == {:yes, 'um', []} + test "kernel special form completion" do + assert expand('unquote_spl') == {:yes, 'icing', []} + end + + test "completion inside expression" do + assert expand('1 En') == {:yes, 'um', []} assert expand('Test(En') == {:yes, 'um', []} - assert expand('Test :z') == {:yes, 'lib.', []} - assert expand('[:z') == {:yes, 'lib.', []} + assert expand('Test :zl') == {:yes, 'ib', []} + assert expand('[:zl') == {:yes, 'ib', []} + assert expand('{:zl') == {:yes, 'ib', []} + end + + test "ampersand completion" do + assert expand('&Enu') == {:yes, 'm', []} + assert expand('&Enum.a') == {:yes, [], ['all?/1', 'all?/2', 'any?/1', 'any?/2', 'at/2', 'at/3']} + assert expand('f = &Enum.a') == {:yes, [], ['all?/1', 'all?/2', 'any?/1', 'any?/2', 'at/2', 'at/3']} + end + + defmodule SublevelTest.LevelA.LevelB do + end + + test "Elixir completion sublevel" do + assert expand('IEx.AutocompleteTest.SublevelTest.') == {:yes, 'LevelA', []} + end + + test "complete aliases of Elixir modules" do + eval("alias List, as: MyList") + assert expand('MyL') == {:yes, 'ist', []} + assert expand('MyList') == {:yes, '.', []} + assert expand('MyList.to_integer') == {:yes, [], ['to_integer/1', 'to_integer/2']} + end + + test "complete aliases of Erlang modules" do + eval("alias :lists, as: EList") + assert expand('EL') == {:yes, 'ist', []} + assert expand('EList') == {:yes, '.', []} + assert expand('EList.map') == {:yes, [], ['map/2', 'mapfoldl/3', 'mapfoldr/3']} + end + + test "completion for functions added when compiled module is reloaded" do + {:module, _, bytecode, _} = + defmodule Sample do + def foo(), do: 0 + end + File.write!("Elixir.IEx.AutocompleteTest.Sample.beam", bytecode) + assert Code.get_docs(Sample, :docs) + assert expand('IEx.AutocompleteTest.Sample.foo') == {:yes, '', ['foo/0']} + + Code.compiler_options(ignore_module_conflict: true) + defmodule Sample do + def foo(), do: 0 + def foobar(), do: 0 + end + assert expand('IEx.AutocompleteTest.Sample.foo') == {:yes, '', ['foo/0', 'foobar/0']} + after + File.rm("Elixir.IEx.AutocompleteTest.Sample.beam") + 
Code.compiler_options(ignore_module_conflict: false) + :code.purge(Sample) + :code.delete(Sample) + end + + defmodule MyStruct do + defstruct [:my_val] + end + + test "completion for struct names" do + assert expand('%IEx.AutocompleteTest.MyStr') == {:yes, 'uct', []} + end + + test "completion for struct keys" do + eval("struct = %IEx.AutocompleteTest.MyStruct{}") + assert expand('struct.my') == {:yes, '_val', []} + end + + test "ignore invalid Elixir module literals" do + defmodule :"Elixir.IEx.AutocompleteTest.Unicodé", do: nil + assert expand('IEx.AutocompleteTest.Unicod') == {:no, '', []} + after + :code.purge(:"Elixir.IEx.AutocompleteTest.Unicodé") + :code.delete(:"Elixir.IEx.AutocompleteTest.Unicodé") end end diff --git a/lib/iex/test/iex/evaluator_test.exs b/lib/iex/test/iex/evaluator_test.exs deleted file mode 100644 index 0860956e441..00000000000 --- a/lib/iex/test/iex/evaluator_test.exs +++ /dev/null @@ -1,25 +0,0 @@ -Code.require_file "../test_helper.exs", __DIR__ - -defmodule IEx.EvaluatorTest do - use ExUnit.Case, async: true - - alias IEx.Evaluator, as: E - - test "format_stacktrace returns formatted result in columns" do - frames = [ - {List, :one, 1, [file: "loc", line: 1]}, - {String, :second, 2, [file: "loc2", line: 102]}, - {IEx, :three, 3, [file: "longer", line: 1234]}, - {List, :four, 4, [file: "loc", line: 1]}, - ] - - expected = """ - (elixir) loc:1: List.one/1 - (elixir) loc2:102: String.second/2 - (iex) longer:1234: IEx.three/3 - (elixir) loc:1: List.four/4 - """ - - assert E.format_stacktrace(frames) <> "\n" == expected - end -end diff --git a/lib/iex/test/iex/helpers_test.exs b/lib/iex/test/iex/helpers_test.exs index e6f7a44c01e..4a99fb9ab66 100644 --- a/lib/iex/test/iex/helpers_test.exs +++ b/lib/iex/test/iex/helpers_test.exs @@ -6,12 +6,18 @@ defmodule IEx.HelpersTest do import IEx.Helpers test "clear helper" do - assert "\e[H\e[2J" == capture_iex("clear") + Application.put_env(:elixir, :ansi_enabled, true) + assert capture_iex("clear()") == "\e[H\e[2J" + + Application.put_env(:elixir, :ansi_enabled, false) + assert capture_iex("clear()") =~ "Cannot clear the screen because ANSI escape codes are not enabled on this shell" + after + Application.delete_env(:elixir, :ansi_enabled) end test "h helper" do assert "* IEx.Helpers\n\nWelcome to Interactive Elixir" <> _ - = capture_iex("h") + = capture_iex("h()") end test "h helper module" do @@ -27,10 +33,12 @@ defmodule IEx.HelpersTest do test "h helper function" do pwd_h = "* def pwd()\n\nPrints the current working directory.\n\n" - c_h = "* def c(files, path \\\\ \".\")\n\nExpects a list of files to compile" + c_h = "* def c(files, path \\\\ :in_memory)\n\nCompiles the given files." 
+ eq_h = "* def ==(left, right)\n\nReturns `true` if the two items are equal.\n\n" assert capture_io(fn -> h IEx.Helpers.pwd/0 end) =~ pwd_h assert capture_io(fn -> h IEx.Helpers.c/2 end) =~ c_h + assert capture_io(fn -> h ==/2 end) =~ eq_h assert capture_io(fn -> h IEx.Helpers.c/1 end) =~ c_h assert capture_io(fn -> h pwd end) =~ pwd_h @@ -43,6 +51,101 @@ defmodule IEx.HelpersTest do assert capture_io(fn -> h __info__ end) == "No documentation for __info__ was found\n" end + test "h helper underscored functions" do + content = """ + defmodule Sample do + def __foo__(), do: 0 + @doc "Bar doc" + def __bar__(), do: 1 + end + """ + filename = "sample.ex" + with_file filename, content, fn -> + assert c(filename, ".") == [Sample] + + assert capture_io(fn -> h Sample.__foo__ end) == "No documentation for Sample.__foo__ was found\n" + assert capture_io(fn -> h Sample.__bar__ end) == "* def __bar__()\n\nBar doc\n" + + assert capture_io(fn -> h Sample.__foo__/0 end) == "No documentation for Sample.__foo__/0 was found\n" + assert capture_io(fn -> h Sample.__bar__/0 end) == "* def __bar__()\n\nBar doc\n" + end + after + cleanup_modules([Sample]) + end + + test "h helper for callbacks" do + behaviour = """ + defmodule MyBehaviour do + @doc "Docs for MyBehaviour.first" + @callback first(integer) :: integer + @callback second(integer) :: integer + @callback second(integer, integer) :: integer + end + """ + impl = """ + defmodule Impl do + @behaviour MyBehaviour + def first(0), do: 0 + @doc "Docs for Impl.second/1" + def second(0), do: 0 + @doc "Docs for Impl.second/2" + def second(0, 0), do: 0 + end + """ + files = ["my_behaviour.ex", "impl.ex"] + with_file files, [behaviour, impl], fn -> + assert c(files, ".") |> Enum.sort == [Impl, MyBehaviour] + + assert capture_io(fn -> h Impl.first/1 end) == "* @callback first(integer()) :: integer()\n\nDocs for MyBehaviour.first\n" + assert capture_io(fn -> h Impl.second/1 end) == "* def second(int)\n\nDocs for Impl.second/1\n" + assert capture_io(fn -> h Impl.second/2 end) == "* def second(int1, int2)\n\nDocs for Impl.second/2\n" + + assert capture_io(fn -> h Impl.first end) == "* @callback first(integer()) :: integer()\n\nDocs for MyBehaviour.first\n" + assert capture_io(fn -> h Impl.second end) == "* def second(int)\n\nDocs for Impl.second/1\n* def second(int1, int2)\n\nDocs for Impl.second/2\n" + end + after + cleanup_modules([Impl, MyBehaviour]) + end + + test "h helper for delegates" do + filename = "delegate.ex" + content = """ + defmodule Delegator do + defdelegate func1, to: Delegated + @doc "Delegator func2 doc" + defdelegate func2, to: Delegated + end + defmodule Delegated do + def func1, do: 1 + def func2, do: 2 + end + """ + with_file filename, content, fn -> + assert c(filename, ".") |> Enum.sort == [Delegated, Delegator] + + assert capture_io(fn -> h Delegator.func1 end) == "* def func1()\n\nSee `Delegated.func1/0`.\n" + assert capture_io(fn -> h Delegator.func2 end) == "* def func2()\n\nDelegator func2 doc\n" + end + after + cleanup_modules([Delegated, Delegator]) + end + + test "b helper module" do + assert capture_io(fn -> b Mix end) == "No callbacks for Mix were found\n" + assert capture_io(fn -> b NoMix end) == "Could not load module NoMix, got: nofile\n" + assert capture_io(fn -> b Mix.SCM end) =~ """ + @callback accepts_options(app :: atom(), opts()) :: opts() | nil + @callback checked_out?(opts()) :: boolean() + """ + end + + test "b helper function" do + assert capture_io(fn -> b Mix.Task.stop end) == "No documentation for Mix.Task.stop was 
found\n" + assert capture_io(fn -> b Mix.Task.run end) =~ "* @callback run([binary()]) :: any()\n\nA task needs to implement `run`" + assert capture_io(fn -> b NoMix.run end) == "Could not load module NoMix, got: nofile\n" + assert capture_io(fn -> b Exception.message/1 end) == "* @callback message(t()) :: String.t()\n\n\n" + end + test "t helper" do assert capture_io(fn -> t IEx end) == "No type information for IEx was found\n" @@ -51,75 +154,74 @@ defmodule IEx.HelpersTest do String.starts_with? line, "@type" end) >= 2 - assert "@type t() :: " <> _ - = capture_io(fn -> t Enum.t end) + assert "@type t() :: " <> _ = capture_io(fn -> t Enum.t end) assert capture_io(fn -> t Enum.t end) == capture_io(fn -> t Enum.t/0 end) + + assert "@opaque t(value)\n@type t() :: t(term())\n" = capture_io(fn -> t MapSet.t end) + assert capture_io(fn -> t URI.t end) == capture_io(fn -> t URI.t/0 end) + + content = """ + defmodule TypeSample do + @typedoc "An id with description." + @type id_with_desc :: {number, String.t} + end + """ + filename = "typesample.ex" + with_file filename, content, fn -> + assert c(filename, ".") == [TypeSample] + assert capture_io(fn -> t TypeSample.id_with_desc/0 end) == """ + An id with description. + @type id_with_desc() :: {number(), String.t()} + """ + assert capture_io(fn -> t TypeSample.id_with_desc end) == """ + An id with description. + @type id_with_desc() :: {number(), String.t()} + """ + end + after + cleanup_modules([TypeSample]) end test "s helper" do - assert capture_io(fn -> s ExUnit end) == "No specification for ExUnit was found\n" + assert capture_io(fn -> s IEx.Remsh end) == "No specification for IEx.Remsh was found\n" # Test that it shows at least two specs - assert Enum.count(capture_io(fn -> s Enum end) |> String.split("\n"), fn line -> + assert Enum.count(capture_io(fn -> s Process end) |> String.split("\n"), fn line -> String.starts_with? line, "@spec" end) >= 2 - assert Enum.count(capture_io(fn -> s Enum.all? end) |> String.split("\n"), fn line -> + assert Enum.count(capture_io(fn -> s Process.flag end) |> String.split("\n"), fn line -> String.starts_with? line, "@spec" end) >= 2 - assert capture_io(fn -> s Enum.all?/1 end) == - "@spec all?(t()) :: boolean()\n" + assert capture_io(fn -> s Process.register/2 end) == + "@spec register(pid() | port(), atom()) :: true\n" assert capture_io(fn -> s struct end) == - "@spec struct(module() | %{}, Enum.t()) :: %{}\n" + "@spec struct(module() | struct(), Enum.t()) :: struct()\n" end test "v helper" do - assert capture_iex("v") == ":ok" - assert capture_iex("1\n2\nv") == String.rstrip """ - 1 - 2 - 1: 1 - #=> 1 - - 2: 2 - #=> 2 - - :ok - """ - - assert "** (RuntimeError) v(0) is out of bounds" <> _ - = capture_iex("v(0)") + assert "** (RuntimeError) v(0) is out of bounds" <> _ = capture_iex("v(0)") assert capture_iex("1\n2\nv(2)") == "1\n2\n2" assert capture_iex("1\n2\nv(2)") == capture_iex("1\n2\nv(-1)") - - assert capture_iex("1\n2\nIEx.History.reset\nv") - == String.rstrip """ - 1 - 2 - true - 3: IEx.History.reset - #=> true - - :ok - """ + assert capture_iex("1\n2\nv(2)") == capture_iex("1\n2\nv()") end test "flush helper" do - assert capture_io(fn -> send self(), :hello; flush end) == ":hello\n" + assert capture_io(fn -> send self(), :hello; flush() end) == ":hello\n" end test "pwd helper" do - File.cd! iex_path, fn -> - assert capture_io(fn -> pwd end) =~ ~r"lib[\\/]iex\n$" + File.cd! iex_path(), fn -> + assert capture_io(fn -> pwd() end) =~ ~r"lib[\\/]iex\n$" end end test "ls helper" do - File.cd! 
iex_path, fn -> - paths = capture_io(fn -> ls end) + File.cd! iex_path(), fn -> + paths = capture_io(fn -> ls() end) |> String.split - |> Enum.map(&String.strip(&1)) + |> Enum.map(&String.trim/1) assert "ebin" in paths assert "mix.exs" in paths @@ -129,15 +231,21 @@ defmodule IEx.HelpersTest do end end + test "e helper" do + exports = capture_io(fn -> e(IEx.Autocomplete) end) + assert exports == "expand/1 expand/2 exports/1 \n" + end + test "import_file helper" do with_file "dot-iex", "variable = :hello\nimport IO", fn -> - assert "** (RuntimeError) undefined function: variable/0" <> _ - = capture_iex("variable") - assert "** (RuntimeError) undefined function: puts/1" <> _ - = capture_iex("puts \"hi\"") + capture_io(:stderr, fn -> + assert "** (CompileError) iex:1: undefined function variable/0" <> _ = capture_iex("variable") + end) - assert capture_iex("import_file \"dot-iex\"\nvariable\nputs \"hi\"") - == "nil\n:hello\nhi\n:ok" + assert "** (CompileError) iex:1: undefined function puts/1" <> _ = capture_iex("puts \"hi\"") + + assert capture_iex("import_file \"dot-iex\"\nvariable\nputs \"hi\"") == + "IO\n:hello\nhi\n:ok" end end @@ -146,35 +254,57 @@ defmodule IEx.HelpersTest do dot_1 = "variable = :hello\nimport IO" with_file ["dot-iex", "dot-iex-1"], [dot, dot_1], fn -> - assert "** (RuntimeError) undefined function: parent/0" <> _ - = capture_iex("parent") - assert "** (RuntimeError) undefined function: variable/0" <> _ - = capture_iex("variable") - assert "** (RuntimeError) undefined function: puts/1" <> _ - = capture_iex("puts \"hi\"") + capture_io(:stderr, fn -> + assert "** (CompileError) iex:1: undefined function parent/0" <> _ = capture_iex("parent") + end) + + assert "** (CompileError) iex:1: undefined function puts/1" <> _ = capture_iex("puts \"hi\"") - assert capture_iex("import_file \"dot-iex\"\nvariable\nputs \"hi\"\nparent") - == "nil\n:hello\nhi\n:ok\ntrue" + assert capture_iex("import_file \"dot-iex\"\nvariable\nputs \"hi\"\nparent") == + "IO\n:hello\nhi\n:ok\ntrue" end end + test "import_file when the file is missing" do + failing = capture_iex("import_file \"nonexistent\"") + assert "** (File.Error) could not read file" <> _ = failing + assert failing =~ "no such file or directory" + + assert "nil" == capture_iex("import_file_if_available \"nonexistent\"") + end + + test "import_if_available helper" do + assert "nil" == capture_iex("import_if_available NoSuchModule") + assert "[1, 2, 3]" == capture_iex("import_if_available Integer; digits 123") + assert "[1, 2, 3]" == capture_iex("import_if_available Integer, only: [digits: 1]; digits 123") + end + test "c helper" do - assert_raise UndefinedFunctionError, "undefined function: Sample.run/0", fn -> + assert_raise UndefinedFunctionError, ~r"function Sample\.run/0 is undefined", fn -> Sample.run end filename = "sample.ex" - with_file filename, test_module_code, fn -> + with_file filename, test_module_code(), fn -> assert c(filename) == [Sample] + refute File.exists?("Elixir.Sample.beam") assert Sample.run == :run end after cleanup_modules([Sample]) end + test "c helper with error" do + ExUnit.CaptureIO.capture_io fn -> + with_file "sample.ex", "raise \"oops\"", fn -> + assert_raise CompileError, fn -> c("sample.ex") end + end + end + end + test "c helper with full path" do filename = "sample.ex" - with_file filename, test_module_code, fn -> + with_file filename, test_module_code(), fn -> assert c(Path.expand(filename)) == [Sample] assert Sample.run == :run end @@ -183,12 +313,12 @@ defmodule IEx.HelpersTest do end test "c 
helper multiple modules" do - assert_raise UndefinedFunctionError, "undefined function: Sample.run/0", fn -> + assert_raise UndefinedFunctionError, ~r"function Sample.run/0 is undefined", fn -> Sample.run end filename = "sample.ex" - with_file filename, test_module_code <> "\n" <> another_test_module, fn -> + with_file filename, test_module_code() <> "\n" <> another_test_module(), fn -> assert c(filename) |> Enum.sort == [Sample, Sample2] assert Sample.run == :run assert Sample2.hello == :world @@ -198,12 +328,12 @@ defmodule IEx.HelpersTest do end test "c helper list" do - assert_raise UndefinedFunctionError, "undefined function: Sample.run/0", fn -> + assert_raise UndefinedFunctionError, ~r"function Sample.run/0 is undefined", fn -> Sample.run end filenames = ["sample1.ex", "sample2.ex"] - with_file filenames, [test_module_code, another_test_module], fn -> + with_file filenames, [test_module_code(), another_test_module()], fn -> assert c(filenames) |> Enum.sort == [Sample, Sample2] assert Sample.run == :run assert Sample2.hello == :world @@ -212,28 +342,28 @@ defmodule IEx.HelpersTest do cleanup_modules([Sample, Sample2]) end - test "c helper erlang" do - assert_raise UndefinedFunctionError, "undefined function: :sample.hello/0", fn -> + test "c helper Erlang" do + assert_raise UndefinedFunctionError, ~r"function :sample.hello/0 is undefined", fn -> :sample.hello end filename = "sample.erl" - with_file filename, erlang_module_code, fn -> + with_file filename, erlang_module_code(), fn -> assert c(filename) == [:sample] assert :sample.hello == :world + refute File.exists?("sample.beam") end after cleanup_modules([:sample]) end - test "c helper skips unknown files" do - assert_raise UndefinedFunctionError, "undefined function: :sample.hello/0", fn -> + assert_raise UndefinedFunctionError, ~r"function :sample.hello/0 is undefined", fn -> :sample.hello end filenames = ["sample.erl", "not_found.ex", "sample2.ex"] - with_file filenames, [erlang_module_code, "", another_test_module], fn -> + with_file filenames, [erlang_module_code(), "", another_test_module()], fn -> assert c(filenames) |> Enum.sort == [Sample2, :sample] assert :sample.hello == :world assert Sample2.hello == :world @@ -242,24 +372,38 @@ defmodule IEx.HelpersTest do cleanup_modules([:sample, Sample2]) end + test "c helper with path" do + assert_raise UndefinedFunctionError, ~r"function Sample\.run/0 is undefined", fn -> + Sample.run + end + + filename = "sample.ex" + with_file filename, test_module_code(), fn -> + assert c(filename, ".") == [Sample] + assert File.exists?("Elixir.Sample.beam") + assert Sample.run == :run + end + after + cleanup_modules([Sample]) + end test "l helper" do - assert_raise UndefinedFunctionError, "undefined function: Sample.run/0", fn -> + assert_raise UndefinedFunctionError, ~r"function Sample.run/0 is undefined", fn -> Sample.run end assert l(:non_existent_module) == {:error, :nofile} filename = "sample.ex" - with_file filename, test_module_code, fn -> - assert c(filename) == [Sample] + with_file filename, test_module_code(), fn -> + assert c(filename, ".") == [Sample] assert Sample.run == :run File.write! 
filename, "defmodule Sample do end" - elixirc("sample.ex") + elixirc ["sample.ex"] assert l(Sample) == {:module, Sample} - assert_raise UndefinedFunctionError, "undefined function: Sample.run/0", fn -> + assert_raise UndefinedFunctionError, "function Sample.run/0 is undefined or private", fn -> Sample.run end end @@ -268,46 +412,55 @@ defmodule IEx.HelpersTest do cleanup_modules([Sample]) end + test "nl helper" do + assert nl(:non_existent_module) == {:error, :nofile} + assert nl([node()], Enum) == {:ok, [{:nonode@nohost, :loaded, Enum}]} + assert nl([:nosuchnode@badhost], Enum) == {:ok, [{:nosuchnode@badhost, :badrpc, :nodedown}]} + capture_log fn -> + assert nl([node()], :lists) == {:ok, [{:nonode@nohost, :error, :sticky_directory}]} + end + end + test "r helper unavailable" do assert_raise ArgumentError, "could not load nor find module: :non_existent_module", fn -> r :non_existent_module end end - test "r helper elixir" do - assert_raise UndefinedFunctionError, "undefined function: Sample.run/0", fn -> + test "r helper Elixir" do + assert_raise UndefinedFunctionError, ~r"function Sample.run/0 is undefined \(module Sample is not available\)", fn -> Sample.run end filename = "sample.ex" - with_file filename, test_module_code, fn -> + with_file filename, test_module_code(), fn -> assert capture_io(:stderr, fn -> - assert c(filename) == [Sample] + assert c(filename, ".") == [Sample] assert Sample.run == :run File.write! filename, "defmodule Sample do end" assert {:reloaded, Sample, [Sample]} = r(Sample) - assert_raise UndefinedFunctionError, "undefined function: Sample.run/0", fn -> + assert_raise UndefinedFunctionError, "function Sample.run/0 is undefined or private", fn -> Sample.run end - end) =~ ~r"^.*?sample\.ex:1: warning: redefining module Sample\n$" + end) =~ "redefining module Sample (current version loaded from Elixir.Sample.beam)" end after # Clean up old version produced by the r helper cleanup_modules([Sample]) end - test "r helper erlang" do - assert_raise UndefinedFunctionError, "undefined function: :sample.hello/0", fn -> + test "r helper Erlang" do + assert_raise UndefinedFunctionError, ~r"function :sample.hello/0 is undefined", fn -> :sample.hello end filename = "sample.erl" - with_file filename, erlang_module_code, fn -> - assert c(filename) == [:sample] + with_file filename, erlang_module_code(), fn -> + assert c(filename, ".") == [:sample] assert :sample.hello == :world - File.write!(filename, other_erlang_module_code) + File.write!(filename, other_erlang_module_code()) assert {:reloaded, :sample, [:sample]} = r(:sample) assert :sample.hello == :bye end @@ -315,6 +468,47 @@ defmodule IEx.HelpersTest do cleanup_modules([:sample]) end + test "pid/1 helper" do + assert inspect(pid("0.32767.3276")) == "#PID<0.32767.3276>" + assert inspect(pid("0.5.6")) == "#PID<0.5.6>" + assert_raise ArgumentError, fn -> + pid("0.6.-6") + end + end + + test "pid/3 helper" do + assert inspect(pid(0, 32767, 3276)) == "#PID<0.32767.3276>" + assert inspect(pid(0, 5, 6)) == "#PID<0.5.6>" + assert_raise FunctionClauseError, fn -> + pid(0, 6, -6) + end + end + + test "i/1 helper" do + output = capture_io fn -> i(:ok) end + assert output =~ String.trim_trailing(""" + Term + :ok + Data type + Atom + Reference modules + Atom + """) + end + + test "i/1 helper on functions that don't display result" do + output = capture_io fn -> i(IEx.dont_display_result()) end + assert output =~ String.trim_trailing(""" + Term + :"do not show this result in output" + Data type + Atom + Description + This atom is 
returned by IEx when a function that should not print its + return value on screen is executed. + """) + end + defp test_module_code do """ defmodule Sample do @@ -377,7 +571,7 @@ defmodule IEx.HelpersTest do defp elixirc(args) do executable = Path.expand("../../../../bin/elixirc", __DIR__) - System.cmd("#{executable}#{executable_extension} #{args}") + System.cmd("#{executable}#{executable_extension()}", args, [stderr_to_stdout: true]) end defp iex_path do diff --git a/lib/iex/test/iex/info_test.exs b/lib/iex/test/iex/info_test.exs new file mode 100644 index 00000000000..5e9934c031b --- /dev/null +++ b/lib/iex/test/iex/info_test.exs @@ -0,0 +1,182 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule IEx.InfoTest do + use ExUnit.Case + + alias IEx.Info + + defmodule Foo do + defstruct [:foo] + end + + test "tuples" do + assert Info.info({:ok, "good!"}) == ["Data type": "Tuple", + "Reference modules": "Tuple"] + end + + test "atoms: loaded module (without docs)" do + info = Info.info(Foo) + assert info[:"Data type"] == "Atom" + assert info[:"Source"] == Path.relative_to_cwd(__ENV__.file) + assert info[:"Description"] == "Call IEx.InfoTest.Foo.module_info() to access metadata." + assert info[:"Raw representation"] == ~s(:"Elixir.IEx.InfoTest.Foo") + end + + test "atoms: loaded module (with docs)" do + info = Info.info(List) + description = "Use h(List) to access its documentation.\n" <> + "Call List.module_info() to access metadata." + assert info[:"Description"] == description + end + + test "atoms: module that is also a protocol" do + info = Info.info(String.Chars) + description = info[:"Protocol"] + assert description =~ "This module is a protocol" + assert description =~ "Atom" + assert description =~ "BitString" + end + + test "atoms: module-like atom (Foo)" do + info = Info.info(NonexistentModuleAtom) + assert info[:"Raw representation"] == ~s(:"Elixir.NonexistentModuleAtom") + end + + test "atoms: regular atom" do + assert Info.info(:foo) == ["Data type": "Atom", + "Reference modules": "Atom"] + end + + test "lists: charlists" do + info = Info.info('foo') + assert info[:"Description"] =~ "This is a list of integers that is printed" + assert info[:"Raw representation"] == "[102, 111, 111]" + end + + test "lists: keyword lists" do + info = Info.info(a: 1, b: 2) + assert info[:"Description"] =~ "This is what is referred to as a \"keyword list\"" + end + + test "lists: regular lists" do + assert Info.info([:foo, :bar, :baz])[:"Reference modules"] == "List" + end + + test "bitstring: strings" do + info = Info.info("føø") + assert info[:"Byte size"] == 5 + assert info[:"Description"] =~ "This is a string: a UTF-8 encoded binary" + assert info[:"Raw representation"] == "<<102, 195, 184, 195, 184>>" + end + + test "bitstring: non-printable string" do + info = Info.info(<<"foo", 0, "bar">>) + assert info[:"Byte size"] == 7 + assert info[:"Description"] =~ "This is a string" + assert info[:"Description"] =~ "It's printed with the `<<>>`" + assert info[:"Description"] =~ "the first non-printable codepoint being `<<0>>`" + end + + test "bitstring: binary" do + info = Info.info(<<255, 255>>) + assert info[:"Description"] =~ "This is a binary: a collection of bytes" + end + + test "bitstring: bitstring" do + info = Info.info(<<1 :: 1>>) + assert info[:"Bits size"] == 1 + assert info[:"Description"] =~ "This is a bitstring" + end + + test "integers" do + assert Info.info(99) == ["Data type": "Integer", + "Reference modules": "Integer"] + end + + test "float" do + assert Info.info(3.14) == 
["Data type": "Float", + "Reference modules": "Float"] + end + + test "functions: named function" do + info = Info.info(&String.length/1) + assert info[:"Type"] == "external" + assert info[:"Arity"] == 1 + end + + test "functions: anonymous function" do + info = Info.info(fn -> :ok end) + assert info[:"Type"] == "local" + assert info[:"Arity"] == 0 + assert info[:"Description"] == "This is an anonymous function." + end + + test "PIDs" do + pid = spawn_link(fn -> Process.sleep(1000) end) + + info = Info.info(pid) + assert info[:"Alive"] == true + assert info[:"Name"] == "not registered" + assert info[:"Links"] == inspect(self()) + assert info[:"Message queue length"] == 0 + + Process.register(pid, :iex_info_registered_pid) + Process.unlink(pid) + send pid, :oops + info = Info.info(pid) + assert info[:"Name"] == ":iex_info_registered_pid" + assert info[:"Links"] == "none" + assert info[:"Message queue length"] == 1 + + Process.exit(pid, :kill) + assert Info.info(pid)[:"Alive"] == false + end + + test "ports" do + {:ok, port} = :gen_udp.open(0) + assert Info.info(port)[:"Open"] == true + :ok = :gen_udp.close(port) + assert Info.info(port)[:"Open"] == false + end + + test "references" do + assert Info.info(make_ref()) == ["Data type": "Reference"] + end + + test "date" do + {:ok, date} = Date.new(2017, 1, 1) + info = Info.info(date) + assert info[:"Data type"] == "Date" + assert info[:"Raw representation"] == "%Date{calendar: Calendar.ISO, day: 1, month: 1, year: 2017}" + assert info[:"Reference modules"] == "Date, Calendar, Map" + assert info[:"Description"] =~ "a date" + assert info[:"Description"] =~ "`~D`" + end + + test "time" do + {:ok, time} = Time.new(23, 59, 59) + info = Info.info(time) + assert info[:"Data type"] == "Time" + assert info[:"Raw representation"] == "%Time{calendar: Calendar.ISO, hour: 23, microsecond: {0, 0}, minute: 59, second: 59}" + assert info[:"Reference modules"] == "Time, Calendar, Map" + assert info[:"Description"] =~ "a time" + assert info[:"Description"] =~ "`~T`" + end + + test "naive datetime" do + {:ok, time} = NaiveDateTime.new(2017, 1, 1, 23, 59, 59) + info = Info.info(time) + assert info[:"Data type"] == "NaiveDateTime" + assert info[:"Raw representation"] == "%NaiveDateTime{calendar: Calendar.ISO, day: 1, hour: 23, microsecond: {0, 0}, minute: 59, month: 1, second: 59, year: 2017}" + assert info[:"Reference modules"] == "NaiveDateTime, Calendar, Map" + assert info[:"Description"] =~ ~S{a "naive" datetime (that is, a datetime without a timezone)} + assert info[:"Description"] =~ "`~N`" + end + + test "structs" do + info = Info.info(%Foo{}) + assert info[:"Data type"] == "IEx.InfoTest.Foo" + assert info[:"Description"] == "This is a struct. Structs are maps with a __struct__ key." 
+ assert info[:"Reference modules"] == "IEx.InfoTest.Foo, Map" + end +end diff --git a/lib/iex/test/iex/interaction_test.exs b/lib/iex/test/iex/interaction_test.exs index 9670430e663..6b89753e1da 100644 --- a/lib/iex/test/iex/interaction_test.exs +++ b/lib/iex/test/iex/interaction_test.exs @@ -29,6 +29,14 @@ defmodule IEx.InteractionTest do assert capture_iex(code) =~ "10" end + test "code escape" do + code = """ + 1 \\ + + 2 + """ + assert capture_iex(code) =~ "3" + end + test "exception" do exception = Regex.escape("** (ArithmeticError) bad argument in arithmetic expression") assert capture_iex("1 + :atom\n:this_is_still_working") @@ -65,19 +73,13 @@ defmodule IEx.InteractionTest do end test "invalid input" do - assert capture_iex("if true do ) false end") =~ "** (SyntaxError) iex:1: \"do\" starting at" - end - - test "undefined function" do - assert "** (RuntimeError) undefined function: format/0" <> _ = capture_iex("format") - assert "** (RuntimeError) undefined function: with_one/1" <> _ = capture_iex("with_one(22)") - assert "** (RuntimeError) undefined function: many/3" <> _ = capture_iex("many(:ok, 22, \"hi\")") + assert capture_iex("if true do ) false end") =~ "** (SyntaxError) iex:1: \"do\" is missing terminator \"end\". unexpected token: \")\" at line 1" end test "module definition" do input = """ defmodule Sample do - def foo, do: bar + def foo, do: bar() def bar, do: 13 end && Sample.foo """ @@ -92,36 +94,22 @@ defmodule IEx.InteractionTest do assert capture_iex("1\n", opts, [], true) == "prompt(1)> 1\nprompt(2)>" end - unless match?({:win32,_}, :os.type) do + if IO.ANSI.enabled? do test "color" do - opts = [colors: [enabled: true, eval_result: "red"]] - assert capture_iex("1 + 2", opts) == "\e[31m3\e[0m" - - # Sanity checks - assert capture_iex("IO.ANSI.escape(\"%{blue}hello\", true)", opts) - == "\e[31m\"\\e[34mhello\\e[0m\"\e[0m" - assert capture_iex("IO.puts IO.ANSI.escape(\"%{blue}hello\", true)", opts) - == "\e[34mhello\e[0m\n\e[31m:ok\e[0m" - assert capture_iex("IO.puts IO.ANSI.escape(\"%{blue}hello\", true)", [colors: [enabled: false]]) - == "\e[34mhello\e[0m\n:ok" - - # Test that ANSI escapes in the docs are left alone - opts = [colors: [enabled: true]] - assert capture_iex("h IO.ANSI.escape_fragment", opts) - =~ ~r"%\{red\}" - - # Test that ANSI escapes in iex output are left alone - opts = [colors: [enabled: true, eval_result: "red", eval_info: "red"]] - assert capture_iex("\"%{red} %{blue}\"", opts) == "\e[31m\"%{red} %{blue}\"\e[0m" - assert capture_iex("IO.puts IEx.color(:eval_info, \"%{red} %{blue}\")", opts) - == "\e[31m%{red} %{blue}\e[0m\n\e[31m:ok\e[0m" + opts = [colors: [enabled: true, eval_result: [:red]]] + assert capture_iex("1 + 2", opts) == + "\e[31m3\e[0m" + assert capture_iex("IO.ANSI.blue", opts) == + "\e[31m\e[32m\"\\e[34m\"\e[0m\e[31m\e[0m" + assert capture_iex("{:ok}", opts) == + "\e[31m\e[39m{\e[0m\e[31m\e[36m:ok\e[0m\e[31m\e[39m}\e[0m\e[31m\e[0m" end end test "inspect opts" do - opts = [inspect: [binaries: :as_binaries, char_lists: :as_lists, structs: false, limit: 4]] - assert capture_iex("<<45,46,47>>\n[45,46,47]\n%IO.Stream{}", opts) == - "<<45, 46, 47>>\n[45, 46, 47]\n%{__struct__: IO.Stream, device: nil, line_or_bytes: :line, raw: true}" + opts = [inspect: [binaries: :as_binaries, charlists: :as_lists, structs: false, limit: 4]] + assert capture_iex("<<45, 46, 47>>\n[45, 46, 47]\n%IO.Stream{}", opts) == + "<<45, 46, 47>>\n[45, 46, 47]\n%{__struct__: IO.Stream, device: nil, line_or_bytes: :line, raw: true}" end test "history size" do @@ 
-132,34 +120,10 @@ defmodule IEx.InteractionTest do assert "1\n2\n3\n4\n2\n** (RuntimeError) v(2) is out of bounds" <> _ = capture_iex("1\n2\n3\n4\nv(2)\nv(2)", opts) end - ## .iex file loading - - test "no .iex" do - assert "** (RuntimeError) undefined function: my_variable/0" <> _ = capture_iex("my_variable") - end - - test ".iex" do - File.write!("dot-iex", "my_variable = 144") - assert capture_iex("my_variable", [], [dot_iex_path: "dot-iex"]) == "144" - after - File.rm("dot-iex") - end - - test "nested .iex" do - File.write!("dot-iex-1", "nested_var = 13\nimport IO") - File.write!("dot-iex", "import_file \"dot-iex-1\"\nmy_variable=14") - - input = "nested_var\nmy_variable\nputs \"hello\"" - assert capture_iex(input, [], [dot_iex_path: "dot-iex"]) == "13\n14\nhello\n:ok" - after - File.rm("dot-iex-1") - File.rm("dot-iex") - end - test "receive exit" do - assert capture_iex("spawn_link(fn -> exit(:bye) end)") =~ + assert capture_iex("spawn_link(fn -> exit(:bye) end); Process.sleep(1000)") =~ ~r"\*\* \(EXIT from #PID<\d+\.\d+\.\d+>\) :bye" - assert capture_iex("spawn_link(fn -> exit({:bye, [:world]}) end)") =~ + assert capture_iex("spawn_link(fn -> exit({:bye, [:world]}) end); Process.sleep(1000)") =~ ~r"\*\* \(EXIT from #PID<\d+\.\d+\.\d+>\) {:bye, \[:world\]}" end @@ -167,14 +131,55 @@ defmodule IEx.InteractionTest do # use exit/1 to fake an error so that an error message # is not sent to the error logger. content = capture_iex("spawn_link(fn -> exit({%ArgumentError{}, - [{:not_a_real_module, :function, 0, []}]}) end)") + [{:not_a_real_module, :function, 0, []}]}) end); + Process.sleep(1000)") assert content =~ ~r"\*\* \(EXIT from #PID<\d+\.\d+\.\d+>\) an exception was raised:\n" assert content =~ ~r"\s{4}\*\* \(ArgumentError\) argument error\n" assert content =~ ~r"\s{8}:not_a_real_module\.function/0" end - test "exit due to failed call" do + test "receive exit due to failed call" do assert capture_iex("exit({:bye, {:gen_server, :call, [self(), :hello]}})") =~ ~r"\*\* \(exit\) exited in: :gen_server\.call\(#PID<\d+\.\d+\.\d+>, :hello\)\n\s{4}\*\* \(EXIT\) :bye" end + + # TODO: Remove this check once we depend only on 20 + if :erlang.system_info(:otp_release) >= '20' do + test "blames function clause error" do + content = capture_iex("Access.fetch(:foo, :bar)") + assert content =~ "** (FunctionClauseError) no function clause matching in Access.fetch/2" + assert content =~ "The following arguments were given to Access.fetch/2" + assert content =~ ":foo" + assert content =~ "def fetch(-%struct{} = container-, +key+)" + assert content =~ ~r"\(elixir\) lib/access\.ex:\d+: Access\.fetch/2" + end + end + + ## .iex file loading + + describe ".iex" do + test "no .iex" do + capture_io(:stderr, fn -> + assert "** (CompileError) iex:1: undefined function my_variable/0" <> _ = capture_iex("my_variable") + end) + end + + test "single .iex" do + File.write!("dot-iex", "my_variable = 144") + assert capture_iex("my_variable", [], [dot_iex_path: "dot-iex"]) == "144" + after + File.rm("dot-iex") + end + + test "nested .iex" do + File.write!("dot-iex-1", "nested_var = 13\nimport IO") + File.write!("dot-iex", "import_file \"dot-iex-1\"\nmy_variable=14") + + input = "nested_var\nmy_variable\nputs \"hello\"" + assert capture_iex(input, [], [dot_iex_path: "dot-iex"]) == "13\n14\nhello\n:ok" + after + File.rm("dot-iex-1") + File.rm("dot-iex") + end + end end diff --git a/lib/iex/test/iex/server_test.exs b/lib/iex/test/iex/server_test.exs index 596ea7784f3..0b396bac656 100644 --- 
a/lib/iex/test/iex/server_test.exs +++ b/lib/iex/test/iex/server_test.exs @@ -1,7 +1,7 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule IEx.ServerTest do - use IEx.Case, async: true + use IEx.Case # Options @@ -11,12 +11,6 @@ defmodule IEx.ServerTest do end) =~ "pry(1)> " end - test "delegate_locals_to option" do - assert capture_io("sort([:foo, :bar])", fn -> - boot([delegate_locals_to: Enum]) - end) =~ "[:bar, :foo]" - end - test "env option" do assert capture_io("__ENV__.file", fn -> boot([env: __ENV__]) @@ -27,7 +21,7 @@ defmodule IEx.ServerTest do test "allows take over of the shell during boot" do assert capture_io("Y\na+b", fn -> - server = self + server = self() boot([], fn -> opts = [prefix: "dbg", binding: [a: 1, b: 2]] IEx.Server.take_over("iex:13", opts, 1000, server) @@ -54,7 +48,7 @@ defmodule IEx.ServerTest do # Helpers - defp boot(opts, callback \\ fn -> end) do + defp boot(opts, callback \\ fn -> nil end) do IEx.Server.start(Keyword.merge([dot_iex_path: ""], opts), {:erlang, :apply, [callback, []]}) end diff --git a/lib/iex/test/test_helper.exs b/lib/iex/test/test_helper.exs index e3bf9ed704f..e614955a081 100644 --- a/lib/iex/test/test_helper.exs +++ b/lib/iex/test/test_helper.exs @@ -1,6 +1,6 @@ -Application.start(:iex) -Application.put_env(:iex, :colors, [enabled: false]) -ExUnit.start [trace: "--trace" in System.argv] +:ok = Application.start(:iex) +IEx.configure([colors: [enabled: false]]) +ExUnit.start [trace: "--trace" in System.argv, assert_receive_timeout: 500] defmodule IEx.Case do use ExUnit.CaseTemplate @@ -12,7 +12,7 @@ defmodule IEx.Case do # defmodule IEx.InteractionTest do # use IEx.Case # - # test :input do + # test "input" do # assert capture_iex("1+2") == "3" # end # end @@ -27,15 +27,19 @@ defmodule IEx.Case do using do quote do import ExUnit.CaptureIO + import ExUnit.CaptureLog import unquote(__MODULE__) end end + keys = [:default_prompt, :alive_prompt, :inspect, :colors, :history_size] + @iex_env Application.get_all_env(:iex) |> Keyword.take(keys) + setup do - opts = IEx.configuration |> - Keyword.take([:default_prompt, :alive_prompt, :inspect, :colors, :history_size]) on_exit fn -> - Enum.each opts, fn {k, v} -> Application.put_env(:iex, k, v) end + env = @iex_env + Enum.each(env, fn {k, _} -> Application.delete_env(:iex, k) end) + IEx.configure(env) end :ok end @@ -61,12 +65,8 @@ defmodule IEx.Case do defp strip_iex(string) do string - |> strip_line # strip the greeting - |> String.strip - end - - defp strip_line(string) do - Regex.replace ~r/\A.+?$/ms, string, "" + |> String.split("\n", parts: 2) # trim the greeting + |> Enum.at(1) + |> String.trim end end - diff --git a/lib/logger/lib/logger.ex b/lib/logger/lib/logger.ex new file mode 100644 index 00000000000..d4a48811889 --- /dev/null +++ b/lib/logger/lib/logger.ex @@ -0,0 +1,753 @@ +defmodule Logger do + @moduledoc ~S""" + A logger for Elixir applications. + + It includes many features: + + * Provides debug, info, warn, and error levels. + + * Supports multiple backends which are automatically + supervised when plugged into `Logger`. + + * Formats and truncates messages on the client + to avoid clogging `Logger` backends. + + * Alternates between sync and async modes to remain + performant when required but also apply backpressure + when under stress. + + * Wraps OTP's `error_logger` to prevent it from + overflowing. + + Logging is useful for tracking when an event of interest happens in your + system. For example, it may be helpful to log whenever a user is deleted. 
+ + def delete_user(user) do + Logger.info fn -> + "Deleting user from the system: #{inspect(user)}" + end + # ... + end + + The `Logger.info/2` macro emits the provided message at the `:info` + level. There are additional macros for other levels. Notice the argument + passed to `Logger.info/2` in the above example is a zero argument function. + + Although the `Logger` macros accept messages as strings as well as functions, + it's recommended to use functions whenever the message is expensive to + compute. In the example above, the message is evaluated (and thus so is the + interpolation inside it) regardless of the level, even if the message will not + be actually logged at runtime; the only way of avoiding evaluation of such + message is purging the log call at compile-time through the + `:compile_time_purge_level` option (see below), or using a function that is + evaluated to generate the message only if the message needs to be logged + according to the runtime level. + + ## Levels + + The supported levels are: + + * `:debug` - for debug-related messages + * `:info` - for information of any kind + * `:warn` - for warnings + * `:error` - for errors + + ## Configuration + + `Logger` supports a wide range of configurations. + + This configuration is split in three categories: + + * Application configuration - must be set before the `:logger` + application is started + + * Runtime configuration - can be set before the `:logger` + application is started, but may be changed during runtime + + * Error logger configuration - configuration for the + wrapper around OTP's `error_logger` + + ### Application configuration + + The following configuration must be set via config files (such as + `config/config.exs`) before the `:logger` application is started. + + * `:backends` - the backends to be used. Defaults to `[:console]`. + See the "Backends" section for more information. + + * `:compile_time_purge_level` - purges *at compilation time* all calls that + have log level lower than the value of this option. This means that + `Logger` calls with level lower than this option will be completely + removed at compile time, accruing no overhead at runtime. Defaults to + `:debug` and only applies to the `Logger.debug/2`, `Logger.info/2`, + `Logger.warn/2`, and `Logger.error/2` macros (for example, it doesn't apply to + `Logger.log/3`). Note that arguments passed to `Logger` calls that are + removed from the AST at compilation time are never evaluated, thus any + function call that occurs in these arguments is never executed. As a + consequence, avoid code that looks like `Logger.debug("Cleanup: + #{perform_cleanup()}")` as in the example `perform_cleanup/0` won't be + executed if the `:compile_time_purge_level` is `:info` or higher. + + * `:compile_time_application` - sets the `:application` metadata value + to the configured value at compilation time. This configuration is + usually only useful for build tools to automatically add the + application to the metadata for `Logger.debug/2`, `Logger.info/2`, etc. + style of calls. + + For example, to configure the `:backends` and `compile_time_purge_level` + options in a `config/config.exs` file: + + config :logger, + backends: [:console], + compile_time_purge_level: :info + + ### Runtime Configuration + + All configuration below can be set via config files (such as + `config/config.exs`) but also changed dynamically during runtime via + `Logger.configure/1`. + + * `:level` - the logging level. 
Attempting to log any message + with severity less than the configured level will simply + cause the message to be ignored. Keep in mind that each backend + may have its specific level, too. Note that, unlike what happens with the + `:compile_time_purge_level` option, the argument passed to `Logger` calls + is evaluated even if the level of the call is lower than + `:level`. For this reason, messages that are expensive to + compute should be wrapped in 0-arity anonymous functions that are + evaluated only when the `:level` option demands it. + + * `:utc_log` - when `true`, uses UTC in logs. By default it uses + local time (i.e., it defaults to `false`). + + * `:truncate` - the maximum message size to be logged (in bytes). Defaults + to 8192 bytes. Note this configuration is approximate. Truncated messages + will have `" (truncated)"` at the end. The atom `:infinity` can be passed + to disable this behavior. + + * `:sync_threshold` - if the `Logger` manager has more than + `:sync_threshold` messages in its queue, `Logger` will change + to *sync mode*, to apply backpressure to the clients. + `Logger` will return to *async mode* once the number of messages + in the queue is reduced to `sync_threshold * 0.75` messages. + Defaults to 20 messages. + + * `:translator_inspect_opts` - when translating OTP reports and + errors, the last message and state must be inspected in the + error reports. This configuration allows developers to change + how much and how the data should be inspected. + + For example, to configure the `:level` and `:truncate` options in a + `config/config.exs` file: + + config :logger, + level: :warn, + truncate: 4096 + + ### Error logger configuration + + The following configuration applies to `Logger`'s wrapper around + Erlang's `error_logger`. All the configurations below must be set + before the `:logger` application starts. + + * `:handle_otp_reports` - redirects OTP reports to `Logger` so + they are formatted in Elixir terms. This uninstalls Erlang's + logger that prints terms to terminal. Defaults to `true`. + + * `:handle_sasl_reports` - redirects supervisor, crash and + progress reports to `Logger` so they are formatted in Elixir + terms. Your application must guarantee `:sasl` is started before + `:logger`. This means you may see some initial reports written + in Erlang syntax until the Logger application kicks in and + uninstalls SASL's logger in favor of its own. Defaults to `false`. + + * `:discard_threshold_for_error_logger` - a value that, when + reached, triggers the error logger to discard messages. This + value must be a positive number that represents the maximum + number of messages accepted per second. Once above this + threshold, the `error_logger` enters discard mode for the + remainder of that second. Defaults to 500 messages. + + For example, to configure `Logger` to redirect all `error_logger` messages + using a `config/config.exs` file: + + config :logger, + handle_otp_reports: true, + handle_sasl_reports: true + + Furthermore, `Logger` allows messages sent by Erlang's `error_logger` + to be translated into an Elixir format via translators. Translators + can be dynamically added at any time with the `add_translator/1` + and `remove_translator/1` APIs. Check `Logger.Translator` for more + information. + + ## Backends + + `Logger` supports different backends where log messages are written to. 
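+ For example, a third-party backend (the module name `MyApp.CustomBackend` + here is only a placeholder) could be enabled alongside the default console + backend through the application configuration: + + config :logger, + backends: [:console, MyApp.CustomBackend] + 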
+ + The available backends by default are: + + * `:console` - logs messages to the console (enabled by default) + + Developers may also implement their own backends, an option that + is explored in more detail below. + + The initial backends are loaded via the `:backends` configuration, + which must be set before the `:logger` application is started. + + ### Console backend + + The console backend logs messages by printing them to the console. It supports + the following options: + + * `:level` - the level to be logged by this backend. + Note that messages are filtered by the general + `:level` configuration for the `:logger` application first. + + * `:format` - the format message used to print logs. + Defaults to: `"$time $metadata[$level] $levelpad$message\n"`. + It may also be a `{module, function}` tuple that is invoked + with the log level, the message, the current timestamp and + the metadata. + + * `:metadata` - the metadata to be printed by `$metadata`. + Defaults to an empty list (no metadata). + Setting `:metadata` to `:all` prints all metadata. + + * `:colors` - a keyword list of coloring options. + + * `:device` - the device to log error messages to. Defaults to + `:user` but can be changed to something else such as `:standard_error`. + + * `:max_buffer` - maximum events to buffer while waiting + for a confirmation from the IO device (default: 32). + Once the buffer is full, the backend will block until + a confirmation is received. + + In addition to the keys provided by the user via `Logger.metadata/1`, + the following extra keys are available to the `:metadata` list: + + * `:application` - the current application + + * `:module` - the current module + + * `:function` - the current function + + * `:file` - the current file + + * `:line` - the current line + + * `:pid` - the current process ID + + The supported keys in the `:colors` keyword list are: + + * `:enabled` - boolean value that allows for switching the + coloring on and off. Defaults to: `IO.ANSI.enabled?/0` + + * `:debug` - color for debug messages. Defaults to: `:cyan` + + * `:info` - color for info messages. Defaults to: `:normal` + + * `:warn` - color for warn messages. Defaults to: `:yellow` + + * `:error` - color for error messages. Defaults to: `:red` + + See the `IO.ANSI` module for a list of colors and attributes. + + Here is an example of how to configure the `:console` backend in a + `config/config.exs` file: + + config :logger, :console, + format: "\n$time $metadata[$level] $levelpad$message\n", + metadata: [:user_id] + + #### Custom Formatting + + The console backend allows you to customize the format of your log messages + with the `:format` option. + + You may set `:format` to either a string or a `{module, function}` tuple if + you wish to provide your own format function. The `{module, function}` will be + invoked with the log level, the message, the current timestamp and the + metadata. + + Here is an example of how to configure the `:console` backend in a + `config/config.exs` file: + + config :logger, :console, + format: {MyConsoleLogger, :format} + + And here is an example of how you can define `MyConsoleLogger.format/4` from the + above configuration. + + defmodule MyConsoleLogger do + def format(level, message, timestamp, metadata) do + # Custom formatting logic... + end + end + + You can read more about formatting in `Logger.Formatter`. + + ### Custom backends + + Any developer can create their own `Logger` backend. 
+ Since `Logger` is an event manager powered by `:gen_event`, + writing a new backend is a matter of creating an event + handler, as described in the [`:gen_event`](http://erlang.org/doc/man/gen_event.html) + documentation. + + From now on, we will be using the term "event handler" to refer + to your custom backend, as we head into implementation details. + + Once the `:logger` application starts, it installs all event handlers listed under + the `:backends` configuration into the `Logger` event manager. The event + manager and all added event handlers are automatically supervised by `Logger`. + + Once initialized, the handler should be designed to handle events + in the following format: + + {level, group_leader, {Logger, message, timestamp, metadata}} | :flush + + where: + + * `level` is one of `:debug`, `:info`, `:warn`, or `:error`, as previously + described + * `group_leader` is the group leader of the process which logged the message + * `{Logger, message, timestamp, metadata}` is a tuple containing information + about the logged message: + * the first element is always the atom `Logger` + * `message` is the actual message (as chardata) + * `timestamp` is the timestamp for when the message was logged, as a + `{{year, month, day}, {hour, minute, second, millisecond}}` tuple + * `metadata` is a keyword list of metadata used when logging the message + + It is recommended that handlers ignore messages where + the group leader is on a different node than the one where + the handler is installed. For example: + + def handle_event({_level, gl, {Logger, _, _, _}}, state) + when node(gl) != node() do + {:ok, state} + end + + In the case of the `:flush` event, handlers should flush any pending data. This + event is triggered by `flush/0`. + + Furthermore, backends can be configured via the + `configure_backend/2` function which requires event handlers + to handle calls of the following format: + + {:configure, options} + + where `options` is a keyword list. The result of the call is + the result returned by `configure_backend/2`. The recommended + return value for successful configuration is `:ok`. + + It is recommended that backends support at least the following + configuration options: + + * `:level` - the logging level for that backend + * `:format` - the logging format for that backend + * `:metadata` - the metadata to include in that backend + + Check the implementation of `Logger.Backends.Console` for + examples on how to handle the recommendations in this section + and how to process the existing options. + """ + + @type backend :: :gen_event.handler + @type message :: IO.chardata | String.Chars.t + @type level :: :error | :info | :warn | :debug + @type metadata :: Keyword.t(String.Chars.t) + @levels [:error, :info, :warn, :debug] + + @metadata :logger_metadata + @compile {:inline, __metadata__: 0} + + defp __metadata__ do + Process.get(@metadata) || {true, []} + end + + @doc """ + Alters the current process metadata according to the given keyword list. + + This function will merge the given keyword list into the existing metadata, + with the exception of setting a key to `nil`, which will remove that key + from the metadata. + """ + @spec metadata(metadata) :: :ok + def metadata(keyword) do + {enabled?, metadata} = __metadata__() + metadata = + Enum.reduce(keyword, metadata, fn + {key, nil}, acc -> Keyword.delete(acc, key) + {key, val}, acc -> Keyword.put(acc, key, val) + end) + Process.put(@metadata, {enabled?, metadata}) + :ok + end + + @doc """ + Reads the current process metadata. 
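+ + For example, assuming no metadata has been set yet in the current process: + + Logger.metadata(user_id: 13) + Logger.metadata() + #=> [user_id: 13] + 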
+ """ + @spec metadata() :: metadata + def metadata() do + __metadata__() |> elem(1) + end + + @doc """ + Resets the current process metadata to the given keyword list. + """ + @spec reset_metadata(metadata) :: :ok + def reset_metadata(keywords \\ []) do + {enabled?, _metadata} = __metadata__() + Process.put(@metadata, {enabled?, []}) + metadata(keywords) + end + + @doc """ + Enables logging for the current process. + + Currently the only accepted PID is `self()`. + """ + @spec enable(pid) :: :ok + def enable(pid) when pid == self() do + Process.put(@metadata, {true, metadata()}) + :ok + end + + @doc """ + Disables logging for the current process. + + Currently the only accepted PID is `self()`. + """ + @spec disable(pid) :: :ok + def disable(pid) when pid == self() do + Process.put(@metadata, {false, metadata()}) + :ok + end + + @doc """ + Retrieves the `Logger` level. + + The `Logger` level can be changed via `configure/1`. + """ + @spec level() :: level + def level() do + %{level: level} = Logger.Config.__data__ + level + end + + @doc """ + Compares log levels. + + Receives two log levels and compares the `left` level + against the `right` level and returns + + * `:lt` if `left` is less than `right` + * `:eq` if `left` and `right` are equal + * `:gt` if `left` is greater than `right` + + ## Examples + + iex> Logger.compare_levels(:debug, :warn) + :lt + iex> Logger.compare_levels(:error, :info) + :gt + + """ + @spec compare_levels(level, level) :: :lt | :eq | :gt + def compare_levels(level, level), do: + :eq + def compare_levels(left, right), do: + if(level_to_number(left) > level_to_number(right), do: :gt, else: :lt) + + defp level_to_number(:debug), do: 0 + defp level_to_number(:info), do: 1 + defp level_to_number(:warn), do: 2 + defp level_to_number(:error), do: 3 + + @doc """ + Configures the logger. + + See the "Runtime Configuration" section in the `Logger` module + documentation for the available options. + """ + @valid_options [:compile_time_purge_level, :compile_time_application, :sync_threshold, :truncate, :level, :utc_log] + @spec configure(Keyword.t) :: :ok + def configure(options) do + Logger.Config.configure(Keyword.take(options, @valid_options)) + end + + @doc """ + Flushes the logger. + + This guarantees all messages sent to `Logger` prior to this call will + be processed. This is useful for testing and it should not be called + in production code. + """ + @spec flush :: :ok + def flush do + _ = :gen_event.which_handlers(:error_logger) + :gen_event.sync_notify(Logger, :flush) + end + + @doc """ + Adds a new backend. + + ## Options + + * `:flush` - when `true`, guarantees all messages currently sent + to both Logger and Erlang's `error_logger` are processed before + the backend is added + + """ + @spec add_backend(atom, Keyword.t) :: Supervisor.on_start_child + def add_backend(backend, opts \\ []) do + _ = if opts[:flush], do: flush() + case Logger.Watcher.watch(Logger, Logger.Config.translate_backend(backend), backend) do + {:ok, _} = ok -> + Logger.Config.add_backend(backend) + ok + {:error, {:already_started, _pid}} -> + {:error, :already_present} + {:error, _} = error -> + error + end + end + + @doc """ + Removes a backend. 
+ + ## Options + + * `:flush` - when `true`, guarantees all messages currently sent + to both Logger and Erlang's `error_logger` are processed before + the backend is removed + + """ + @spec remove_backend(atom, Keyword.t) :: :ok | {:error, term} + def remove_backend(backend, opts \\ []) do + _ = if opts[:flush], do: flush() + Logger.Config.remove_backend(backend) + Logger.Watcher.unwatch(Logger, Logger.Config.translate_backend(backend)) + end + + @doc """ + Adds a new translator. + """ + @spec add_translator({module, function :: atom}) :: :ok + def add_translator({mod, fun} = translator) when is_atom(mod) and is_atom(fun) do + Logger.Config.add_translator(translator) + end + + @doc """ + Removes a translator. + """ + @spec remove_translator({module, function :: atom}) :: :ok + def remove_translator({mod, fun} = translator) when is_atom(mod) and is_atom(fun) do + Logger.Config.remove_translator(translator) + end + + @doc """ + Configures the given backend. + + The backend needs to be started and running in order to + be configured at runtime. + """ + @spec configure_backend(backend, Keyword.t) :: term + def configure_backend(backend, options) when is_list(options) do + :gen_event.call(Logger, Logger.Config.translate_backend(backend), {:configure, options}) + end + + @doc """ + Logs a message dynamically. + + Use this function only when there is a need to + explicitly avoid embedding metadata. + """ + @spec bare_log(level, message | (() -> message | {message, Keyword.t}), Keyword.t) :: + :ok | {:error, :noproc} | {:error, term} + def bare_log(level, chardata_or_fun, metadata \\ []) + when level in @levels and is_list(metadata) do + case __metadata__() do + {true, pdict} -> + %{mode: mode, truncate: truncate, + level: min_level, utc_log: utc_log?} = Logger.Config.__data__ + + if compare_levels(level, min_level) != :lt do + metadata = [pid: self()] ++ Keyword.merge(pdict, metadata) + {message, metadata} = normalize_message(chardata_or_fun, metadata) + truncated = truncate(message, truncate) + + tuple = {Logger, truncated, Logger.Utils.timestamp(utc_log?), metadata} + + try do + notify(mode, {level, Process.group_leader(), tuple}) + :ok + rescue + ArgumentError -> {:error, :noproc} + catch + :exit, reason -> {:error, reason} + end + else + :ok + end + {false, _} -> + :ok + end + end + + @doc """ + Logs a warning message. + + Returns `:ok` or an `{:error, reason}` tuple. + + ## Examples + + Logger.warn "knob turned too far to the right" + Logger.warn fn -> "expensive to calculate warning" end + Logger.warn fn -> {"expensive to calculate warning", [additional: :metadata]} end + + """ + defmacro warn(chardata_or_fun, metadata \\ []) do + maybe_log(:warn, chardata_or_fun, metadata, __CALLER__) + end + + @doc """ + Logs an info message. + + Returns `:ok` or an `{:error, reason}` tuple. + + ## Examples + + Logger.info "mission accomplished" + Logger.info fn -> "expensive to calculate info" end + Logger.info fn -> {"expensive to calculate info", [additional: :metadata]} end + + """ + defmacro info(chardata_or_fun, metadata \\ []) do + maybe_log(:info, chardata_or_fun, metadata, __CALLER__) + end + + @doc """ + Logs an error message. + + Returns `:ok` or an `{:error, reason}` tuple. 
+ + ## Examples + + Logger.error "oops" + Logger.error fn -> "expensive to calculate error" end + Logger.error fn -> {"expensive to calculate error", [additional: :metadata]} end + + """ + defmacro error(chardata_or_fun, metadata \\ []) do + maybe_log(:error, chardata_or_fun, metadata, __CALLER__) + end + + @doc """ + Logs a debug message. + + Returns `:ok` or an `{:error, reason}` tuple. + + ## Examples + + Logger.debug "hello?" + Logger.debug fn -> "expensive to calculate debug" end + Logger.debug fn -> {"expensive to calculate debug", [additional: :metadata]} end + + """ + defmacro debug(chardata_or_fun, metadata \\ []) do + maybe_log(:debug, chardata_or_fun, metadata, __CALLER__) + end + + @doc """ + Logs a message with the given `level`. + + Returns `:ok` or an `{:error, reason}` tuple. + + The macros `debug/2`, `warn/2`, `info/2`, and `error/2` are + preferred over this macro as they can automatically eliminate + the call to `Logger` altogether at compile time if desired + (see the documentation for the `Logger` module). + """ + defmacro log(level, chardata_or_fun, metadata \\ []) do + macro_log(level, chardata_or_fun, metadata, __CALLER__) + end + + defp macro_log(level, data, metadata, caller) do + %{module: module, function: fun, file: file, line: line} = caller + + caller = + compile_time_application() ++ + [module: module, function: form_fa(fun), file: file, line: line] + + quote do + Logger.bare_log(unquote(level), unquote(data), unquote(caller) ++ unquote(metadata)) + end + end + + defp compile_time_application do + if app = Application.get_env(:logger, :compile_time_application) do + [application: app] + else + [] + end + end + + defp maybe_log(level, data, metadata, caller) do + min_level = Application.get_env(:logger, :compile_time_purge_level, :debug) + if compare_levels(level, min_level) != :lt do + macro_log(level, data, metadata, caller) + else + handle_unused_variable_warnings(data, caller) + end + end + + defp normalize_message(fun, metadata) when is_function(fun, 0), + do: normalize_message(fun.(), metadata) + defp normalize_message({message, fun_metadata}, metadata) when is_list(fun_metadata), + do: {message, Keyword.merge(metadata, fun_metadata)} + defp normalize_message(message, metadata), + do: {message, metadata} + + defp truncate(data, n) when is_list(data) or is_binary(data), + do: Logger.Utils.truncate(data, n) + defp truncate(data, n), + do: Logger.Utils.truncate(to_string(data), n) + + defp form_fa({name, arity}) do + Atom.to_string(name) <> "/" <> Integer.to_string(arity) + end + + defp form_fa(nil), do: nil + + defp notify(:sync, msg), do: :gen_event.sync_notify(Logger, msg) + defp notify(:async, msg), do: :gen_event.notify(Logger, msg) + + defp handle_unused_variable_warnings(data, caller) do + # We collect all the names of variables (leaving `data` unchanged) with a + # scope of `nil` (as we don't warn for variables with a different scope + # anyways). We only want the variables that figure in `caller.vars`, as the + # AST for calls to local 0-arity functions without parens is the same as the + # AST for variables. 
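+ # For example, if a call like `Logger.debug("got #{inspect(msg)}")` is purged + # at compile time, `msg` would otherwise trigger an "unused variable" warning; + # the `_ = msg` assignments generated below silence that warning without + # building the log message.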
+ {^data, logged_vars} = Macro.postwalk(data, [], fn + {name, _meta, nil} = var, acc when is_atom(name) -> + if {name, nil} in caller.vars, do: {var, [name | acc]}, else: {var, acc} + ast, acc -> + {ast, acc} + end) + + assignments = + logged_vars + |> Enum.reverse() + |> Enum.uniq() + |> Enum.map("e(do: _ = unquote(Macro.var(&1, nil)))) + + quote do + unquote_splicing(assignments) + :ok + end + end +end diff --git a/lib/logger/lib/logger/app.ex b/lib/logger/lib/logger/app.ex new file mode 100644 index 00000000000..cd618db31c8 --- /dev/null +++ b/lib/logger/lib/logger/app.ex @@ -0,0 +1,84 @@ +defmodule Logger.App do + @moduledoc false + + use Application + + @doc false + def start(_type, _args) do + import Supervisor.Spec + + otp_reports? = Application.get_env(:logger, :handle_otp_reports) + sasl_reports? = Application.get_env(:logger, :handle_sasl_reports) + threshold = Application.get_env(:logger, :discard_threshold_for_error_logger) + + options = [strategy: :rest_for_one, name: Logger.Supervisor] + children = [worker(:gen_event, [{:local, Logger}]), + worker(Logger.Watcher, [Logger, Logger.Config, []], + [id: Logger.Config, function: :watcher]), + supervisor(Logger.Watcher, [Logger.Config, :handlers, []]), + worker(Logger.Watcher, + [:error_logger, Logger.ErrorHandler, + {otp_reports?, sasl_reports?, threshold}], + [id: Logger.ErrorHandler, function: :watcher])] + + config = Logger.Config.new() + + case Supervisor.start_link(children, options) do + {:ok, sup} -> + handlers = [error_logger_tty_h: otp_reports?, + sasl_report_tty_h: sasl_reports?] + delete_handlers(handlers) + {:ok, sup, config} + {:error, _} = error -> + Logger.Config.delete(config) + error + end + end + + @doc false + def start do + Application.start(:logger) + end + + @doc false + def stop(config) do + Logger.Config.deleted_handlers() + |> add_handlers() + Logger.Config.delete(config) + end + + @doc false + def config_change(_changed, _new, _removed) do + Logger.Config.configure([]) + end + + @doc """ + Stops the application without sending messages to error logger. + """ + def stop() do + try do + Logger.Config.deleted_handlers([]) + catch + :exit, {:noproc, _} -> + {:error, {:not_started, :logger}} + else + deleted_handlers -> + result = Application.stop(:logger) + add_handlers(deleted_handlers) + result + end + end + + defp delete_handlers(handlers) do + to_delete = + for {handler, delete?} <- handlers, + delete? 
&& :error_logger.delete_report_handler(handler) != {:error, :module_not_found}, + do: handler + [] = Logger.Config.deleted_handlers(to_delete) + :ok + end + + defp add_handlers(handlers) do + Enum.each(handlers, &:error_logger.add_report_handler/1) + end +end diff --git a/lib/logger/lib/logger/backends/console.ex b/lib/logger/lib/logger/backends/console.ex new file mode 100644 index 00000000000..0f985992b2e --- /dev/null +++ b/lib/logger/lib/logger/backends/console.ex @@ -0,0 +1,230 @@ +defmodule Logger.Backends.Console do + @moduledoc false + + @behaviour :gen_event + + defstruct [format: nil, metadata: nil, level: nil, colors: nil, device: nil, + max_buffer: nil, buffer_size: 0, buffer: [], ref: nil, output: nil] + + def init(:console) do + config = Application.get_env(:logger, :console) + device = Keyword.get(config, :device, :user) + + if Process.whereis(device) do + {:ok, init(config, %__MODULE__{})} + else + {:error, :ignore} + end + end + + def init({__MODULE__, opts}) when is_list(opts) do + config = configure_merge(Application.get_env(:logger, :console), opts) + {:ok, init(config, %__MODULE__{})} + end + + def handle_call({:configure, options}, state) do + {:ok, :ok, configure(options, state)} + end + + def handle_event({_level, gl, _event}, state) when node(gl) != node() do + {:ok, state} + end + + def handle_event({level, _gl, {Logger, msg, ts, md}}, state) do + %{level: log_level, ref: ref, buffer_size: buffer_size, + max_buffer: max_buffer} = state + cond do + not meet_level?(level, log_level) -> + {:ok, state} + is_nil(ref) -> + {:ok, log_event(level, msg, ts, md, state)} + buffer_size < max_buffer -> + {:ok, buffer_event(level, msg, ts, md, state)} + buffer_size === max_buffer -> + state = buffer_event(level, msg, ts, md, state) + {:ok, await_io(state)} + end + end + + def handle_event(:flush, state) do + {:ok, flush(state)} + end + + def handle_event(_, state) do + {:ok, state} + end + + def handle_info({:io_reply, ref, msg}, %{ref: ref} = state) do + {:ok, handle_io_reply(msg, state)} + end + + def handle_info({:DOWN, ref, _, pid, reason}, %{ref: ref}) do + raise "device #{inspect pid} exited: " <> Exception.format_exit(reason) + end + + def handle_info(_, state) do + {:ok, state} + end + + def code_change(_old_vsn, state, _extra) do + {:ok, state} + end + + def terminate(_reason, _state) do + :ok + end + + ## Helpers + + defp meet_level?(_lvl, nil), do: true + + defp meet_level?(lvl, min) do + Logger.compare_levels(lvl, min) != :lt + end + + defp configure(options, state) do + config = configure_merge(Application.get_env(:logger, :console), options) + Application.put_env(:logger, :console, config) + init(config, state) + end + + defp init(config, state) do + level = Keyword.get(config, :level) + device = Keyword.get(config, :device, :user) + format = Logger.Formatter.compile Keyword.get(config, :format) + colors = configure_colors(config) + metadata = Keyword.get(config, :metadata, []) |> configure_metadata() + max_buffer = Keyword.get(config, :max_buffer, 32) + + %{state | format: format, metadata: metadata, + level: level, colors: colors, device: device, max_buffer: max_buffer} + end + + defp configure_metadata(:all), do: :all + defp configure_metadata(metadata), do: Enum.reverse(metadata) + + defp configure_merge(env, options) do + Keyword.merge(env, options, fn + :colors, v1, v2 -> Keyword.merge(v1, v2) + _, _v1, v2 -> v2 + end) + end + + defp configure_colors(config) do + colors = Keyword.get(config, :colors, []) + %{debug: Keyword.get(colors, :debug, :cyan), + info: 
Keyword.get(colors, :info, :normal), + warn: Keyword.get(colors, :warn, :yellow), + error: Keyword.get(colors, :error, :red), + enabled: Keyword.get(colors, :enabled, IO.ANSI.enabled?)} + end + + defp log_event(level, msg, ts, md, %{device: device} = state) do + output = format_event(level, msg, ts, md, state) + %{state | ref: async_io(device, output), output: output} + end + + defp buffer_event(level, msg, ts, md, state) do + %{buffer: buffer, buffer_size: buffer_size} = state + buffer = [buffer | format_event(level, msg, ts, md, state)] + %{state | buffer: buffer, buffer_size: buffer_size + 1} + end + + defp async_io(name, output) when is_atom(name) do + case Process.whereis(name) do + device when is_pid(device) -> + async_io(device, output) + nil -> + raise "no device registered with the name #{inspect name}" + end + end + + defp async_io(device, output) when is_pid(device) do + ref = Process.monitor(device) + send(device, {:io_request, self(), ref, {:put_chars, :unicode, output}}) + ref + end + + defp await_io(%{ref: nil} = state), do: state + + defp await_io(%{ref: ref} = state) do + receive do + {:io_reply, ^ref, :ok} -> + handle_io_reply(:ok, state) + {:io_reply, ^ref, error} -> + handle_io_reply(error, state) + |> await_io() + {:DOWN, ^ref, _, pid, reason} -> + raise "device #{inspect pid} exited: " <> Exception.format_exit(reason) + end + end + + defp format_event(level, msg, ts, md, state) do + %{format: format, metadata: keys, colors: colors} = state + format + |> Logger.Formatter.format(level, msg, ts, take_metadata(md, keys)) + |> color_event(level, colors, md) + end + + defp take_metadata(metadata, :all), do: metadata + defp take_metadata(metadata, keys) do + Enum.reduce keys, [], fn key, acc -> + case Keyword.fetch(metadata, key) do + {:ok, val} -> [{key, val} | acc] + :error -> acc + end + end + end + + defp color_event(data, _level, %{enabled: false}, _md), do: data + + defp color_event(data, level, %{enabled: true} = colors, md) do + color = md[:ansi_color] || Map.fetch!(colors, level) + [IO.ANSI.format_fragment(color, true), data | IO.ANSI.reset] + end + + defp log_buffer(%{buffer_size: 0, buffer: []} = state), do: state + + defp log_buffer(state) do + %{device: device, buffer: buffer} = state + %{state | ref: async_io(device, buffer), buffer: [], buffer_size: 0, + output: buffer} + end + + defp handle_io_reply(:ok, %{ref: ref} = state) do + Process.demonitor(ref, [:flush]) + log_buffer(%{state | ref: nil, output: nil}) + end + + defp handle_io_reply({:error, {:put_chars, :unicode, _} = error}, state) do + retry_log(error, state) + end + + defp handle_io_reply({:error, :put_chars}, %{output: output} = state) do + retry_log({:put_chars, :unicode, output}, state) + end + + defp handle_io_reply({:error, error}, _) do + raise "failure while logging console messages: " <> inspect(error) + end + + defp retry_log(error, %{device: device, ref: ref, output: dirty} = state) do + Process.demonitor(ref, [:flush]) + case :unicode.characters_to_binary(dirty) do + {_, good, bad} -> + clean = [good | Logger.Formatter.prune(bad)] + %{state | ref: async_io(device, clean), output: clean} + _ -> + # A well behaved IO device should not error on good data + raise "failure while logging console messages: " <> inspect(error) + end + end + + defp flush(%{ref: nil} = state), do: state + + defp flush(state) do + state + |> await_io() + |> flush() + end +end diff --git a/lib/logger/lib/logger/config.ex b/lib/logger/lib/logger/config.ex new file mode 100644 index 00000000000..030522bd151 --- 
/dev/null +++ b/lib/logger/lib/logger/config.ex @@ -0,0 +1,210 @@ +defmodule Logger.Config do + @moduledoc false + + @behaviour :gen_event + + @name __MODULE__ + @table __MODULE__ + @data :__data__ + @deleted_handlers :__deleted_handlers__ + + def start_link do + GenServer.start_link(__MODULE__, :ok, name: @name) + end + + def configure(options) do + :gen_event.call(Logger, @name, {:configure, options}) + end + + def add_translator(translator) do + :gen_event.call(Logger, @name, {:add_translator, translator}) + end + + def remove_translator(translator) do + :gen_event.call(Logger, @name, {:remove_translator, translator}) + end + + def handlers() do + for backend <- backends() do + {Logger, translate_backend(backend), backend} + end + end + + def backends() do + :gen_event.call(Logger, @name, :backends) + end + + def add_backend(backend) do + :gen_event.call(Logger, @name, {:add_backend, backend}) + end + + def remove_backend(backend) do + :gen_event.call(Logger, @name, {:remove_backend, backend}) + end + + def translate_backend(:console), do: Logger.Backends.Console + def translate_backend(other), do: other + + def __data__() do + try do + :ets.lookup_element(@table, @data, 2) + rescue + ArgumentError -> + raise "cannot use Logger, the :logger application is not running" + else + nil -> + raise "cannot use Logger, the :logger application is not running" + data -> + data + end + end + + def deleted_handlers() do + try do + :ets.lookup_element(@table, @deleted_handlers, 2) + rescue + ArgumentError -> + [] + end + end + + def deleted_handlers(handlers) do + :gen_event.call(Logger, @name, {:deleted_handlers, handlers}) + end + + def new() do + tab = :ets.new(@table, [:named_table, :public, {:read_concurrency, true}]) + true = :ets.insert_new(@table, [{@data, nil}, {@deleted_handlers, []}]) + tab + end + + def delete(@table) do + :ets.delete(@table) + end + + ## Callbacks + + def init(_) do + # Use previous data if available in case this handler crashed. + state = :ets.lookup_element(@table, @data, 2) || compute_state(:async) + {:ok, state} + end + + def handle_event({_type, gl, _msg} = event, state) when node(gl) != node() do + # Cross node messages are always async which also + # means this handler won't crash in case Logger + # is not installed in the other node. 
+ :gen_event.notify({Logger, node(gl)}, event) + {:ok, state} + end + + def handle_event(_event, %{mode: mode} = state) do + case compute_mode(state) do + ^mode -> + {:ok, state} + new_mode -> + {:ok, persist(%{state | mode: new_mode})} + end + end + + def handle_call(:backends, state) do + {:ok, Application.get_env(:logger, :backends), state} + end + + def handle_call({:configure, options}, state) do + Enum.each options, fn {key, value} -> + Application.put_env(:logger, key, value) + end + {:ok, :ok, compute_state(state.mode)} + end + + def handle_call({:add_translator, translator}, state) do + state = update_translators(state, fn t -> [translator | List.delete(t, translator)] end) + {:ok, :ok, state} + end + + def handle_call({:remove_translator, translator}, state) do + state = update_translators(state, &List.delete(&1, translator)) + {:ok, :ok, state} + end + + def handle_call({:add_backend, backend}, state) do + update_backends(&[backend | List.delete(&1, backend)]) + {:ok, :ok, state} + end + + def handle_call({:remove_backend, backend}, state) do + update_backends(&List.delete(&1, backend)) + {:ok, :ok, state} + end + + def handle_call({:deleted_handlers, new}, state) do + old = deleted_handlers() + true = :ets.update_element(@table, @deleted_handlers, {2, new}) + {:ok, old, state} + end + + def handle_info(_msg, state) do + {:ok, state} + end + + def terminate(_reason, _state) do + :ok + end + + def code_change(_old, state, _extra) do + {:ok, state} + end + + ## Helpers + + defp compute_mode(state) do + {:message_queue_len, len} = Process.info(self(), :message_queue_len) + + cond do + len > state.sync_threshold and state.mode == :async -> + :sync + len < state.async_threshold and state.mode == :sync -> + :async + true -> + state.mode + end + end + + defp update_backends(fun) do + backends = fun.(Application.get_env(:logger, :backends, [])) + Application.put_env(:logger, :backends, backends) + end + + defp update_translators(%{translators: translators} = state, fun) do + translators = fun.(translators) + Application.put_env(:logger, :translators, translators) + persist %{state | translators: translators} + end + + defp compute_state(mode) do + level = Application.get_env(:logger, :level) + utc_log = Application.get_env(:logger, :utc_log) + truncate = Application.get_env(:logger, :truncate) + translators = Application.get_env(:logger, :translators) + + sync_threshold = Application.get_env(:logger, :sync_threshold) + async_threshold = trunc(sync_threshold * 0.75) + + state = %{level: level, mode: mode, truncate: truncate, + utc_log: utc_log, sync_threshold: sync_threshold, + async_threshold: async_threshold, translators: translators} + + case compute_mode(state) do + ^mode -> + persist(state) + new_mode -> + persist(%{state | mode: new_mode}) + end + end + + defp persist(state) do + :ets.update_element(@table, @data, {2, state}) + state + end +end diff --git a/lib/logger/lib/logger/error_handler.ex b/lib/logger/lib/logger/error_handler.ex new file mode 100644 index 00000000000..28dd5cec465 --- /dev/null +++ b/lib/logger/lib/logger/error_handler.ex @@ -0,0 +1,151 @@ +defmodule Logger.ErrorHandler do + @moduledoc false + + @behaviour :gen_event + + require Logger + + def init({otp?, sasl?, threshold}) do + # We store the Logger PID in the state because when we are shutting + # down the Logger application, the Logger process may be terminated + # and then trying to reach it will lead to crashes. 
So we send a + # message to a PID, instead of named process, to avoid crashes on + # send since this handler will be removed soon by the supervisor. + {:ok, %{otp: otp?, sasl: sasl?, threshold: threshold, + logger: Process.whereis(Logger), last_length: 0, + last_time: :os.timestamp, dropped: 0}} + end + + ## Handle event + + def handle_event({_type, gl, _msg}, state) when node(gl) != node() do + {:ok, state} + end + + def handle_event(event, state) do + state = check_threshold(state) + log_event(event, state) + {:ok, state} + end + + def handle_call(request, _state) do + exit {:bad_call, request} + end + + def handle_info(_msg, state) do + {:ok, state} + end + + def code_change(_old_vsn, state, _extra) do + {:ok, state} + end + + def terminate(_reason, _state) do + :ok + end + + ## Helpers + + defp log_event({:error, _gl, {pid, format, data}}, %{otp: true} = state), + do: log_event(:error, :format, pid, {format, data}, state) + defp log_event({:error_report, _gl, {pid, :std_error, format}}, %{otp: true} = state), + do: log_event(:error, :report, pid, {:std_error, format}, state) + defp log_event({:error_report, _gl, {pid, :supervisor_report, data}}, %{sasl: true} = state), + do: log_event(:error, :report, pid, {:supervisor_report, data}, state) + defp log_event({:error_report, _gl, {pid, :crash_report, data}}, %{sasl: true} = state), + do: log_event(:error, :report, pid, {:crash_report, data}, state) + + defp log_event({:warning_msg, _gl, {pid, format, data}}, %{otp: true} = state), + do: log_event(:warn, :format, pid, {format, data}, state) + defp log_event({:warning_report, _gl, {pid, :std_warning, format}}, %{otp: true} = state), + do: log_event(:warn, :report, pid, {:std_warning, format}, state) + + defp log_event({:info_msg, _gl, {pid, format, data}}, %{otp: true} = state), + do: log_event(:info, :format, pid, {format, data}, state) + defp log_event({:info_report, _gl, {pid, :std_info, format}}, %{otp: true} = state), + do: log_event(:info, :report, pid, {:std_info, format}, state) + defp log_event({:info_report, _gl, {pid, :progress, data}}, %{sasl: true} = state), + do: log_event(:info, :report, pid, {:progress, data}, state) + + defp log_event(_, _state), + do: :ok + + defp log_event(level, kind, pid, {type, _} = data, state) do + %{level: min_level, truncate: truncate, + utc_log: utc_log?, translators: translators} = Logger.Config.__data__ + + with log when log != :lt <- Logger.compare_levels(level, min_level), + {:ok, message} <- translate(translators, min_level, level, kind, data, truncate) do + message = Logger.Utils.truncate(message, truncate) + + # Mode is always async to avoid clogging the error_logger + meta = [pid: ensure_pid(pid), error_logger: ensure_type(type)] + :gen_event.notify(state.logger, + {level, Process.group_leader(), + {Logger, message, Logger.Utils.timestamp(utc_log?), meta}}) + end + + :ok + end + + defp ensure_type(type) when is_atom(type), do: type + defp ensure_type(_), do: :format + + defp ensure_pid(pid) when is_pid(pid), do: pid + defp ensure_pid(_), do: self() + + defp check_threshold(%{last_time: last_time, last_length: last_length, + dropped: dropped, threshold: threshold} = state) do + {m, s, _} = current_time = :os.timestamp + current_length = message_queue_length() + + cond do + match?({^m, ^s, _}, last_time) and current_length - last_length > threshold -> + count = drop_messages(current_time, 0) + %{state | dropped: dropped + count, last_length: message_queue_length()} + match?({^m, ^s, _}, last_time) -> + state + true -> + _ = if dropped > 0 
do + Logger.warn "Logger dropped #{dropped} OTP/SASL messages as it " <> + "exceeded the amount of #{threshold} messages/second" + end + %{state | dropped: 0, last_time: current_time, last_length: current_length} + end + end + + defp message_queue_length() do + {:message_queue_len, len} = Process.info(self(), :message_queue_len) + len + end + + defp drop_messages({m, s, _} = last_time, count) do + case :os.timestamp do + {^m, ^s, _} -> + receive do + {:notify, _event} -> drop_messages(last_time, count + 1) + after + 0 -> count + end + _ -> + count + end + end + + defp translate([{mod, fun} | t], min_level, level, kind, data, truncate) do + case apply(mod, fun, [min_level, level, kind, data]) do + {:ok, chardata} -> {:ok, chardata} + :skip -> :skip + :none -> translate(t, min_level, level, kind, data, truncate) + end + end + + defp translate([], _min_level, _level, :format, {format, args}, truncate) do + {format, args} = Logger.Utils.inspect(format, args, truncate) + {:ok, :io_lib.format(format, args)} + end + + defp translate([], _min_level, _level, :report, {_type, data}, _truncate) do + {:ok, Kernel.inspect(data)} + end +end diff --git a/lib/logger/lib/logger/formatter.ex b/lib/logger/lib/logger/formatter.ex new file mode 100644 index 00000000000..9ad560f4337 --- /dev/null +++ b/lib/logger/lib/logger/formatter.ex @@ -0,0 +1,158 @@ +import Kernel, except: [inspect: 2] + +defmodule Logger.Formatter do + @moduledoc ~S""" + Conveniences for formatting data for logs. + + This module allows developers to specify a string that + serves as a template for log messages, for example: + + $time $metadata[$level] $message\n + + This will print error messages as: + + 18:43:12.439 user_id=13 [error] Hello\n + + The valid parameters you can use are: + + * `$time` - time the log message was sent + * `$date` - date the log message was sent + * `$message` - the log message + * `$level` - the log level + * `$node` - the node that prints the message + * `$metadata` - user controlled data presented in `"key=val key2=val2"` format + * `$levelpad` - sets to a single space if level is 4 characters long, + otherwise set to the empty space. Used to align the message after level. + + Backends typically allow developers to supply such control + strings via configuration files. This module provides `compile/1`, + which compiles the string into a format for fast operations at + runtime and `format/5` to format the compiled pattern into + actual IO data. + + ## Metadata + + Metadata to be sent to the logger can be read and written with + the `Logger.metadata/0` and `Logger.metadata/1` functions. For example, + you can set `Logger.metadata([user_id: 13])` to add user_id metadata + to the current process. The user can configure the backend to choose + which metadata it wants to print and it will replace the `$metadata` + value. + """ + + @type time :: {{1970..10000, 1..12, 1..31}, {0..23, 0..59, 0..59, 0..999}} + @type pattern :: :date | :level | :levelpad | :message | :metadata | :node | :time + @valid_patterns [:time, :date, :message, :level, :node, :metadata, :levelpad] + @default_pattern "\n$time $metadata[$level] $levelpad$message\n" + @replacement "�" + + @doc """ + Prunes non-valid UTF-8 codepoints. + + Typically called after formatting when the data cannot be printed. 
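+ + For example, a code point outside the Unicode range in a chardata list + is replaced: + + Logger.Formatter.prune([?h, 99999999, ?i]) + #=> [104, "�", 105] + 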
+ """ + @spec prune(IO.chardata) :: IO.chardata + def prune(binary) when is_binary(binary), do: prune_binary(binary, "") + def prune([h | t]) when h in 0..1114111, do: [h | prune(t)] + def prune([h | t]), do: [prune(h) | prune(t)] + def prune([]), do: [] + def prune(_), do: @replacement + + defp prune_binary(<>, acc), + do: prune_binary(t, <>) + defp prune_binary(<<_, t::binary>>, acc), + do: prune_binary(t, <>) + defp prune_binary(<<>>, acc), + do: acc + + @doc ~S""" + Compiles a format string into a data structure that the `format/5` can handle. + + Check the module doc for documentation on the valid parameters. If you + pass `nil`, it defaults to: `$time $metadata [$level] $levelpad$message\n` + + If you would like to make your own custom formatter simply pass + `{module, function}` to `compile/1` and the rest is handled. + + iex> Logger.Formatter.compile("$time $metadata [$level] $message\n") + [:time, " ", :metadata, " [", :level, "] ", :message, "\n"] + """ + @spec compile(binary | nil) :: [pattern | binary] + @spec compile({atom, atom}) :: {atom, atom} + + def compile(nil), do: compile(@default_pattern) + def compile({mod, fun}) when is_atom(mod) and is_atom(fun), do: {mod, fun} + + def compile(str) do + regex = Regex.recompile!(~r/(?)\$[a-z]+(?)/) + + for part <- Regex.split(regex, str, on: [:head, :tail], trim: true) do + case part do + "$" <> code -> compile_code(String.to_atom(code)) + _ -> part + end + end + end + + defp compile_code(key) when key in @valid_patterns, do: key + defp compile_code(key) when is_atom(key) do + raise(ArgumentError, message: "$#{key} is an invalid format pattern.") + end + + @doc """ + Takes a compiled format and injects the, level, timestamp, message and + metadata listdict and returns a properly formatted string. + """ + + @spec format({atom, atom} | [pattern | binary], Logger.level, Logger.message, time, Keyword.t) :: + IO.chardata + def format({mod, fun}, level, msg, ts, md) do + apply(mod, fun, [level, msg, ts, md]) + end + + def format(config, level, msg, ts, md) do + for c <- config do + output(c, level, msg, ts, md) + end + end + + defp output(:message, _, msg, _, _), do: msg + defp output(:date, _, _, {date, _time}, _), do: Logger.Utils.format_date(date) + defp output(:time, _, _, {_date, time}, _), do: Logger.Utils.format_time(time) + defp output(:level, level, _, _, _), do: Atom.to_string(level) + defp output(:node, _, _, _, _), do: Atom.to_string(node()) + + defp output(:metadata, _, _, _, []), do: "" + defp output(:metadata, _, _, _, meta) do + Enum.map(meta, fn {key, val} -> + [to_string(key), ?=, metadata(val), ?\s] + end) + end + + defp output(:levelpad, level, _, _, _) do + levelpad(level) + end + + defp output(other, _, _, _, _), do: other + + defp levelpad(:debug), do: "" + defp levelpad(:info), do: " " + defp levelpad(:warn), do: " " + defp levelpad(:error), do: "" + + defp metadata(pid) when is_pid(pid) do + :erlang.pid_to_list(pid) + end + defp metadata(ref) when is_reference(ref) do + '#Ref' ++ rest = :erlang.ref_to_list(ref) + rest + end + defp metadata(atom) when is_atom(atom) do + case Atom.to_string(atom) do + "Elixir." 
<> rest -> rest + "nil" -> "" + binary -> binary + end + end + defp metadata(other), do: to_string(other) +end diff --git a/lib/logger/lib/logger/translator.ex b/lib/logger/lib/logger/translator.ex new file mode 100644 index 00000000000..7b3eb449403 --- /dev/null +++ b/lib/logger/lib/logger/translator.ex @@ -0,0 +1,403 @@ +defmodule Logger.Translator do + @moduledoc """ + Default translation for Erlang log messages. + + Logger allows developers to rewrite log messages provided by + Erlang applications into a format more compatible with Elixir + log messages by providing a translator. + + A translator is simply a tuple containing a module and a function + that can be added and removed via the `Logger.add_translator/1` and + `Logger.remove_translator/1` functions and is invoked for every Erlang + message above the minimum log level with four arguments: + + * `min_level` - the current Logger level + * `level` - the level of the message being translated + * `kind` - if the message is a report or a format + * `message` - the message to format. If it is a report, it is a tuple + with `{report_type, report_data}`, if it is a format, it is a + tuple with `{format_message, format_args}` + + The function must return: + + * `{:ok, chardata}` - if the message was translated with its translation + * `:skip` - if the message is not meant to be translated nor logged + * `:none` - if there is no translation, which triggers the next translator + + See the function `translate/4` in this module for an example implementation + and the default messages translated by Logger. + """ + + # The name_or_id checks are required to support old OTP projects. + + def translate(min_level, level, kind, message) + + def translate(min_level, :error, :format, message) do + opts = Application.get_env(:logger, :translator_inspect_opts) + + case message do + {'** Generic server ' ++ _, [name, last, state, reason | client]} -> + msg = ["GenServer #{inspect name} terminating", format_stop(reason), + "\nLast message#{format_from(client)}: #{inspect last, opts}"] + if min_level == :debug do + {:ok, [msg, "\nState: #{inspect state, opts}" | + format_client(client)]} + else + {:ok, msg} + end + + {'** gen_event handler ' ++ _, [name, manager, last, state, reason]} -> + msg = ["GenEvent handler #{inspect name} installed in #{inspect manager} terminating", + format_stop(reason), "\nLast message: #{inspect last, opts}"] + if min_level == :debug do + {:ok, [msg | "\nState: #{inspect state, opts}"]} + else + {:ok, msg} + end + + {'** Task ' ++ _, [name, starter, function, args, reason]} -> + msg = ["Task #{inspect name} started from #{inspect starter} terminating", + format_stop(reason), + "\nFunction: #{inspect function, opts}" | + "\n Args: #{inspect args, opts}"] + {:ok, msg} + + {'Error in process ' ++ _, [pid, {reason, stack}]} -> + msg = ["Process ", inspect(pid), " raised an exception" | + format(:error, reason, stack)] + {:ok, msg} + + _ -> + :none + end + end + + def translate(_min_level, :info, :report, + {:std_info, [application: app, exited: reason, type: _type]}) do + {:ok, "Application #{app} exited: #{Application.format_error(reason)}"} + end + + def translate(min_level, :error, :report, {:supervisor_report, data}) do + translate_supervisor(min_level, data) + end + + def translate(min_level, :error, :report, {:crash_report, data}) do + translate_crash(min_level, data) + end + + def translate(min_level, :info, :report, {:progress, data}) do + translate_progress(min_level, data) + end + + def translate(_min_level, _level, 
_kind, _message) do + :none + end + + defp translate_supervisor(min_level, + [supervisor: sup, errorContext: context, + reason: reason, + offender: [{:pid, pid}, {name_or_id, name} | offender]]) + when is_pid(pid) and context !== :shutdown and name_or_id in [:name, :id] do + {:ok, ["Child ", inspect(name), " of Supervisor ", + sup_name(sup), ?\s, sup_context(context), + "\n** (exit) ", offender_reason(reason, context), + "\nPid: ", inspect(pid) | + child_info(min_level, offender)]} + end + + defp translate_supervisor(min_level, + [supervisor: sup, errorContext: context, + reason: reason, + offender: [{:pid, _pid}, + {name_or_id, name} | offender]]) when name_or_id in [:name, :id] do + {:ok, ["Child ", inspect(name), " of Supervisor ", + sup_name(sup), ?\s, sup_context(context), + "\n** (exit) ", offender_reason(reason, context) | + child_info(min_level, offender)]} + end + + defp translate_supervisor(min_level, + [supervisor: sup, errorContext: context, + reason: reason, + offender: [{:pid, pid} | offender]]) do + {:ok, ["Child of Supervisor ", + sup_name(sup), ?\s, sup_context(context), + "\n** (exit) ", offender_reason(reason, context), + "\nPid: ", inspect(pid) | + child_info(min_level, offender)]} + end + + defp translate_supervisor(min_level, + [supervisor: sup, errorContext: context, + reason: reason, + offender: [{:nb_children, n}, + {name_or_id, name} | offender]]) when name_or_id in [:name, :id] do + {:ok, ["Children ", inspect(name), " of Supervisor ", + sup_name(sup), ?\s, sup_context(context), + "\n** (exit) ", offender_reason(reason, context), + "\nNumber: ", inspect(n) | + child_info(min_level, offender)]} + end + + defp translate_supervisor(_min_level, _other), do: :none + + defp translate_progress(_min_level, + [application: app, started_at: node_name]) do + {:ok, ["Application ", to_string(app), " started at " | inspect(node_name)]} + end + + defp translate_progress(min_level, + [supervisor: sup, + started: [{:pid, pid}, {name_or_id, name} | started]]) when name_or_id in [:name, :id] do + {:ok, ["Child ", inspect(name), " of Supervisor ", + sup_name(sup), " started", + "\nPid: ", inspect(pid) | + child_info(min_level, started)]} + end + + defp translate_progress(min_level, + [supervisor: sup, + started: [{:pid, pid} | started]]) do + {:ok, ["Child of Supervisor ", sup_name(sup), " started", + "\nPid: ", inspect(pid) | + child_info(min_level, started)]} + end + + defp translate_progress(_min_level, _other), do: :none + + defp sup_name({:local, name}), do: inspect(name) + defp sup_name({:global, name}), do: inspect(name) + defp sup_name({:via, _mod, name}), do: inspect(name) + defp sup_name({pid, mod}), do: [inspect(pid), " (", inspect(mod), ?)] + + defp sup_context(:start_error), do: "failed to start" + defp sup_context(:child_terminated), do: "terminated" + defp sup_context(:shutdown), do: "caused shutdown" + defp sup_context(:shutdown_error), do: "shutdown abnormally" + + defp child_info(min_level, [{:mfargs, {mod, fun, args}} | debug]) do + ["\nStart Call: ", format_mfa(mod, fun, args) | + child_debug(min_level, debug)] + end + + defp child_info(min_level, [{:mfa, {mod, fun, args}} | debug]) do + ["\nStart Call: ", format_mfa(mod, fun, args) | + child_debug(min_level, debug)] + end + + defp child_info(min_level, [{:mod, mod} | debug]) do + ["\nStart Module: ", inspect(mod) | + child_debug(min_level, debug)] + end + + defp child_debug(:debug, + [restart_type: restart, shutdown: shutdown, child_type: type]) do + ["\nRestart: ", inspect(restart), + "\nShutdown: ", 
inspect(shutdown), + "\nType: ", inspect(type)] + end + + defp child_debug(_min_level, _child) do + [] + end + + # If start call raises reason will be of form {:EXIT, reason} + defp offender_reason({:EXIT, reason}, :start_error) do + Exception.format_exit(reason) + end + + defp offender_reason(reason, _context) do + Exception.format_exit(reason) + end + + defp translate_crash(min_level, + [[{:initial_call, _} = initial_call, + {:pid, pid}, + {:registered_name, name}, + {:error_info, {kind, exception, stack}} | crashed], + linked]) do + {:ok, ["Process ", crash_name(pid, name), " terminating", + format(kind, exception, stack), + crash_info(min_level, [initial_call | crashed]) | + crash_linked(min_level, linked)]} + end + + defp translate_crash(min_level, + [[{:pid, pid}, + {:registered_name, name}, + {:error_info, {kind, exception, stack}} | crashed], + linked]) do + {:ok, ["Process ", crash_name(pid, name), " terminating", + format(kind, exception, stack), + crash_info(min_level, crashed), + crash_linked(min_level, linked)]} + end + + defp crash_name(pid, []), do: inspect(pid) + defp crash_name(pid, name), do: [inspect(name), " (", inspect(pid), ?)] + + defp crash_info(min_level, info, prefix \\ [?\n]) + + defp crash_info(min_level, + [{:initial_call, {mod, fun, args}} | info], prefix) do + [prefix, "Initial Call: ", crash_call(mod, fun, args) | + crash_info(min_level, info, prefix)] + end + + defp crash_info(min_level, + [{:current_function, {mod, fun, args}} | info], prefix) do + [prefix, "Current Call: ", crash_call(mod, fun, args) | + crash_info(min_level, info, prefix)] + end + + defp crash_info(min_level, [{:current_function, []} | info], prefix) do + crash_info(min_level, info, prefix) + end + + defp crash_info(min_level, + [{:ancestors, ancestors} | debug], prefix) do + [prefix, "Ancestors: ", inspect(ancestors) | + crash_debug(min_level, debug, prefix)] + end + + defp crash_call(mod, fun, arity) when is_integer(arity) do + format_mfa(mod, fun, arity) + end + + defp crash_call(mod, fun, args) do + format_mfa(mod, fun, length(args)) + end + + defp crash_debug(:debug, + [messages: msgs, links: links, dictionary: dict, + trap_exit: trap, status: status, heap_size: heap_size, + stack_size: stack_size, reductions: reductions], prefix) do + [prefix, "Messages: ", inspect(msgs), + prefix, "Links: ", inspect(links), + prefix, "Dictionary: ", inspect(dict), + prefix, "Trapping Exits: ", inspect(trap), + prefix, "Status: ", inspect(status), + prefix, "Heap Size: ", inspect(heap_size), + prefix, "Stack Size: ", inspect(stack_size), + prefix, "Reductions: ", inspect(reductions)] + end + + defp crash_debug(_min_level, _info, _prefix) do + [] + end + + defp crash_linked(_min_level, []), do: [] + + defp crash_linked(min_level, neighbours) do + Enum.reduce(neighbours, "\nNeighbours:", fn({:neighbour, info}, acc) -> + [acc | crash_neighbour(min_level, info)] + end) + end + + defp crash_neighbour(min_level, + [{:pid, pid}, {:registered_name, []} | info]) do + indent = " " + [?\n, indent, inspect(pid) | + crash_info(min_level, info, [?\n, indent | indent])] + end + + defp crash_neighbour(min_level, + [{:pid, pid}, {:registered_name, name} | info]) do + indent = " " + [?\n, indent, inspect(name), " (", inspect(pid), ")" | + crash_info(min_level, info, [?\n, indent | indent])] + end + + defp format_stop({maybe_exception, [_ | _] = maybe_stacktrace} = reason) do + try do + format_stacktrace(maybe_stacktrace) + catch + :error, _ -> + format_stop_banner(reason) + else + formatted_stacktrace -> + 
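# Editor's note (not part of the original patch): reaching this `else` branch
# means formatting `maybe_stacktrace` did not raise, so the reason is treated
# as an `{exception, stacktrace}` pair: the banner comes from the exception and
# the already formatted stacktrace is appended after it.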
[format_stop_banner(maybe_exception, maybe_stacktrace) | formatted_stacktrace] + end + end + + defp format_stop(reason) do + format_stop_banner(reason) + end + + defp format_stop_banner(reason) do + ["\n** (stop) " | Exception.format_exit(reason)] + end + + # OTP processes rewrite the :undef error to these reasons when logging + @gen_undef [:"module could not be loaded", :"function not exported"] + + defp format_stop_banner(undef, [{mod, fun, args, _info} | _] = stacktrace) + when undef in @gen_undef and is_atom(mod) and is_atom(fun) do + cond do + is_list(args) -> + format_undef(mod, fun, length(args), undef, stacktrace) + is_integer(args) -> + format_undef(mod, fun, args, undef, stacktrace) + true -> + format_stop_banner(undef) + end + end + + defp format_stop_banner(reason, stacktrace) do + # If this is already an exception (even an ErlangError), we format it as an + # exception. Otherwise, we try to normalize it, and if it's normalized as an + # ErlangError we instead format it as an exit. + if Exception.exception?(reason) do + [?\n | Exception.format_banner(:error, reason, stacktrace)] + else + case Exception.normalize(:error, reason, stacktrace) do + %ErlangError{} -> + format_stop_banner(reason) + exception -> + [?\n | Exception.format_banner(:error, exception, stacktrace)] + end + end + end + + defp format_undef(mod, fun, arity, undef, stacktrace) do + opts = [module: mod, function: fun, arity: arity, reason: undef] + exception = UndefinedFunctionError.exception(opts) + [?\n | Exception.format_banner(:error, exception, stacktrace)] + end + + defp format(kind, payload, stacktrace) do + [?\n, Exception.format_banner(kind, payload, stacktrace) | + format_stacktrace(stacktrace)] + end + + defp format_stacktrace(stacktrace) do + for entry <- stacktrace do + [<<"\n ">> | Exception.format_stacktrace_entry(entry)] + end + end + + defp format_mfa(mod, fun, :undefined), + do: [inspect(mod), ?., Inspect.Function.escape_name(fun) | "/?"] + defp format_mfa(mod, fun, args), + do: Exception.format_mfa(mod, fun, args) + + defp format_from([]), + do: "" + defp format_from([from]), + do: " (from #{inspect(from)})" + defp format_from([from, stacktrace]) when is_list(stacktrace), + do: " (from #{inspect(from)})" + defp format_from([from, node_name]) when is_atom(node_name), + do: " (from #{inspect(from)} on #{inspect(node_name)})" + + defp format_client([from]) do + "\nClient #{inspect(from)} is dead" + end + defp format_client([from, stacktrace]) when is_list(stacktrace) do + ["\nClient #{inspect(from)} is alive\n" | + Exception.format_stacktrace(stacktrace)] + end + defp format_client(_) do + [] + end +end diff --git a/lib/logger/lib/logger/utils.ex b/lib/logger/lib/logger/utils.ex new file mode 100644 index 00000000000..d0378350417 --- /dev/null +++ b/lib/logger/lib/logger/utils.ex @@ -0,0 +1,270 @@ +defmodule Logger.Utils do + @moduledoc false + + @doc """ + Truncates a `chardata` into `n` bytes. + + There is a chance we truncate in the middle of a grapheme + cluster but we never truncate in the middle of a binary + codepoint. For this reason, truncation is not exact. 
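As an editor's illustration (not part of the original docstring; the values are
derived from the implementation and from the unit tests later in this patch),
input within the limit passes through unchanged, while longer input is cut and
tagged:

    iex> Logger.Utils.truncate("hi", 5)
    "hi"
    iex> Logger.Utils.truncate("hello world", 5)
    ["hello", " (truncated)"]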
+ """ + @spec truncate(IO.chardata, non_neg_integer) :: IO.chardata + def truncate(chardata, :infinity) when is_binary(chardata) or is_list(chardata) do + chardata + end + def truncate(chardata, n) when n >= 0 do + {chardata, n} = truncate_n(chardata, n) + if n >= 0, do: chardata, else: [chardata, " (truncated)"] + end + + defp truncate_n(_, n) when n < 0 do + {"", n} + end + + defp truncate_n(binary, n) when is_binary(binary) do + remaining = n - byte_size(binary) + if remaining < 0 do + # There is a chance we are cutting at the wrong + # place so we need to fix the binary. + {fix_binary(binary_part(binary, 0, n)), remaining} + else + {binary, remaining} + end + end + + defp truncate_n(int, n) when int in 0..127, do: {int, n - 1} + defp truncate_n(int, n) when int in 127..0x07FF, do: {int, n - 2} + defp truncate_n(int, n) when int in 0x800..0xFFFF, do: {int, n - 3} + defp truncate_n(int, n) when int >= 0x10000 and is_integer(int), do: {int, n - 4} + + defp truncate_n(list, n) when is_list(list) do + truncate_n_list(list, n, []) + end + + defp truncate_n_list(_, n, acc) when n < 0 do + {:lists.reverse(acc), n} + end + + defp truncate_n_list([h | t], n, acc) do + {h, n} = truncate_n(h, n) + truncate_n_list(t, n, [h | acc]) + end + + defp truncate_n_list([], n, acc) do + {:lists.reverse(acc), n} + end + + defp truncate_n_list(t, n, acc) do + {t, n} = truncate_n(t, n) + {:lists.reverse(acc, t), n} + end + + defp fix_binary(binary) do + # Use a thirteen-bytes offset to look back in the binary. + # This should allow at least two codepoints of 6 bytes. + suffix_size = min(byte_size(binary), 13) + prefix_size = byte_size(binary) - suffix_size + <> = binary + prefix <> fix_binary(suffix, "") + end + + defp fix_binary(<>, acc) do + acc <> <> <> fix_binary(t, "") + end + + defp fix_binary(<>, acc) do + fix_binary(t, <>) + end + + defp fix_binary(<<>>, _acc) do + <<>> + end + + @doc """ + Receives a format string and arguments and replace `~p`, + `~P`, `~w` and `~W` by its inspected variants. + """ + def inspect(format, args, truncate, opts \\ %Inspect.Opts{}) + + def inspect(format, args, truncate, opts) when is_atom(format) do + do_inspect(Atom.to_charlist(format), args, truncate, opts) + end + + def inspect(format, args, truncate, opts) when is_binary(format) do + do_inspect(:binary.bin_to_list(format), args, truncate, opts) + end + + def inspect(format, args, truncate, opts) when is_list(format) do + do_inspect(format, args, truncate, opts) + end + + defp do_inspect(format, [], _truncate, _opts), do: {format, []} + defp do_inspect(format, args, truncate, opts) do + # A pre-pass that removes binaries from + # arguments according to the truncate limit. 
+ {args, _} = Enum.map_reduce(args, truncate, fn arg, acc -> + if is_binary(arg) do + truncate_n(arg, acc) + else + {arg, acc} + end + end) + do_inspect(format, args, [], [], opts) + end + + defp do_inspect([?~ | t], args, used_format, used_args, opts) do + {t, args, cc_format, cc_args} = collect_cc(:width, t, args, [?~], [], opts) + do_inspect(t, args, cc_format ++ used_format, cc_args ++ used_args, opts) + end + + defp do_inspect([h | t], args, used_format, used_args, opts), + do: do_inspect(t, args, [h | used_format], used_args, opts) + + defp do_inspect([], [], used_format, used_args, _opts), + do: {:lists.reverse(used_format), :lists.reverse(used_args)} + + ## width + + defp collect_cc(:width, [?- | t], args, used_format, used_args, opts), + do: collect_value(:width, t, args, [?- | used_format], used_args, opts, :precision) + + defp collect_cc(:width, t, args, used_format, used_args, opts), + do: collect_value(:width, t, args, used_format, used_args, opts, :precision) + + ## precision + + defp collect_cc(:precision, [?. | t], args, used_format, used_args, opts), + do: collect_value(:precision, t, args, [?. | used_format], used_args, opts, :pad_char) + + defp collect_cc(:precision, t, args, used_format, used_args, opts), + do: collect_cc(:pad_char, t, args, used_format, used_args, opts) + + ## pad char + + defp collect_cc(:pad_char, [?., ?* | t], [arg | args], used_format, used_args, opts), + do: collect_cc(:encoding, t, args, [?*, ?. | used_format], [arg | used_args], opts) + + defp collect_cc(:pad_char, [?., p | t], args, used_format, used_args, opts), + do: collect_cc(:encoding, t, args, [p, ?. | used_format], used_args, opts) + + defp collect_cc(:pad_char, t, args, used_format, used_args, opts), + do: collect_cc(:encoding, t, args, used_format, used_args, opts) + + ## encoding + + defp collect_cc(:encoding, [?l | t], args, used_format, used_args, opts), + do: collect_cc(:done, t, args, [?l | used_format], used_args, %{opts | charlists: :as_lists}) + + defp collect_cc(:encoding, [?t | t], args, used_format, used_args, opts), + do: collect_cc(:done, t, args, [?t | used_format], used_args, opts) + + defp collect_cc(:encoding, t, args, used_format, used_args, opts), + do: collect_cc(:done, t, args, used_format, used_args, opts) + + ## done + + defp collect_cc(:done, [?W | t], [data, limit | args], _used_format, _used_args, opts), + do: collect_inspect(t, args, data, %{opts | limit: limit, width: :infinity}) + + defp collect_cc(:done, [?w | t], [data | args], _used_format, _used_args, opts), + do: collect_inspect(t, args, data, %{opts | width: :infinity}) + + defp collect_cc(:done, [?P | t], [data, limit | args], _used_format, _used_args, opts), + do: collect_inspect(t, args, data, %{opts | limit: limit}) + + defp collect_cc(:done, [?p | t], [data | args], _used_format, _used_args, opts), + do: collect_inspect(t, args, data, opts) + + defp collect_cc(:done, [h | t], args, used_format, used_args, _opts) do + {args, used_args} = collect_cc(h, args, used_args) + {t, args, [h | used_format], used_args} + end + + defp collect_cc(?x, [a, prefix | args], used), do: {args, [prefix, a | used]} + defp collect_cc(?X, [a, prefix | args], used), do: {args, [prefix, a | used]} + defp collect_cc(?s, [a | args], used), do: {args, [a | used]} + defp collect_cc(?e, [a | args], used), do: {args, [a | used]} + defp collect_cc(?f, [a | args], used), do: {args, [a | used]} + defp collect_cc(?g, [a | args], used), do: {args, [a | used]} + defp collect_cc(?b, [a | args], used), do: {args, [a | used]} + defp 
collect_cc(?B, [a | args], used), do: {args, [a | used]} + defp collect_cc(?+, [a | args], used), do: {args, [a | used]} + defp collect_cc(?#, [a | args], used), do: {args, [a | used]} + defp collect_cc(?c, [a | args], used), do: {args, [a | used]} + defp collect_cc(?i, [a | args], used), do: {args, [a | used]} + defp collect_cc(?~, args, used), do: {args, used} + defp collect_cc(?n, args, used), do: {args, used} + + defp collect_inspect(t, args, data, opts) do + data = + data + |> Inspect.Algebra.to_doc(opts) + |> Inspect.Algebra.format(opts.width) + {t, args, 'st~', [data]} + end + + defp collect_value(current, [?* | t], [arg | args], used_format, used_args, opts, next) + when is_integer(arg) do + collect_cc(next, t, args, [?* | used_format], [arg | used_args], + put_value(opts, current, arg)) + end + + defp collect_value(current, [c | t], args, used_format, used_args, opts, next) + when is_integer(c) and c >= ?0 and c <= ?9 do + {t, c} = collect_value([c | t], []) + collect_cc(next, t, args, c ++ used_format, used_args, + put_value(opts, current, c |> :lists.reverse |> List.to_integer)) + end + + defp collect_value(_current, t, args, used_format, used_args, opts, next), + do: collect_cc(next, t, args, used_format, used_args, opts) + + defp collect_value([c | t], buffer) + when is_integer(c) and c >= ?0 and c <= ?9, + do: collect_value(t, [c | buffer]) + + defp collect_value(other, buffer), + do: {other, buffer} + + defp put_value(opts, key, value) do + if Map.has_key?(opts, key) do + Map.put(opts, key, value) + else + opts + end + end + + @doc """ + Returns a timestamp that includes milliseconds. + """ + def timestamp(utc_log?) do + {_, _, micro} = now = :os.timestamp() + {date, {hours, minutes, seconds}} = + case utc_log? do + true -> :calendar.now_to_universal_time(now) + false -> :calendar.now_to_local_time(now) + end + {date, {hours, minutes, seconds, div(micro, 1000)}} + end + + @doc """ + Formats time as chardata. + """ + def format_time({hh, mi, ss, ms}) do + [pad2(hh), ?:, pad2(mi), ?:, pad2(ss), ?., pad3(ms)] + end + + @doc """ + Formats date as chardata. + """ + def format_date({yy, mm, dd}) do + [Integer.to_string(yy), ?-, pad2(mm), ?-, pad2(dd)] + end + + defp pad3(int) when int < 10, do: [?0, ?0, Integer.to_string(int)] + defp pad3(int) when int < 100, do: [?0, Integer.to_string(int)] + defp pad3(int), do: Integer.to_string(int) + + defp pad2(int) when int < 10, do: [?0, Integer.to_string(int)] + defp pad2(int), do: Integer.to_string(int) +end diff --git a/lib/logger/lib/logger/watcher.ex b/lib/logger/lib/logger/watcher.ex new file mode 100644 index 00000000000..faf238341d9 --- /dev/null +++ b/lib/logger/lib/logger/watcher.ex @@ -0,0 +1,110 @@ +defmodule Logger.Watcher do + @moduledoc false + + require Logger + use GenServer + @name Logger.Watcher + + @doc """ + Starts the watcher supervisor. + """ + def start_link(m, f, a) do + options = [strategy: :one_for_one, name: @name, max_restarts: 30, max_seconds: 3] + case Supervisor.start_link([], options) do + {:ok, _} = ok -> + _ = for {mod, handler, args} <- apply(m, f, a) do + {:ok, _} = watch(mod, handler, args) + end + ok + {:error, _} = error -> + error + end + end + + @doc """ + Removes the given handler. + """ + def unwatch(mod, handler) do + child_id = {__MODULE__, {mod, handler}} + case Supervisor.terminate_child(@name, child_id) do + :ok -> + _ = Supervisor.delete_child(@name, child_id) + :ok + {:error, _} = error -> + error + end + end + + @doc """ + Watches the given handler as part of the watcher supervision tree. 
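As a rough editor's sketch (not part of the original docstring, and the exact
call site is an assumption based on the Logger module in this release): adding
a backend at runtime, for example via `Logger.add_backend(:console)`, boils
down to a call along the lines of

    Logger.Watcher.watch(Logger, Logger.Backends.Console, :console)

so that the handler is re-installed by this supervisor if it ever crashes.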
+ """ + def watch(mod, handler, args) do + import Supervisor.Spec + id = {__MODULE__, {mod, handler}} + child = worker(__MODULE__, [mod, handler, args], id: id, function: :watcher, restart: :transient) + case Supervisor.start_child(@name, child) do + {:error, :already_present} -> + _ = Supervisor.delete_child(@name, id) + watch(mod, handler, args) + other -> + other + end + end + + @doc """ + Starts a watcher server. + + This is useful when there is a need to start a handler + outside of the handler supervision tree. + """ + def watcher(mod, handler, args) do + GenServer.start_link(__MODULE__, {mod, handler, args}) + end + + ## Callbacks + + @doc false + def init({mod, handler, args}) do + case :gen_event.delete_handler(mod, handler, :ok) do + {:error, :module_not_found} -> + res = :gen_event.add_sup_handler(mod, handler, args) + do_init(res, mod, handler) + _ -> + init({mod, handler, args}) + end + end + + defp do_init(res, mod, handler) do + case res do + :ok -> + {:ok, {mod, handler}} + {:error, :ignore} -> + # Can't return :ignore as a transient child under a one_for_one. + # Instead return ok and then immediately exit normally - using a fake + # message. + send(self(), {:gen_event_EXIT, handler, :normal}) + {:ok, {mod, handler}} + {:error, reason} -> + {:stop, reason} + end + end + + @doc false + def handle_info({:gen_event_EXIT, handler, reason}, {_, handler} = state) + when reason in [:normal, :shutdown] do + {:stop, reason, state} + end + + def handle_info({:gen_event_EXIT, handler, reason}, {mod, handler} = state) do + _ = Logger.error ":gen_event handler #{inspect handler} installed at #{inspect mod}\n" <> + "** (exit) #{format_exit(reason)}" + {:stop, reason, state} + end + + def handle_info(_msg, state) do + {:noreply, state} + end + + defp format_exit({:EXIT, reason}), do: Exception.format_exit(reason) + defp format_exit(reason), do: Exception.format_exit(reason) +end diff --git a/lib/logger/mix.exs b/lib/logger/mix.exs new file mode 100644 index 00000000000..5663810c0a7 --- /dev/null +++ b/lib/logger/mix.exs @@ -0,0 +1,27 @@ +defmodule Logger.Mixfile do + use Mix.Project + + def project do + [app: :logger, + version: System.version, + build_per_environment: false] + end + + def application do + [registered: [Logger, Logger.Supervisor, Logger.Watcher], + mod: {Logger.App, []}, + env: [level: :debug, + utc_log: false, + truncate: 8096, + backends: [:console], + translators: [{Logger.Translator, :translate}], + sync_threshold: 20, + handle_otp_reports: true, + handle_sasl_reports: false, + compile_time_purge_level: :debug, + compile_time_application: nil, + discard_threshold_for_error_logger: 500, + translator_inspect_opts: [], + console: []]] + end +end diff --git a/lib/logger/test/logger/backends/console_test.exs b/lib/logger/test/logger/backends/console_test.exs new file mode 100644 index 00000000000..0386d874c03 --- /dev/null +++ b/lib/logger/test/logger/backends/console_test.exs @@ -0,0 +1,162 @@ +defmodule Logger.Backends.ConsoleTest do + use Logger.Case + + require Logger + import ExUnit.CaptureIO + + setup do + on_exit fn -> + :ok = Logger.configure_backend(:console, + [format: nil, device: :user, level: nil, metadata: [], colors: [enabled: false]]) + end + end + + test "does not start when there is no user" do + :ok = Logger.remove_backend(:console) + user = Process.whereis(:user) + + try do + Process.unregister(:user) + assert :gen_event.add_handler(Logger, Logger.Backends.Console, :console) == + {:error, :ignore} + after + Process.register(user, :user) + end + after + 
{:ok, _} = Logger.add_backend(:console) + end + + test "may use another device" do + Logger.configure_backend(:console, device: :standard_error) + + assert capture_io(:standard_error, fn -> + Logger.debug("hello") + Logger.flush() + end) =~ "hello" + end + + test "can configure format" do + Logger.configure_backend(:console, format: "$message [$level]") + + assert capture_log(fn -> + Logger.debug("hello") + end) =~ "hello [debug]" + end + + test "can configure metadata" do + Logger.configure_backend(:console, format: "$metadata$message", metadata: [:user_id]) + + assert capture_log(fn -> + Logger.debug("hello") + end) =~ "hello" + + Logger.metadata(user_id: 11) + Logger.metadata(user_id: 13) + + assert capture_log(fn -> + Logger.debug("hello") + end) =~ "user_id=13 hello" + end + + test "can configure formatter to {module, function} tuple" do + Logger.configure_backend(:console, format: {__MODULE__, :format}) + + assert capture_log(fn -> + Logger.debug("hello") + end) =~ "my_format: hello" + end + + def format(_level, message, _ts, _metadata) do + "my_format: #{message}" + end + + test "can configure metadata to :all" do + Logger.configure_backend(:console, format: "$metadata", metadata: :all) + + Logger.metadata(user_id: 11) + Logger.metadata(dynamic_metadata: 5) + + %{module: mod, function: {name, arity}, file: file, line: line} = __ENV__ + + log = capture_log(fn -> + Logger.debug("hello") + end) + + assert log =~ "file=#{file}" + assert log =~ "line=#{line + 3}" + assert log =~ "module=#{inspect(mod)}" + assert log =~ "function=#{name}/#{arity}" + assert log =~ "dynamic_metadata=5 user_id=11" + end + + test "metadata defaults" do + Logger.configure_backend(:console, + format: "$metadata", metadata: [:file, :line, :module, :function]) + + %{module: mod, function: {name, arity}, file: file, line: line} = __ENV__ + + assert capture_log(fn -> + Logger.debug("hello") + end) =~ "file=#{file} line=#{line + 3} module=#{inspect(mod)} function=#{name}/#{arity}" + end + + test "can configure level" do + Logger.configure_backend(:console, level: :info) + + assert capture_log(fn -> + Logger.debug("hello") + end) == "" + end + + test "can configure colors" do + Logger.configure_backend(:console, [format: "$message", colors: [enabled: true]]) + + assert capture_log(fn -> + Logger.debug("hello") + end) == IO.ANSI.cyan() <> "hello" <> IO.ANSI.reset() + + Logger.configure_backend(:console, [colors: [debug: :magenta]]) + + assert capture_log(fn -> + Logger.debug("hello") + end) == IO.ANSI.magenta() <> "hello" <> IO.ANSI.reset() + + assert capture_log(fn -> + Logger.info("hello") + end) == IO.ANSI.normal() <> "hello" <> IO.ANSI.reset() + + Logger.configure_backend(:console, [colors: [info: :cyan]]) + + assert capture_log(fn -> + Logger.info("hello") + end) == IO.ANSI.cyan() <> "hello" <> IO.ANSI.reset() + + assert capture_log(fn -> + Logger.warn("hello") + end) == IO.ANSI.yellow() <> "hello" <> IO.ANSI.reset() + + Logger.configure_backend(:console, [colors: [warn: :cyan]]) + + assert capture_log(fn -> + Logger.warn("hello") + end) == IO.ANSI.cyan() <> "hello" <> IO.ANSI.reset() + + assert capture_log(fn -> + Logger.error("hello") + end) == IO.ANSI.red() <> "hello" <> IO.ANSI.reset() + + Logger.configure_backend(:console, [colors: [error: :cyan]]) + + assert capture_log(fn -> + Logger.error("hello") + end) == IO.ANSI.cyan() <> "hello" <> IO.ANSI.reset() + end + + test "can use colors from metadata" do + Logger.configure_backend(:console, [format: "$message", colors: [enabled: true]]) + + assert 
capture_log(fn -> + Logger.log(:error, "hello", ansi_color: :yellow) + end) == IO.ANSI.yellow() <> "hello" <> IO.ANSI.reset() + end +end diff --git a/lib/logger/test/logger/error_handler_test.exs b/lib/logger/test/logger/error_handler_test.exs new file mode 100644 index 00000000000..89e4a095d7c --- /dev/null +++ b/lib/logger/test/logger/error_handler_test.exs @@ -0,0 +1,64 @@ +defmodule Logger.ErrorHandlerTest do + use Logger.Case + + test "survives after crashes" do + assert capture_log(fn -> + :error_logger.info_msg("~p~n", []) + wait_for_handler(:error_logger, Logger.ErrorHandler) + end) =~ "[error] :gen_event handler Logger.ErrorHandler installed at :error_logger\n" <> + "** (exit) an exception was raised:" + assert error_log(:info_msg, "~p~n", [:hello]) =~ msg("[info] :hello") + end + + test "survives after Logger exit" do + Process.whereis(Logger) |> Process.exit(:kill) + wait_for_logger() + wait_for_handler(:error_logger, Logger.ErrorHandler) + assert error_log(:info_msg, "~p~n", [:hello]) =~ msg("[info] :hello") + end + + test "formats error_logger info message" do + assert error_log(:info_msg, "hello", []) =~ msg("[info] hello") + assert error_log(:info_msg, "~p~n", [:hello]) =~ msg("[info] :hello") + end + + test "formats error_logger info report" do + assert error_log(:info_report, "hello") =~ msg("[info] \"hello\"") + assert error_log(:info_report, :hello) =~ msg("[info] :hello") + assert error_log(:info_report, :special, :hello) == "" + end + + test "formats error_logger error message" do + assert error_log(:error_msg, "hello", []) =~ msg("[error] hello") + assert error_log(:error_msg, "~p~n", [:hello]) =~ msg("[error] :hello") + end + + test "formats error_logger error report" do + assert error_log(:error_report, "hello") =~ msg("[error] \"hello\"") + assert error_log(:error_report, :hello) =~ msg("[error] :hello") + assert error_log(:error_report, :special, :hello) == "" + end + + test "formats error_logger warning message" do + assert error_log(:warning_msg, "hello", []) =~ msg("[warn] hello") + assert error_log(:warning_msg, "~p~n", [:hello]) =~ msg("[warn] :hello") + end + + test "formats error_logger warning report" do + assert error_log(:warning_report, "hello") =~ msg("[warn] \"hello\"") + assert error_log(:warning_report, :hello) =~ msg("[warn] :hello") + assert error_log(:warning_report, :special, :hello) == "" + end + + defp error_log(fun, format) do + do_error_log(fun, [format]) + end + + defp error_log(fun, format, args) do + do_error_log(fun, [format, args]) + end + + defp do_error_log(fun, args) do + capture_log(fn -> apply(:error_logger, fun, args) end) + end +end diff --git a/lib/logger/test/logger/formatter_test.exs b/lib/logger/test/logger/formatter_test.exs new file mode 100644 index 00000000000..483145ffe8b --- /dev/null +++ b/lib/logger/test/logger/formatter_test.exs @@ -0,0 +1,81 @@ +defmodule Logger.FormatterTest do + use Logger.Case, async: true + doctest Logger.Formatter + + import Logger.Formatter + + test "prune/1" do + assert prune(1) == "�" + assert prune(<<"hí", 233>>) == "hí�" + assert prune(["hi" | 233]) == ["hi" | "�"] + assert prune([233 | "hi"]) == [233 | "hi"] + assert prune([[] | []]) == [[]] + end + + defmodule CompileMod do + def format(_level, _msg, _ts, _md) do + true + end + end + + test "compile/1 with nil" do + assert compile(nil) == + ["\n", :time, " ", :metadata, "[", :level, "] ", :levelpad, :message, "\n"] + end + + test "compile/1 with str" do + assert compile("$level $time $date $metadata $message $node") == + 
Enum.intersperse([:level, :time, :date, :metadata, :message, :node], " ") + + assert_raise ArgumentError, "$bad is an invalid format pattern.", fn -> + compile("$bad $good") + end + end + + test "compile/1 with {mod, fun}" do + assert compile({CompileMod, :format}) == {CompileMod, :format} + end + + test "format with {mod, fun}" do + assert format({CompileMod, :format}, nil, nil, nil, nil) == true + end + + test "format with format string" do + compiled = compile("[$level] $message") + assert format(compiled, :error, "hello", nil, []) == + ["[", "error", "] ", "hello"] + + compiled = compile("$node") + assert format(compiled, :error, nil, nil, []) == [Atom.to_string(node())] + + compiled = compile("$metadata") + assert IO.chardata_to_string(format(compiled, :error, nil, nil, [meta: :data])) == + "meta=data " + assert IO.chardata_to_string(format(compiled, :error, nil, nil, + [meta: :data, pid: :erlang.list_to_pid('<0.123.4>')])) == "meta=data pid=<0.123.4> " + + # Hack to get the same predictable reference for every test run. + ref = <<131, 114, 0, 3, 100, 0, 13, 110, 111, 110, 111, 100, 101, 64, 110, 111, 104, 111, 115, 116, 0, 0, 0, 0, 80, 0, 0, 0, 0, 0, 0, 0, 0>> |> :erlang.binary_to_term + assert "#Reference<0.0.0.80>" == inspect(ref) # ensure the deserialization worked correctly + assert IO.chardata_to_string(format(compiled, :error, nil, nil, + [meta: :data, ref: ref])) == "meta=data ref=<0.0.0.80> " + + assert IO.chardata_to_string(format(compiled, :error, nil, nil, [])) == + "" + + timestamp = {{2014, 12, 30}, {12, 6, 30, 100}} + compiled = compile("$date $time") + assert IO.chardata_to_string(format(compiled, :error, nil, timestamp, [])) == + "2014-12-30 12:06:30.100" + end + + test "padding takes account of length of level" do + compiled = compile("[$level] $levelpad $message") + assert format(compiled, :error, "hello", nil, []) == + ["[", "error", "] ", "", " ", "hello"] + + assert format(compiled, :info, "hello", nil, []) == + ["[", "info", "] ", " ", " ", "hello"] + + end +end diff --git a/lib/logger/test/logger/translator_test.exs b/lib/logger/test/logger/translator_test.exs new file mode 100644 index 00000000000..d23080221de --- /dev/null +++ b/lib/logger/test/logger/translator_test.exs @@ -0,0 +1,768 @@ +defmodule Logger.TranslatorTest do + use Logger.Case + import Supervisor.Spec + + defmodule MyGenServer do + use GenServer + + def handle_cast(:error, _) do + raise "oops" + end + + def handle_call(:error, _, _) do + raise "oops" + end + def handle_call(:error_on_down, {pid, _}, _) do + mon = Process.monitor(pid) + assert_receive {:DOWN, ^mon, _, _, _} + raise "oops" + end + end + + defmodule MyGenEvent do + @behaviour :gen_event + + def init(args) do + {:ok, args} + end + + def handle_event(_event, state) do + {:ok, state} + end + + def handle_call(:error, _) do + raise "oops" + end + + def handle_info(_msg, state) do + {:ok, state} + end + + def code_change(_old_vsn, state, _extra) do + {:ok, state} + end + + def terminate(_reason, _state) do + :ok + end + end + + defmodule MyBridge do + @behaviour :supervisor_bridge + + def init(reason) do + {:ok, pid} = Task.start_link(Kernel, :exit, [reason]) + {:ok, pid, pid} + end + + def terminate(_reason, pid) do + Process.exit(pid, :shutdown) + end + end + + setup_all do + sasl_reports? 
= Application.get_env(:logger, :handle_sasl_reports, false) + Application.put_env(:logger, :handle_sasl_reports, true) + + # Restart the app but change the level before to avoid warnings + level = Logger.level() + Logger.configure(level: :error) + Logger.App.stop() + Application.start(:logger) + Logger.configure(level: level) + + on_exit(fn() -> + Application.put_env(:logger, :handle_sasl_reports, sasl_reports?) + Logger.App.stop() + Application.start(:logger) + end) + end + + test "translates GenServer crashes" do + {:ok, pid} = GenServer.start(MyGenServer, :ok) + + assert capture_log(:info, fn -> + catch_exit(GenServer.call(pid, :error)) + end) =~ """ + [error] GenServer #{inspect pid} terminating + ** (RuntimeError) oops + """ + end + + test "translates GenServer crashes with custom inspect options" do + {:ok, pid} = GenServer.start(MyGenServer, List.duplicate(:ok, 1000)) + Application.put_env(:logger, :translator_inspect_opts, [limit: 3]) + + assert capture_log(:debug, fn -> + catch_exit(GenServer.call(pid, :error)) + end) =~ """ + [:ok, :ok, :ok, ...] + """ + after + Application.put_env(:logger, :translator_inspect_opts, []) + end + + # TODO: Remove this check once we depend only on 20 + if :erlang.system_info(:otp_release) >= '20' do + test "translates GenServer crashes on debug" do + {:ok, pid} = GenServer.start(MyGenServer, :ok) + + assert capture_log(:debug, fn -> + catch_exit(GenServer.call(pid, :error)) + end) =~ ~r""" + \[error\] GenServer #PID<\d+\.\d+\.\d+> terminating + \*\* \(RuntimeError\) oops + .* + Last message \(from #PID<\d+\.\d+\.\d+>\): :error + State: :ok + Client #PID<\d+\.\d+\.\d+> is alive + .* + """s + end + + test "translates GenServer crashes with named client on debug" do + {:ok, pid} = GenServer.start(MyGenServer, :ok) + + assert capture_log(:debug, fn -> + Process.register(self(), :named_client) + catch_exit(GenServer.call(pid, :error)) + end) =~ ~r""" + \[error\] GenServer #PID<\d+\.\d+\.\d+> terminating + \*\* \(RuntimeError\) oops + .* + Last message \(from :named_client\): :error + State: :ok + Client :named_client is alive + .* + """s + end + + test "translates GenServer crashes with dead client on debug" do + {:ok, pid} = GenServer.start(MyGenServer, :ok) + + assert capture_log(:debug, fn -> + mon = Process.monitor(pid) + spawn_link(fn() -> + catch_exit(GenServer.call(pid, :error_on_down, 0)) + end) + assert_receive {:DOWN, ^mon, _, _, _} + end) =~ ~r""" + \[error\] GenServer #PID<\d+\.\d+\.\d+> terminating + \*\* \(RuntimeError\) oops + .* + Last message \(from #PID<\d+\.\d+\.\d+>\): :error_on_down + State: :ok + Client #PID<\d+\.\d+\.\d+> is dead + """s + end + else + test "translates GenServer crashes on debug" do + {:ok, pid} = GenServer.start(MyGenServer, :ok) + + assert capture_log(:debug, fn -> + catch_exit(GenServer.call(pid, :error)) + end) =~ ~r""" + \[error\] GenServer #PID<\d+\.\d+\.\d+> terminating + \*\* \(RuntimeError\) oops + .* + Last message: :error + State: :ok + """s + end + end + + test "translates GenServer crashes with no client" do + {:ok, pid} = GenServer.start(MyGenServer, :ok) + + assert capture_log(:debug, fn -> + mon = Process.monitor(pid) + GenServer.cast(pid, :error) + assert_receive {:DOWN, ^mon, _, _, _} + end) =~ ~r""" + \[error\] GenServer #PID<\d+\.\d+\.\d+> terminating + \*\* \(RuntimeError\) oops + .* + Last message: {:"\$gen_cast", :error} + State: :ok + """s + end + + test "translates GenServer crashes with no client on debug" do + {:ok, pid} = GenServer.start(MyGenServer, :ok) + + refute capture_log(:debug, fn 
-> + mon = Process.monitor(pid) + GenServer.cast(pid, :error) + assert_receive {:DOWN, ^mon, _, _, _} + end) =~ "Client" + end + + test "translates :gen_event crashes" do + {:ok, pid} = :gen_event.start() + :ok = :gen_event.add_handler(pid, MyGenEvent, :ok) + + assert capture_log(:info, fn -> + :gen_event.call(pid, MyGenEvent, :error) + end) =~ """ + [error] GenEvent handler Logger.TranslatorTest.MyGenEvent installed in #{inspect pid} terminating + ** (RuntimeError) oops + """ + end + + test "translates :gen_event crashes on debug" do + {:ok, pid} = :gen_event.start() + :ok = :gen_event.add_handler(pid, MyGenEvent, :ok) + + assert capture_log(:debug, fn -> + :gen_event.call(pid, MyGenEvent, :error) + end) =~ ~r""" + \[error\] GenEvent handler Logger.TranslatorTest.MyGenEvent installed in #PID<\d+\.\d+\.\d+> terminating + \*\* \(RuntimeError\) oops + .* + Last message: :error + State: :ok + """s + end + + test "translates Task crashes" do + {:ok, pid} = Task.start_link(__MODULE__, :task, [self()]) + + assert capture_log(fn -> + ref = Process.monitor(pid) + send(pid, :go) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[error\] Task #PID<\d+\.\d+\.\d+> started from #PID<\d+\.\d+\.\d+> terminating + \*\* \(RuntimeError\) oops + .* + Function: &Logger.TranslatorTest.task\/1 + Args: \[#PID<\d+\.\d+\.\d+>\] + """s + end + + test "translates Task async_stream crashes with neighbour" do + fun = fn -> Task.async_stream([:oops], :erlang, :error, []) |> Enum.to_list() end + {:ok, pid} = Task.start(__MODULE__, :task, [self(), fun]) + + assert capture_log(:debug, fn -> + ref = Process.monitor(pid) + send(pid, :go) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + Neighbours: + #{inspect pid} + Initial Call: Logger\.TranslatorTest\.task/2 + """ + end + + test "translates Task undef module crash" do + assert capture_log(fn -> + {:ok, pid} = Task.start(:module_does_not_exist, :undef, []) + ref = Process.monitor(pid) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[error\] Task #PID<\d+\.\d+\.\d+> started from #PID<\d+\.\d+\.\d+> terminating + \*\* \(UndefinedFunctionError\) function :module_does_not_exist.undef/0 is undefined \(module :module_does_not_exist is not available\) + .* + Function: &:module_does_not_exist.undef/0 + Args: \[\] + """s + end + + test "translates Task undef function crash" do + assert capture_log(fn -> + {:ok, pid} = Task.start(__MODULE__, :undef, []) + ref = Process.monitor(pid) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[error\] Task #PID<\d+\.\d+\.\d+> started from #PID<\d+\.\d+\.\d+> terminating + \*\* \(UndefinedFunctionError\) function Logger.TranslatorTest.undef/0 is undefined or private + .* + Function: &Logger.TranslatorTest.undef/0 + Args: \[\] + """s + end + + test "translates Task raising ErlangError" do + assert capture_log(fn -> + exception = + try do + :erlang.error(:foo) + rescue + x -> + x + end + {:ok, pid} = Task.start(:erlang, :error, [exception]) + ref = Process.monitor(pid) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[error\] Task #PID<\d+\.\d+\.\d+> started from #PID<\d+\.\d+\.\d+> terminating + \*\* \(ErlangError\) Erlang error: :foo + .* + Function: &:erlang\.error/1 + Args: \[%ErlangError{.*}\] + """s + end + + test "translates Task raising Erlang badarg error" do + assert capture_log(fn -> + {:ok, pid} = Task.start(:erlang, :error, [:badarg]) + ref = Process.monitor(pid) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[error\] Task 
#PID<\d+\.\d+\.\d+> started from #PID<\d+\.\d+\.\d+> terminating + \*\* \(ArgumentError\) argument error + .* + Function: &:erlang\.error/1 + Args: \[:badarg\] + """s + end + + test "translates Task exiting abnormally" do + assert capture_log(fn -> + {:ok, pid} = Task.start(:erlang, :exit, [:abnormal]) + ref = Process.monitor(pid) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[error\] Task #PID<\d+\.\d+\.\d+> started from #PID<\d+\.\d+\.\d+> terminating + \*\* \(stop\) :abnormal + .* + Function: &:erlang\.exit/1 + Args: \[:abnormal\] + """s + end + + test "translates application stop" do + assert capture_log(fn -> + :ok = Application.start(:eex) + Application.stop(:eex) + end) =~ """ + Application eex exited: :stopped + """ + end + + test "translates application start" do + assert capture_log(fn -> + Application.start(:eex) + Application.stop(:eex) + end) =~ """ + Application eex started at #{inspect(node())} + """ + end + + test "translates Process crashes" do + assert capture_log(:info, fn -> + {_, ref} = spawn_monitor(fn() -> raise "oops" end) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + # Even though the monitor has been received the emulator may not have + # sent the message to the error logger + Process.sleep(200) + end) =~ ~r""" + \[error\] Process #PID<\d+\.\d+\.\d+>\ raised an exception + \*\* \(RuntimeError\) oops + """ + end + + test "translates :proc_lib crashes" do + {:ok, pid} = Task.start_link(__MODULE__, :task, [self()]) + + assert capture_log(:info, fn -> + ref = Process.monitor(pid) + + send(pid, :go) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[error\] Process #PID<\d+\.\d+\.\d+> terminating + \*\* \(exit\) an exception was raised: + \*\* \(RuntimeError\) oops + .* + Initial Call: Logger.TranslatorTest.task/1 + Ancestors: \[#PID<\d+\.\d+\.\d+>\] + """s + end + + test "translates :proc_lib crashes with name" do + {:ok, pid} = Task.start_link(__MODULE__, :task, + [self(), fn() -> + Process.register(self(), __MODULE__) + raise "oops" + end]) + + assert capture_log(:info, fn -> + ref = Process.monitor(pid) + send(pid, :message) + send(pid, :go) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[error\] Process Logger.TranslatorTest \(#PID<\d+\.\d+\.\d+>\) terminating + \*\* \(exit\) an exception was raised: + \*\* \(RuntimeError\) oops + .* + Initial Call: Logger.TranslatorTest.task/2 + Ancestors: \[#PID<\d+\.\d+\.\d+>\] + """s + end + + test "translates :proc_lib crashes without initial call" do + {:ok, pid} = Task.start_link(__MODULE__, :task, + [self(), fn() -> + Process.delete(:"$initial_call") + raise "oops" + end]) + + assert capture_log(:info, fn -> + ref = Process.monitor(pid) + send(pid, :message) + send(pid, :go) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[error\] Process #PID<\d+\.\d+\.\d+> terminating + \*\* \(exit\) an exception was raised: + \*\* \(RuntimeError\) oops + .* + Ancestors: \[#PID<\d+\.\d+\.\d+>\] + """s + end + + test "translates :proc_lib crashes with neighbour" do + {:ok, pid} = Task.start_link(__MODULE__, :sub_task, [self()]) + + assert capture_log(:info, fn -> + ref = Process.monitor(pid) + send(pid, :message) + send(pid, :go) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + Ancestors: \[#PID<\d+\.\d+\.\d+>\] + Neighbours: + #PID<\d+\.\d+\.\d+> + Initial Call: Logger.TranslatorTest.sleep/1 + Current Call: Logger.TranslatorTest.sleep/1 + Ancestors: \[#PID<\d+\.\d+\.\d+>, #PID<\d+\.\d+\.\d+>\] + """ + end + + test "translates :proc_lib crashes with 
neighbour with name" do + {:ok, pid} = Task.start_link(__MODULE__, :sub_task, + [self(), fn(pid2) -> + Process.register(pid2, __MODULE__) + raise "oops" + end]) + + assert capture_log(:info, fn -> + ref = Process.monitor(pid) + send(pid, :message) + send(pid, :go) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + Ancestors: \[#PID<\d+\.\d+\.\d+>\] + Neighbours: + Logger.TranslatorTest \(#PID<\d+\.\d+\.\d+>\) + Initial Call: Logger.TranslatorTest.sleep/1 + Current Call: Logger.TranslatorTest.sleep/1 + Ancestors: \[#PID<\d+\.\d+\.\d+>, #PID<\d+\.\d+\.\d+>\] + """ + end + + test "translates :proc_lib crashes on debug" do + {:ok, pid} = Task.start_link(__MODULE__, :task, [self()]) + + assert capture_log(:debug, fn -> + ref = Process.monitor(pid) + send(pid, :message) + send(pid, :go) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + Ancestors: \[#PID<\d+\.\d+\.\d+>\] + Messages: \[:message\] + Links: \[\] + Dictionary: \[\] + Trapping Exits: false + Status: :running + Heap Size: \d+ + Stack Size: \d+ + Reductions: \d+ + """ + end + + test "translates :proc_lib crashes with neighbour on debug" do + {:ok, pid} = Task.start_link(__MODULE__, :sub_task, [self()]) + + assert capture_log(:debug, fn -> + ref = Process.monitor(pid) + send(pid, :message) + send(pid, :go) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + Ancestors: \[#PID<\d+\.\d+\.\d+>, #PID<\d+\.\d+\.\d+>\] + Messages: \[\] + Links: \[#PID<\d+\.\d+\.\d+>\] + Dictionary: \[\] + Trapping Exits: false + Status: :waiting + Heap Size: \d+ + Stack Size: \d+ + Reductions: \d+ + """ + end + + test "translates Supervisor progress" do + {:ok, pid} = Supervisor.start_link([], [strategy: :one_for_one]) + assert capture_log(:info, fn -> + ref = Process.monitor(pid) + Supervisor.start_child(pid, worker(Task, [__MODULE__, :sleep, [self()]])) + Process.exit(pid, :normal) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[info\] Child Task of Supervisor #PID<\d+\.\d+\.\d+> \(Supervisor\.Default\) started + Pid: #PID<\d+\.\d+\.\d+> + Start Call: Task.start_link\(Logger.TranslatorTest, :sleep, \[#PID<\d+\.\d+\.\d+>\]\) + """ + end + + test "translates Supervisor progress with name" do + {:ok, pid} = Supervisor.start_link([], + [strategy: :one_for_one, name: __MODULE__]) + assert capture_log(:info, fn -> + ref = Process.monitor(pid) + Supervisor.start_child(pid, worker(Task, [__MODULE__, :sleep, [self()]])) + Process.exit(pid, :normal) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[info\] Child Task of Supervisor Logger.TranslatorTest started + """ + + {:ok, pid} = Supervisor.start_link([], + [strategy: :one_for_one, name: {:global, __MODULE__}]) + assert capture_log(:info, fn -> + ref = Process.monitor(pid) + Supervisor.start_child(pid, worker(Task, [__MODULE__, :sleep, [self()]])) + Process.exit(pid, :normal) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[info\] Child Task of Supervisor Logger.TranslatorTest started + """ + + {:ok, pid} = Supervisor.start_link([], + [strategy: :one_for_one, name: {:via, :global, __MODULE__}]) + assert capture_log(:info, fn -> + ref = Process.monitor(pid) + Supervisor.start_child(pid, worker(Task, [__MODULE__, :sleep, [self()]])) + Process.exit(pid, :normal) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + \[info\] Child Task of Supervisor Logger.TranslatorTest started + """ + end + + test "translates Supervisor progress on debug" do + {:ok, pid} = Supervisor.start_link([], [strategy: :one_for_one]) + assert 
capture_log(:debug, fn -> + ref = Process.monitor(pid) + Supervisor.start_child(pid, worker(Task, [__MODULE__, :sleep, [self()]])) + Process.exit(pid, :normal) + receive do: ({:DOWN, ^ref, _, _, _} -> :ok) + end) =~ ~r""" + Start Call: Task.start_link\(Logger.TranslatorTest, :sleep, \[#PID<\d+\.\d+\.\d+>\]\) + Restart: :permanent + Shutdown: 5000 + Type: :worker + """ + end + + test "translates Supervisor reports start error" do + assert capture_log(:info, fn -> + trap = Process.flag(:trap_exit, true) + children = [worker(__MODULE__, [], function: :error)] + Supervisor.start_link(children, strategy: :one_for_one) + receive do: ({:EXIT, _, {:shutdown, {:failed_to_start_child, _, _}}} -> :ok) + Process.flag(:trap_exit, trap) + end) =~ ~r""" + \[error\] Child Logger.TranslatorTest of Supervisor #PID<\d+\.\d+\.\d+> \(Supervisor\.Default\) failed to start + \*\* \(exit\) :stop + Start Call: Logger.TranslatorTest.error\(\) + """ + end + + test "translates Supervisor reports start error with raise" do + assert capture_log(:info, fn -> + trap = Process.flag(:trap_exit, true) + children = [worker(__MODULE__, [], function: :undef)] + Supervisor.start_link(children, strategy: :one_for_one) + receive do: ({:EXIT, _, {:shutdown, {:failed_to_start_child, _, _}}} -> :ok) + Process.flag(:trap_exit, trap) + end) =~ ~r""" + \[error\] Child Logger.TranslatorTest of Supervisor #PID<\d+\.\d+\.\d+> \(Supervisor\.Default\) failed to start + \*\* \(exit\) an exception was raised: + \*\* \(UndefinedFunctionError\) function Logger.TranslatorTest.undef/0 is undefined or private + .* + Start Call: Logger.TranslatorTest.undef\(\) + """s + end + + test "translates Supervisor reports terminated" do + assert capture_log(:info, fn -> + trap = Process.flag(:trap_exit, true) + children = [worker(Task, [Kernel, :exit, [:stop]])] + {:ok, pid} = Supervisor.start_link(children, strategy: :one_for_one, max_restarts: 0) + receive do: ({:EXIT, ^pid, _} -> :ok) + Process.flag(:trap_exit, trap) + end) =~ ~r""" + \[error\] Child Task of Supervisor #PID<\d+\.\d+\.\d+> \(Supervisor\.Default\) terminated + \*\* \(exit\) :stop + Pid: #PID<\d+\.\d+\.\d+> + Start Call: Task.start_link\(Kernel, :exit, \[:stop\]\) + """ + end + + test "translates Supervisor reports max restarts shutdown" do + assert capture_log(:info, fn -> + trap = Process.flag(:trap_exit, true) + children = [worker(Task, [Kernel, :exit, [:stop]])] + {:ok, pid} = Supervisor.start_link(children, strategy: :one_for_one, max_restarts: 0) + receive do: ({:EXIT, ^pid, _} -> :ok) + Process.flag(:trap_exit, trap) + end) =~ ~r""" + \[error\] Child Task of Supervisor #PID<\d+\.\d+\.\d+> \(Supervisor\.Default\) caused shutdown + \*\* \(exit\) :reached_max_restart_intensity + Start Call: Task.start_link\(Kernel, :exit, \[:stop\]\) + """ + end + + test "translates Supervisor reports abnormal shutdown" do + assert capture_log(:info, fn -> + children = [worker(__MODULE__, [], function: :abnormal)] + {:ok, pid} = Supervisor.start_link(children, strategy: :one_for_one) + :ok = Supervisor.terminate_child(pid, __MODULE__) + end) =~ ~r""" + \[error\] Child Logger.TranslatorTest of Supervisor #PID<\d+\.\d+\.\d+> \(Supervisor\.Default\) shutdown abnormally + \*\* \(exit\) :stop + Pid: #PID<\d+\.\d+\.\d+> + Start Call: Logger.TranslatorTest.abnormal\(\) + """ + end + + test "translates Supervisor reports abnormal shutdown on debug" do + assert capture_log(:debug, fn -> + children = [worker(__MODULE__, [], function: :abnormal, restart: :permanent, shutdown: 5000)] + {:ok, pid} = 
Supervisor.start_link(children, strategy: :one_for_one) + :ok = Supervisor.terminate_child(pid, __MODULE__) + end) =~ ~r""" + \*\* \(exit\) :stop + Pid: #PID<\d+\.\d+\.\d+> + Start Call: Logger.TranslatorTest.abnormal\(\) + Restart: :permanent + Shutdown: 5000 + Type: :worker + """ + end + + test "translates Supervisor reports abnormal shutdown in simple_one_for_one" do + assert capture_log(:info, fn -> + trap = Process.flag(:trap_exit, true) + children = [worker(__MODULE__, [], function: :abnormal)] + {:ok, pid} = Supervisor.start_link(children, strategy: :simple_one_for_one) + {:ok, _pid2} = Supervisor.start_child(pid, []) + Process.exit(pid, :normal) + receive do: ({:EXIT, ^pid, _} -> :ok) + Process.flag(:trap_exit, trap) + end) =~ ~r""" + \[error\] Children Logger.TranslatorTest of Supervisor #PID<\d+\.\d+\.\d+> \(Supervisor\.Default\) shutdown abnormally + \*\* \(exit\) :stop + Number: 1 + Start Call: Logger.TranslatorTest.abnormal\(\) + """ + end + + test "translates :supervisor_bridge progress" do + assert capture_log(:info, fn -> + trap = Process.flag(:trap_exit, true) + {:ok, pid} = :supervisor_bridge.start_link(MyBridge, :normal) + receive do: ({:EXIT, ^pid, _} -> :ok) + Process.flag(:trap_exit, trap) + end) =~ ~r""" + \[info\] Child of Supervisor #PID<\d+\.\d+\.\d+> \(Logger\.TranslatorTest\.MyBridge\) started + Pid: #PID<\d+\.\d+\.\d+> + Start Call: Logger.TranslatorTest.MyBridge.init\(:normal\) + """ + end + + test "translates :supervisor_bridge reports" do + assert capture_log(:info, fn -> + trap = Process.flag(:trap_exit, true) + {:ok, pid} = :supervisor_bridge.start_link(MyBridge, :stop) + receive do: ({:EXIT, ^pid, _} -> :ok) + Process.flag(:trap_exit, trap) + end) =~ ~r""" + \[error\] Child of Supervisor #PID<\d+\.\d+\.\d+> \(Logger\.TranslatorTest\.MyBridge\) terminated + \*\* \(exit\) :stop + Pid: #PID<\d+\.\d+\.\d+> + Start Module: Logger.TranslatorTest.MyBridge + """ + end + + test "handles :undefined MFA properly" do + defmodule WeirdFunctionNamesGenServer do + use GenServer + def unquote(:"start link")(), do: GenServer.start_link(__MODULE__, []) + def handle_call(_call, _from, _state), do: raise("oops") + end + + child_opts = [restart: :temporary, function: :"start link"] + children = [Supervisor.Spec.worker(WeirdFunctionNamesGenServer, [], child_opts)] + {:ok, sup} = Supervisor.start_link(children, strategy: :simple_one_for_one) + + log = capture_log(:info, fn -> + {:ok, pid} = Supervisor.start_child(sup, []) + catch_exit(GenServer.call(pid, :error)) + [] = Supervisor.which_children(sup) + end) + + assert log =~ ~s(Start Call: Logger.TranslatorTest.WeirdFunctionNamesGenServer."start link"/?) 
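# Editor's note (not part of the original patch): the trailing "/?" in the
# assertion above comes from Logger.Translator.format_mfa/3 earlier in this
# patch, which renders [inspect(mod), ?., Inspect.Function.escape_name(fun) | "/?"]
# whenever the reported arity is :undefined instead of calling
# Exception.format_mfa/3.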
+ after + :code.purge(WeirdFunctionNamesGenServer) + :code.delete(WeirdFunctionNamesGenServer) + end + + def task(parent, fun \\ (fn() -> raise "oops" end)) do + mon = Process.monitor(parent) + Process.unlink(parent) + receive do + :go -> + fun.() + {:DOWN, ^mon, _, _, _} -> + exit(:shutdown) + end + end + + def sub_task(parent, fun \\ (fn(_) -> raise "oops" end)) do + mon = Process.monitor(parent) + Process.unlink(parent) + {:ok, pid} = Task.start_link(__MODULE__, :sleep, [self()]) + receive do: (:sleeping -> :ok) + receive do + :go -> + fun.(pid) + {:DOWN, ^mon, _, _, _} -> + exit(:shutdown) + end + end + + def sleep(pid) do + mon = Process.monitor(pid) + send(pid, :sleeping) + receive do: ({:DOWN, ^mon, _, _, _} -> exit(:shutdown)) + end + + def error(), do: {:error, :stop} + + def abnormal() do + :proc_lib.start_link(__MODULE__, :abnormal_init, []) + end + + def abnormal_init() do + Process.flag(:trap_exit, true) + :proc_lib.init_ack({:ok, self()}) + receive do: ({:EXIT, _, _} -> exit(:stop)) + end +end diff --git a/lib/logger/test/logger/utils_test.exs b/lib/logger/test/logger/utils_test.exs new file mode 100644 index 00000000000..1009b63578c --- /dev/null +++ b/lib/logger/test/logger/utils_test.exs @@ -0,0 +1,103 @@ +defmodule Logger.UtilsTest do + use Logger.Case, async: true + + import Logger.Utils + + import Kernel, except: [inspect: 2] + defp inspect(format, args), do: Logger.Utils.inspect(format, args, 10) + + test "truncate/2" do + # ASCII binaries + assert truncate("foo", 4) == "foo" + assert truncate("foo", 3) == "foo" + assert truncate("foo", 2) == ["fo", " (truncated)"] + + # UTF-8 binaries + assert truncate("olá", 2) == ["ol", " (truncated)"] + assert truncate("olá", 3) == ["ol", " (truncated)"] + assert truncate("olá", 4) == "olá" + assert truncate("ááááá:", 10) == ["ááááá", " (truncated)"] + assert truncate("áááááá:", 10) == ["ááááá", " (truncated)"] + + # Charlists + assert truncate('olá', 2) == ['olá', " (truncated)"] + assert truncate('olá', 3) == ['olá', " (truncated)"] + assert truncate('olá', 4) == 'olá' + + # Chardata + assert truncate('ol' ++ "á", 2) == ['ol' ++ "", " (truncated)"] + assert truncate('ol' ++ "á", 3) == ['ol' ++ "", " (truncated)"] + assert truncate('ol' ++ "á", 4) == 'ol' ++ "á" + + # :infinity + long_string = String.duplicate("foo", 10_000) + assert truncate(long_string, :infinity) == long_string + end + + test "inspect/2 formats" do + assert inspect('~p', [1]) == {'~ts', [["1"]]} + assert inspect("~p", [1]) == {'~ts', [["1"]]} + assert inspect(:"~p", [1]) == {'~ts', [["1"]]} + end + + test "inspect/2 sigils" do + assert inspect('~10.10tp', [1]) == {'~ts', [["1"]]} + assert inspect('~-10.10tp', [1]) == {'~ts', [["1"]]} + + assert inspect('~10.10lp', [1]) == {'~ts', [["1"]]} + assert inspect('~10.10x~p~n', [1, 2, 3]) == {'~10.10x~ts~n', [1, 2, ["3"]]} + end + + test "inspect/2 with modifier t has no effect (as it is the default)" do + assert inspect('~tp', [1]) == {'~ts', [["1"]]} + assert inspect('~tw', [1]) == {'~ts', [["1"]]} + end + + test "inspect/2 with modifier l always prints lists" do + assert inspect('~lp', ['abc']) == + {'~ts', [["[", "97", ",", " ", "98", ",", " ", "99", "]"]]} + assert inspect('~lw', ['abc']) == + {'~ts', [["[", "97", ",", " ", "98", ",", " ", "99", "]"]]} + end + + test "inspect/2 with modifier for width" do + assert inspect('~5lp', ['abc']) == + {'~ts', [["[", "97", ",", "\n ", "98", ",", "\n ", "99", "]"]]} + + assert inspect('~5lw', ['abc']) == + {'~ts', [["[", "97", ",", " ", "98", ",", " ", "99", "]"]]} + end + 
+ test "inspect/2 with modifier for limit" do + assert inspect('~5lP', ['abc', 2]) == + {'~ts', [["[", "97", ",", "\n ", "98", ",", "\n ", "...", "]"]]} + + assert inspect('~5lW', ['abc', 2]) == + {'~ts', [["[", "97", ",", " ", "98", ",", " ", "...", "]"]]} + end + + test "inspect/2 truncates binaries" do + assert inspect('~ts', ["abcdeabcdeabcdeabcde"]) == + {'~ts', ["abcdeabcde"]} + + assert inspect('~ts~ts~ts', ["abcdeabcde", "abcde", "abcde"]) == + {'~ts~ts~ts', ["abcdeabcde", "", ""]} + end + + test "timestamp/1" do + assert {{_, _, _}, {_, _, _, _}} = timestamp(true) + end + + test "format_date/1" do + date = {2015, 1, 30} + assert format_date(date) == ["2015", ?-, [?0, "1"], ?-, "30"] + end + + test "format_time/1" do + time = {12, 30, 10, 1} + assert format_time(time) == ["12", ?:, "30", ?:, "10", ?., [?0, ?0, "1"]] + + time = {12, 30, 10, 10} + assert format_time(time) == ["12", ?:, "30", ?:, "10", ?., [?0, "10"]] + end +end diff --git a/lib/logger/test/logger_test.exs b/lib/logger/test/logger_test.exs new file mode 100644 index 00000000000..c14a638994f --- /dev/null +++ b/lib/logger/test/logger_test.exs @@ -0,0 +1,369 @@ +defmodule LoggerTest do + use Logger.Case + require Logger + + setup_all do + Logger.configure_backend(:console, metadata: [:application, :module]) + on_exit(fn -> + Logger.configure_backend(:console, metadata: []) + end) + end + + defp msg_with_meta(text) do + msg("module=LoggerTest #{text}") + end + + test "add_translator/1 and remove_translator/1" do + defmodule CustomTranslator do + def t(:debug, :info, :format, {'hello: ~p', [:ok]}) do + :skip + end + + def t(:debug, :info, :format, {'world: ~p', [:ok]}) do + {:ok, "rewritten"} + end + + def t(_, _, _, _) do + :none + end + end + + assert Logger.add_translator({CustomTranslator, :t}) + + assert capture_log(fn -> + :error_logger.info_msg('hello: ~p', [:ok]) + end) == "" + + assert capture_log(fn -> + :error_logger.info_msg('world: ~p', [:ok]) + end) =~ "\[info\] rewritten" + after + assert Logger.remove_translator({CustomTranslator, :t}) + end + + test "add_backend/1 and remove_backend/1" do + assert :ok = Logger.remove_backend(:console) + assert Application.get_env(:logger, :backends) == [] + assert Logger.remove_backend(:console) == + {:error, :not_found} + + assert capture_log(fn -> + assert Logger.debug("hello", []) == :ok + end) == "" + + assert {:ok, _pid} = Logger.add_backend(:console) + assert Application.get_env(:logger, :backends) == [:console] + assert Logger.add_backend(:console) == {:error, :already_present} + assert Application.get_env(:logger, :backends) == [:console] + end + + test "add_backend/1 with {module, id}" do + defmodule MyBackend do + @behaviour :gen_event + + def init({MyBackend, :hello}) do + {:ok, :hello} + end + + def handle_event(_event, state) do + {:ok, state} + end + + def handle_call(:error, _) do + raise "oops" + end + + def handle_info(_msg, state) do + {:ok, state} + end + + def code_change(_old_vsn, state, _extra) do + {:ok, state} + end + + def terminate(_reason, _state) do + :ok + end + end + + assert {:ok, _} = Logger.add_backend({MyBackend, :hello}) + assert {:error, :already_present} = Logger.add_backend({MyBackend, :hello}) + assert :ok = Logger.remove_backend({MyBackend, :hello}) + end + + test "level/0" do + assert Logger.level == :debug + end + + test "process metadata" do + assert Logger.metadata(data: true) == :ok + assert Logger.metadata == [data: true] + assert Logger.metadata(data: true) == :ok + assert Logger.metadata == [data: true] + assert 
Logger.metadata(meta: 1) == :ok + metadata = Logger.metadata + assert Enum.sort(metadata) == [data: true, meta: 1] + assert Logger.metadata(data: nil) == :ok + assert Logger.metadata == [meta: 1] + + assert Logger.reset_metadata([meta: 2]) == :ok + assert Logger.metadata == [meta: 2] + assert Logger.reset_metadata([data: true, app: nil]) == :ok + assert Logger.metadata == [data: true] + assert Logger.reset_metadata == :ok + assert Logger.metadata == [] + end + + test "metadata merge" do + assert Logger.metadata([module: Sample]) == :ok + + assert capture_log(fn -> + assert Logger.bare_log(:info, "ok", [application: nil, module: LoggerTest]) == :ok + end) =~ msg("application= module=LoggerTest [info] ok") + end + + test "metadata merge when the argument function returns metadata" do + assert Logger.metadata([module: Sample]) == :ok + + assert capture_log(fn -> + assert Logger.bare_log(:info, fn -> {"ok", [module: "Function"]} end, [application: nil, module: LoggerTest]) == :ok + end) =~ msg("application= module=Function [info] ok") + end + + test "enable/1 and disable/1" do + assert Logger.metadata([]) == :ok + + assert capture_log(fn -> + assert Logger.debug("hello", []) == :ok + end) =~ msg_with_meta("[debug] hello") + + assert Logger.disable(self()) == :ok + + assert capture_log(fn -> + assert Logger.debug("hello", []) == :ok + end) == "" + + assert Logger.metadata([]) == :ok + + assert capture_log(fn -> + assert Logger.debug("hello", []) == :ok + end) == "" + + assert Logger.enable(self()) == :ok + + assert capture_log(fn -> + assert Logger.debug("hello", []) == :ok + end) =~ msg_with_meta("[debug] hello") + end + + test "compare_levels/2" do + assert Logger.compare_levels(:debug, :debug) == :eq + assert Logger.compare_levels(:debug, :info) == :lt + assert Logger.compare_levels(:debug, :warn) == :lt + assert Logger.compare_levels(:debug, :error) == :lt + + assert Logger.compare_levels(:info, :debug) == :gt + assert Logger.compare_levels(:info, :info) == :eq + assert Logger.compare_levels(:info, :warn) == :lt + assert Logger.compare_levels(:info, :error) == :lt + + assert Logger.compare_levels(:warn, :debug) == :gt + assert Logger.compare_levels(:warn, :info) == :gt + assert Logger.compare_levels(:warn, :warn) == :eq + assert Logger.compare_levels(:warn, :error) == :lt + + assert Logger.compare_levels(:error, :debug) == :gt + assert Logger.compare_levels(:error, :info) == :gt + assert Logger.compare_levels(:error, :warn) == :gt + assert Logger.compare_levels(:error, :error) == :eq + end + + test "debug/2" do + assert capture_log(fn -> + assert Logger.debug("hello", []) == :ok + end) =~ msg_with_meta("[debug] hello") + + assert capture_log(:info, fn -> + assert Logger.debug("hello", []) == :ok + end) == "" + end + + test "info/2" do + assert capture_log(fn -> + assert Logger.info("hello", []) == :ok + end) =~ msg_with_meta("[info] hello") + + assert capture_log(:warn, fn -> + assert Logger.info("hello", []) == :ok + end) == "" + end + + test "warn/2" do + assert capture_log(fn -> + assert Logger.warn("hello", []) == :ok + end) =~ msg_with_meta("[warn] hello") + + assert capture_log(:error, fn -> + assert Logger.warn("hello", []) == :ok + end) == "" + end + + test "error/2" do + assert capture_log(fn -> + assert Logger.error("hello", []) == :ok + end) =~ msg_with_meta("[error] hello") + end + + test "remove unused calls at compile time" do + Logger.configure(compile_time_purge_level: :info) + + defmodule Sample do + def debug do + Logger.debug "hello" + end + + def info do + Logger.info 
"hello" + end + end + + assert capture_log(fn -> + assert Sample.debug == :ok + end) == "" + + assert capture_log(fn -> + assert Sample.info == :ok + end) =~ msg("module=LoggerTest.Sample [info] hello") + after + Logger.configure(compile_time_purge_level: :debug) + end + + test "unused variable warnings suppressed when we remove macros from the AST" do + Logger.configure(compile_time_purge_level: :info) + + # This should not warn, even if the Logger call is purged from the AST. + assert ExUnit.CaptureIO.capture_io(:stderr, fn -> + Code.eval_string """ + defmodule Unused do + require Logger + + def hello(a, b) do + Logger.debug(["a: ", inspect(a), ", b: ", inspect(b)]) + end + end + """ + end) == "" + + assert Unused.hello(1, 2) == :ok + after + :code.purge(Unused) + :code.delete(Unused) + Logger.configure(compile_time_purge_level: :debug) + end + + test "set application metadata at compile time" do + Logger.configure(compile_time_application: nil) + defmodule SampleNoApp do + def info do + Logger.info "hello" + end + end + + assert capture_log(fn -> + assert SampleNoApp.info == :ok + end) =~ msg("module=LoggerTest.SampleNoApp [info] hello") + + Logger.configure(compile_time_application: :sample_app) + defmodule SampleApp do + def info do + Logger.info "hello" + end + end + + assert capture_log(fn -> + assert SampleApp.info == :ok + end) =~ msg("application=sample_app module=LoggerTest.SampleApp [info] hello") + after + Logger.configure(compile_time_application: nil) + end + + test "log/2 truncates messages" do + Logger.configure(truncate: 4) + assert capture_log(fn -> + Logger.log(:debug, "hello") + end) =~ "hell (truncated)" + after + Logger.configure(truncate: 8096) + end + + test "log/2 with to_string/1 conversion" do + Logger.configure(truncate: 4) + assert capture_log(fn -> + Logger.log(:debug, :hello) + end) =~ "hell (truncated)" + after + Logger.configure(truncate: 8096) + end + + test "log/2 does not fails when the logger is off" do + logger = Process.whereis(Logger) + Process.unregister(Logger) + + try do + assert Logger.log(:debug, "hello") == {:error, :noproc} + after + Process.register(logger, Logger) + end + end + + test "log/2 prunes bad unicode chars" do + assert capture_log(fn -> + assert Logger.log(:debug, "he" <> <<185>> <> "lo") == :ok + end) =~ "he�lo" + end + + test "log/2 relies on sync_threshold" do + Logger.remove_backend(:console) + Logger.configure(sync_threshold: 0) + for _ <- 1..1000, do: Logger.log(:info, "some message") + after + Logger.configure(sync_threshold: 20) + Logger.add_backend(:console) + end + + test "stop the application silently" do + Application.put_env(:logger, :backends, []) + Logger.App.stop() + Application.start(:logger) + + assert capture_log(fn -> + assert Logger.debug("hello", []) == :ok + end) == "" + + assert {:ok, _} = Logger.add_backend(:console) + assert Logger.add_backend(:console) == + {:error, :already_present} + after + Application.put_env(:logger, :backends, [:console]) + Logger.App.stop() + Application.start(:logger) + end + + test "restarts Logger.Config on Logger exits" do + Process.whereis(Logger) |> Process.exit(:kill) + wait_for_logger() + wait_for_handler(Logger, Logger.Config) + wait_for_handler(:error_logger, Logger.ErrorHandler) + end + + test "Logger.Config updates config on config_change/3" do + :ok = Logger.configure([level: :debug]) + try do + Application.put_env(:logger, :level, :error) + assert Logger.App.config_change([level: :error], [], []) === :ok + assert Logger.level() === :error + after + 
Logger.configure([level: :debug]) + end + end +end diff --git a/lib/logger/test/test_helper.exs b/lib/logger/test/test_helper.exs new file mode 100644 index 00000000000..ff3dcacea60 --- /dev/null +++ b/lib/logger/test/test_helper.exs @@ -0,0 +1,47 @@ +Logger.configure_backend(:console, colors: [enabled: false]) +ExUnit.start() + +defmodule Logger.Case do + use ExUnit.CaseTemplate + import ExUnit.CaptureIO + + using _ do + quote do + import Logger.Case + end + end + + def msg(msg) do + ~r/\d\d\:\d\d\:\d\d\.\d\d\d #{Regex.escape(msg)}/ + end + + def wait_for_handler(manager, handler) do + unless handler in :gen_event.which_handlers(manager) do + Process.sleep(10) + wait_for_handler(manager, handler) + end + end + + def wait_for_logger() do + try do + :gen_event.which_handlers(Logger) + else + _ -> + :ok + catch + :exit, _ -> + Process.sleep(10) + wait_for_logger() + end + end + + def capture_log(level \\ :debug, fun) do + Logger.configure(level: level) + capture_io(:user, fn -> + fun.() + Logger.flush() + end) + after + Logger.configure(level: :debug) + end +end diff --git a/lib/mix/lib/mix.ex b/lib/mix/lib/mix.ex index 4db88d5f3cc..597265d077a 100644 --- a/lib/mix/lib/mix.ex +++ b/lib/mix/lib/mix.ex @@ -1,22 +1,185 @@ defmodule Mix do - @moduledoc """ - Mix is a build tool that provides tasks for creating, compiling and - testing Elixir projects. Mix is inspired by the Leiningen - build tool for Clojure and was written by one of its contributors. + @moduledoc ~S""" + Mix is a build tool that provides tasks for creating, compiling, + and testing Elixir projects, managing its dependencies, and more. - This module works as a facade for accessing the most common functionality - in Elixir, such as the shell and the current project configuration. + ## Mix.Project + + The foundation of Mix is a project. A project can be defined by using + `Mix.Project` in a module, usually placed in a file named `mix.exs`: + + defmodule MyApp.Mixfile do + use Mix.Project + + def project do + [app: :my_app, + version: "1.0.0"] + end + end + + See the `Mix.Project` module for detailed documentation on Mix projects. + + Once the project is defined, a number of default Mix tasks can be run + directly from the command line: + + * `mix compile` - compiles the current project + * `mix test` - runs tests for the given project + * `mix run` - runs a particular command inside the project + + Each task has its own options and sometimes specific configuration + to be defined in the `project/0` function. You can use `mix help` + to list all available tasks and `mix help NAME` to show help for + a particular task. + + The best way to get started with your first project is by calling + `mix new my_project` from the command line. + + ## Mix.Task + + Tasks are what make Mix extensible. + + Projects can extend Mix behaviour by adding their own tasks. For + example, adding the task below inside your project will + make it available to everyone that uses your project: + + defmodule Mix.Tasks.Hello do + use Mix.Task + + def run(_) do + Mix.shell.info "hello" + end + end + + The task can now be invoked with `mix hello`. + + ## Dependencies + + Mix also manages your dependencies and integrates nicely with the [Hex package + manager](https://hex.pm). + + In order to use dependencies, you need to add a `:deps` key + to your project configuration. 
We often extract the list of dependencies + into its own function: + + defmodule MyApp.Mixfile do + use Mix.Project + + def project do + [app: :my_app, + version: "1.0.0", + deps: deps()] + end + + defp deps do + [{:ecto, "~> 0.2.5"}, + {:plug, github: "elixir-lang/plug"}] + end + end + + You can run `mix help deps` to learn more about dependencies in Mix. + + ## Environments + + Mix supports different environments. Environments allow developers to prepare + and organize their project specifically for different scenarios. By default, + Mix provides three environments: + + * `:dev` - the default environment + * `:test` - the environment `mix test` runs on + * `:prod` - the environment your dependencies run on + + The environment can be changed via the command line by setting + the `MIX_ENV` environment variable, for example: + + $ MIX_ENV=prod mix run server.exs + + ## Aliases + + Aliases are shortcuts or tasks specific to the current project. + + In the `Mix.Task` section, we have defined a task that would be + available to everyone using our project as a dependency. What if + we wanted the task to only be available for our project? Just + define an alias: + + defmodule MyApp.Mixfile do + use Mix.Project + + def project do + [app: :my_app, + version: "1.0.0", + aliases: aliases()] + end + + defp aliases do + [c: "compile", + hello: &hello/1] + end + + defp hello(_) do + Mix.shell.info "Hello world" + end + end + + In the example above, we have defined two aliases. One is `mix c` + which is a shortcut for `mix compile`. The other is named + `mix hello`, which is the equivalent to the `Mix.Tasks.Hello` + we have defined in the `Mix.Task` section. + + Aliases may also be lists, specifying multiple tasks to be run + consecutively: + + [all: [&hello/1, "deps.get --only #{Mix.env}", "compile"]] + + In the example above, we have defined an alias named `mix all`, + that prints hello, then fetches dependencies specific to the + current environment and compiles it. + + Arguments given to the alias will be appended to the arguments + of the last task in the list, if the last task is a function + they will be given as a list of strings to the function. + + Finally, aliases can also be used to augment existing tasks. + Let's suppose you want to augment `mix clean` to clean another + directory Mix does not know about: + + [clean: ["clean", &clean_extra/1]] + + Where `&clean_extra/1` would be a function in your `mix.exs` + with extra clean up logic. + + Note aliases do not show up on `mix help`. + Aliases defined in the current project do not affect its dependencies and aliases defined in dependencies are not accessible from the current project. + + ## Environment variables + + Several environment variables can be used to modify Mix's behaviour. + + Mix responds to the following variables: + + * `MIX_ARCHIVES` - specifies the directory into which the archives should be installed + * `MIX_DEBUG` - outputs debug information about each task before running it + * `MIX_ENV` - specifies which environment should be used. 
See [Environments](#module-environments) + * `MIX_EXS` - changes the full path to the `mix.exs` file + * `MIX_HOME` - path to mix's home directory, stores configuration files and scripts used by mix + * `MIX_PATH` - appends extra code paths + * `MIX_QUIET` - does not print information messages to the terminal + * `MIX_REBAR` - path to rebar command that overrides the one mix installs + * `MIX_REBAR3` - path to rebar3 command that overrides the one mix installs + + Environment variables that are not meant to hold a value (and act basically as + flags) should be set to either `1` or `true`, for example: + + $ MIX_DEBUG=1 mix compile - For getting started with Elixir, checkout out the guide available on - [Elixir's website](http://elixir-lang.org). """ use Application @doc false def start do - Application.start(:elixir) - Application.start(:mix) + {:ok, _} = Application.ensure_all_started(:mix) + :ok end @doc false @@ -24,85 +187,92 @@ defmodule Mix do import Supervisor.Spec children = [ + worker(Mix.State, []), worker(Mix.TasksServer, []), worker(Mix.ProjectStack, []) ] - opts = [strategy: :one_for_one, name: Mix.Supervisor] - stat = Supervisor.start_link(children, opts) - - if env = System.get_env("MIX_ENV") do - env(String.to_atom env) - end - - stat + opts = [strategy: :one_for_one, name: Mix.Supervisor, max_restarts: 0] + Supervisor.start_link(children, opts) end @doc """ - Returns the mix environment. + Returns the Mix environment. """ def env do # env is not available on bootstrapping, so set a :dev default - Application.get_env(:mix, :env, :dev) + Mix.State.get(:env, :dev) end @doc """ - Changes the current mix env. + Changes the current Mix environment to `env`. Be careful when invoking this function as any project configuration won't be reloaded. """ def env(env) when is_atom(env) do - Application.put_env(:mix, :env, env) + Mix.State.put(:env, env) + end + + @doc """ + Returns the default compilers used by Mix. + + It can be used in your `mix.exs` to prepend or + append new compilers to Mix: + + def project do + [compilers: Mix.compilers ++ [:foo, :bar]] + end + + """ + def compilers do + [:yecc, :leex, :erlang, :elixir, :xref, :app] end @doc """ - The shell is a wrapper for doing IO. + Returns the current shell. + + `shell/0` can be used as a wrapper for the current shell. It contains + conveniences for requesting information from the user, printing to the shell and so + forth. The Mix shell is swappable (see `shell/1`), allowing developers to use + a test shell that simply sends messages to the current process instead of + performing IO (see `Mix.Shell.Process`). - It contains conveniences for asking the user information, - printing status and so forth. It is also swappable, - allowing developers to use a test shell that simply sends the - messages to the current process. + By default, this returns `Mix.Shell.IO`. """ def shell do - Application.get_env(:mix, :shell, Mix.Shell.IO) + Mix.State.get(:shell, Mix.Shell.IO) end @doc """ Sets the current shell. + + After calling this function, `shell` becomes the shell that is returned by + `shell/0`. """ def shell(shell) do - Application.put_env(:mix, :shell, shell) + Mix.State.put(:shell, shell) end @doc """ - Raises a mix error that is nicely formatted. + Returns true if Mix is in debug mode. """ - def raise(message) when is_binary(message) do - Kernel.raise Mix.Error, mix: mix_info, message: message + def debug? do + Mix.State.get(:debug, false) end @doc """ - Raises a mix compatible exception. 
- - A mix compatible exception has a `mix_error` field which mix - uses to store the project or application name which is - automatically by the formatting tools. + Sets Mix debug mode. """ - def raise(exception, opts) when is_atom(exception) do - Kernel.raise %{exception.exception(opts) | mix: mix_info} + def debug(debug) when is_boolean(debug) do + Mix.State.put(:debug, debug) end - defp mix_info do - case Mix.ProjectStack.peek do - %{name: name, config: config, pos: pos} when pos > 0 -> - if app = config[:app] do - {:app, app} - else - {:project, name} - end - _ -> - :none - end + @doc """ + Raises a Mix error that is nicely formatted. + """ + @spec raise(binary) :: no_return + def raise(message) when is_binary(message) do + Kernel.raise Mix.Error, mix: true, message: message end end diff --git a/lib/mix/lib/mix/archive.ex b/lib/mix/lib/mix/archive.ex deleted file mode 100644 index fa2c751f80d..00000000000 --- a/lib/mix/lib/mix/archive.ex +++ /dev/null @@ -1,86 +0,0 @@ -defmodule Mix.Archive do - @moduledoc """ - Module responsible for managing [archives](http://www.erlang.org/doc/man/code.html). - - An archive is a zip file containing the app and beam files. - A valid archive must be named with the name of the application and - it should contain the relative paths beginning with the application - name, e.g. the root of the zip file should be `my_app/ebin/Elixir.My.App.beam`. - """ - - @doc """ - Returns the archive name based on `app` and `version`. - - ## Examples - - iex> Mix.Archive.name("foo", nil) - "foo.ez" - - iex> Mix.Archive.name("foo", "0.1.0") - "foo-0.1.0.ez" - - """ - def name(app, nil), do: "#{app}.ez" - def name(app, vsn), do: "#{app}-#{vsn}.ez" - - @doc """ - Returns the archive internal directory from its `path`. - - ## Examples - - iex> Mix.Archive.dir("foo/bar/baz-0.1.0.ez") - "baz-0.1.0" - - """ - def dir(path) do - path |> Path.basename |> Path.rootname - end - - @doc """ - Returns the ebin directory inside the given archive path. - - ## Examples - - iex> Mix.Archive.ebin("foo/bar/baz-0.1.0.ez") - "foo/bar/baz-0.1.0.ez/baz-0.1.0/ebin" - - """ - def ebin(path) do - dir = dir(path) - Path.join [path, dir, "ebin"] - end - - @doc """ - Creates an application archive. - - It receives the archive file in the format - `path/to/archive/app-vsn.ez` and the path to the root of - the project to be archived. Everything in the `ebin` and - `priv` directories is archived. Dependencies are not - archived. - """ - def create(source, target) do - source_path = Path.expand(source) - target_path = Path.expand(target) - dir = dir(target_path) |> String.to_char_list - {:ok, _} = :zip.create(String.to_char_list(target_path), - files_to_add(source_path, dir), - uncompress: ['.beam', '.app']) - end - - defp files_to_add(path, dir) do - File.cd! 
path, fn -> - ebin = Path.wildcard("ebin/*.{beam,app}") - priv = Path.wildcard("priv/**/*") - - Enum.reduce ebin ++ priv, [], fn(f, acc) -> - case File.read(f) do - {:ok, bin} -> - [{Path.join(dir, f) |> String.to_char_list, bin}|acc] - {:error, _} -> - acc - end - end - end - end -end diff --git a/lib/mix/lib/mix/cli.ex b/lib/mix/lib/mix/cli.ex index 4fd0484d0db..e11319d24e0 100644 --- a/lib/mix/lib/mix/cli.ex +++ b/lib/mix/lib/mix/cli.ex @@ -8,6 +8,9 @@ defmodule Mix.CLI do Mix.Local.append_archives Mix.Local.append_paths + if env_variable_activated?("MIX_QUIET"), do: Mix.shell(Mix.Shell.Quiet) + if env_variable_activated?("MIX_DEBUG"), do: Mix.debug(true) + case check_for_shortcuts(args) do :help -> proceed(["help"]) @@ -19,82 +22,90 @@ defmodule Mix.CLI do end defp proceed(args) do - Mix.Tasks.Local.Hex.maybe_update() load_dot_config() - args = load_mixfile(args) + load_mixfile() {task, args} = get_task(args) + ensure_hex(task) change_env(task) run_task(task, args) end - defp load_mixfile(args) do + defp load_mixfile() do file = System.get_env("MIX_EXS") || "mix.exs" - if File.regular?(file) do + _ = if File.regular?(file) do Code.load_file(file) end - args end - defp get_task(["-" <> _|_]) do - Mix.shell.error "** (Mix) Cannot implicitly pass flags to default mix task, " <> - "please invoke instead: mix #{Mix.Project.config[:default_task]}" - exit(1) + defp get_task(["-" <> _ | _]) do + Mix.shell.error "** (Mix) Mix requires a task name when passing flags, " <> + "try invoking \"mix #{Mix.Project.config[:default_task]}\" instead" + exit({:shutdown, 1}) end - defp get_task([h|t]) do + defp get_task([h | t]) do {h, t} end defp get_task([]) do - {Mix.Project.config[:default_task], []} + case Mix.Project.get do + nil -> + Mix.shell.error "** (Mix) \"mix\" with no arguments must be executed in a directory with a mix.exs file" + Mix.shell.info """ + + Usage: mix [task] + + Examples: + + mix - Invokes the default task (current: "mix run") + mix new PATH - Creates a new Elixir project at the given path + mix help - Lists all available tasks + mix help TASK - Prints documentation for a given task + """ + exit({:shutdown, 1}) + _ -> + {Mix.Project.config[:default_task], []} + end end defp run_task(name, args) do try do - if Mix.Project.get do - Mix.Task.run "loadconfig" - Mix.Task.run "deps.loadpaths", ["--no-deps-check"] - Mix.Task.run "loadpaths", ["--no-elixir-version-check"] - Mix.Task.reenable "deps.loadpaths" - Mix.Task.reenable "loadpaths" - end - - # If the task is not available, let's try to - # compile the repository and then run it again. - cond do - Mix.Task.get(name) -> - Mix.Task.run(name, args) - Mix.Project.get -> - Mix.Task.run("compile") - Mix.Task.run(name, args) - true -> - # Raise no task error - Mix.Task.get!(name) - end + ensure_no_slashes(name) + Mix.Task.run "loadconfig" + Mix.Task.run name, args rescue - # We only rescue exceptions in the mix namespace, all + # We only rescue exceptions in the Mix namespace, all # others pass through and will explode on the users face exception -> stacktrace = System.stacktrace - if info = Map.get(exception, :mix) do + if Map.get(exception, :mix) && not Mix.debug? 
do mod = exception.__struct__ |> Module.split() |> Enum.at(0, "Mix") - Mix.shell.error "** (#{mod})#{show_mix_info(info)} #{Exception.message(exception)}" + Mix.shell.error "** (#{mod}) #{Exception.message(exception)}" + exit({:shutdown, 1}) else reraise exception, stacktrace end - - exit(1) end end - defp show_mix_info({:project, proj}), do: " [#{inspect proj}]" - defp show_mix_info({:app, app}), do: " [#{app}]" - defp show_mix_info(:none), do: "" + defp env_variable_activated?(name) do + System.get_env(name) in ~w(1 true) + end + + defp ensure_hex("local.hex"), + do: :ok + defp ensure_hex(_task), + do: Mix.Hex.ensure_updated?() + + defp ensure_no_slashes(task) do + if String.contains?(task, "/") do + raise Mix.NoTaskError, task: task + end + end defp change_env(task) do - if nil?(System.get_env("MIX_ENV")) && - (env = Mix.Project.config[:preferred_cli_env][task]) do + if env = preferred_cli_env(task) do Mix.env(env) if project = Mix.Project.pop do %{name: name, file: file} = project @@ -103,6 +114,15 @@ defmodule Mix.CLI do end end + defp preferred_cli_env(task) do + if System.get_env("MIX_ENV") do + nil + else + task = String.to_atom(task) + Mix.Project.config[:preferred_cli_env][task] || Mix.Task.preferred_cli_env(task) + end + end + defp load_dot_config do path = Path.join(Mix.Utils.mix_home, "config.exs") if File.regular?(path) do @@ -111,15 +131,16 @@ defmodule Mix.CLI do end defp display_version() do - IO.puts "Elixir #{System.version}" + IO.puts :erlang.system_info(:system_version) + IO.puts "Mix " <> System.build_info[:build] end # Check for --help or --version in the args - defp check_for_shortcuts([first_arg|_]) when first_arg in - ["--help", "-h", "-help"], do: :help + defp check_for_shortcuts([first_arg | _]) when first_arg in ["--help", "-h"], + do: :help - defp check_for_shortcuts([first_arg|_]) when first_arg in - ["--version", "-v"], do: :version + defp check_for_shortcuts([first_arg | _]) when first_arg in ["--version", "-v"], + do: :version defp check_for_shortcuts(_), do: nil end diff --git a/lib/mix/lib/mix/compilers/elixir.ex b/lib/mix/lib/mix/compilers/elixir.ex index f85ae9cc786..d55679ff616 100644 --- a/lib/mix/lib/mix/compilers/elixir.ex +++ b/lib/mix/lib/mix/compilers/elixir.ex @@ -1,202 +1,423 @@ defmodule Mix.Compilers.Elixir do @moduledoc false + @manifest_vsn :v6 + + import Record + + defrecord :module, [:module, :kind, :sources, :beam, :binary] + defrecord :source, [ + source: nil, + size: 0, + compile_references: [], + runtime_references: [], + compile_dispatches: [], + runtime_dispatches: [], + external: [] + ] + @doc """ Compiles stale Elixir files. - It expects a manifest file, the source directories, the extensions - to read in sources, the destination directory, a flag to know if - compilation is being forced or not and a callback to be invoked - once (and only if) compilation starts. + It expects a `manifest` file, the source directories, the destination + directory, a flag to know if compilation is being forced or not, and a + list of any additional compiler options. - The manifest is written down with information including dependencies - in between modules, which helps it recompile only the modules that + The `manifest` is written down with information including dependencies + between modules, which helps it recompile only the modules that have changed at runtime. 
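As a minimal sketch (the manifest path, extensions, and options are illustrative, not taken from this patch), a compile task might drive the revised `compile/6` along these lines:

    manifest = Path.join(Mix.Project.manifest_path(), ".compile.elixir")
    dest = Mix.Project.compile_path()

    # manifest, srcs, dest, exts, force, opts — matching the new signature below
    Mix.Compilers.Elixir.compile(manifest, ["lib"], dest, [:ex], false, verbose: true)

As the body below shows, the call returns a `{stale, removed}` tuple of source paths: the files that were (re)compiled and the ones whose sources disappeared.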
""" - def compile(manifest, srcs, exts, dest, force, on_start) do - all = Mix.Utils.extract_files(srcs, exts) - all_entries = read_manifest(manifest) + def compile(manifest, srcs, dest, exts, force, opts) do + # We fetch the time from before we read files so any future + # change to files are still picked up by the compiler. This + # timestamp is used when writing BEAM files and the manifest. + timestamp = :calendar.universal_time() + all_paths = MapSet.new(Mix.Utils.extract_files(srcs, exts)) + + {all_modules, all_sources} = parse_manifest(manifest, dest) + modified = Mix.Utils.last_modified(manifest) + prev_paths = + for source(source: source) <- all_sources, into: MapSet.new(), do: source removed = - for {_b, _m, source, _d, _f} <- all_entries, not(source in all), do: source + prev_paths + |> MapSet.difference(all_paths) + |> MapSet.to_list changed = if force do # A config, path dependency or manifest has # changed, let's just compile everything - all + MapSet.to_list(all_paths) else - modified = Mix.Utils.last_modified(manifest) + sources_stats = mtimes_and_sizes(all_sources) - # Otherwise let's start with the new ones - # plus the ones that have changed - for(source <- all, - not Enum.any?(all_entries, fn {_b, _m, s, _d, _f} -> s == source end), - do: source) - ++ - for({_b, _m, source, _d, files} <- all_entries, - Mix.Utils.stale?([source|files], [modified]), + # Otherwise let's start with the new sources + new_paths = + all_paths + |> MapSet.difference(prev_paths) + |> MapSet.to_list + + # Plus the sources that have changed in disk + for(source(source: source, external: external, size: size) <- all_sources, + {last_mtime, last_size} = Map.fetch!(sources_stats, source), + times = Enum.map(external, &(sources_stats |> Map.fetch!(&1) |> elem(0))), + size != last_size or Mix.Utils.stale?([last_mtime | times], [modified]), + into: new_paths, do: source) end - {entries, changed} = remove_stale_entries(all_entries, removed ++ changed, [], []) - stale = changed -- removed + {modules, changed} = + update_stale_entries( + all_modules, + all_sources, + removed ++ changed, + stale_local_deps(manifest, modified) + ) + + stale = changed -- removed + sources = update_stale_sources(all_sources, removed, changed) cond do stale != [] -> - do_compile(manifest, entries, stale, dest, on_start) - :ok + compile_manifest(manifest, exts, modules, sources, stale, dest, timestamp, opts) removed != [] -> - :ok + write_manifest(manifest, modules, sources, dest, timestamp) true -> - :noop + :ok end + + {stale, removed} + end + + defp mtimes_and_sizes(sources) do + Enum.reduce(sources, %{}, fn source(source: source, external: external), map -> + Enum.reduce([source | external], map, fn file, map -> + Map.put_new_lazy(map, file, fn -> Mix.Utils.last_modified_and_size(file) end) + end) + end) end @doc """ - Removes compiled files. + Removes compiled files for the given `manifest`. """ - def clean(manifest) do - case File.read(manifest) do - {:ok, contents} -> - contents - |> String.split("\n") - |> Enum.each &(&1 |> String.split("\t") |> hd |> File.rm) - File.rm(manifest) - {:error, _} -> + def clean(manifest, compile_path) do + Enum.each(read_manifest(manifest, compile_path), fn + module(beam: beam) -> + File.rm(beam) + _ -> :ok + end) + end + + @doc """ + Returns protocols and implementations for the given `manifest`. 
+ """ + def protocols_and_impls(manifest, compile_path) do + for module(beam: beam, module: module, kind: kind) <- read_manifest(manifest, compile_path), + match?(:protocol, kind) or match?({:impl, _}, kind), + do: {module, kind, beam} + end + + @doc """ + Reads the manifest. + """ + def read_manifest(manifest, compile_path) do + try do + manifest |> File.read!() |> :erlang.binary_to_term() + else + [@manifest_vsn | data] -> + expand_beam_paths(data, compile_path) + _ -> + [] + rescue + _ -> [] end end - defp do_compile(manifest, entries, stale, dest, on_start) do - Mix.Project.build_structure - on_start.() + defp compile_manifest(manifest, exts, modules, sources, stale, dest, timestamp, opts) do + Mix.Utils.compiling_n(length(stale), hd(exts)) + Mix.Project.ensure_structure() + true = Code.prepend_path(dest) + set_compiler_opts(opts) cwd = File.cwd! + extra = + if opts[:verbose] do + [each_file: &each_file/1] + else + [] + end + # Starts a server responsible for keeping track which files - # were compiled and the dependencies in between them. - {:ok, pid} = Agent.start_link(fn -> - Enum.map(entries, &Tuple.insert_at(&1, 5, nil)) - end) + # were compiled and the dependencies between them. + {:ok, pid} = Agent.start_link(fn -> {modules, sources} end) + long_compilation_threshold = opts[:long_compilation_threshold] || 10 try do - Kernel.ParallelCompiler.files :lists.usort(stale), - each_module: &each_module(pid, dest, cwd, &1, &2, &3), - each_file: &each_file(&1) - Agent.cast pid, fn entries -> - write_manifest(manifest, entries) - entries + _ = Kernel.ParallelCompiler.files stale, + [each_module: &each_module(pid, cwd, &1, &2, &3), + each_long_compilation: &each_long_compilation(&1, long_compilation_threshold), + long_compilation_threshold: long_compilation_threshold, + dest: dest] ++ extra + Agent.cast pid, fn {modules, sources} -> + write_manifest(manifest, modules, sources, dest, timestamp) + {modules, sources} end after - Agent.stop pid + Agent.stop(pid, :normal, :infinity) end :ok end - defp each_module(pid, dest, cwd, source, module, binary) do - source = Path.relative_to(source, cwd) - bin = Atom.to_string(module) - beam = dest - |> Path.join(bin <> ".beam") - |> Path.relative_to(cwd) + defp set_compiler_opts(opts) do + opts + |> Keyword.take(Code.available_compiler_options) + |> Code.compiler_options() + end + + defp each_module(pid, cwd, source, module, binary) do + {compile_references, runtime_references} = Kernel.LexicalTracker.remote_references(module) + + compile_references = + compile_references + |> List.delete(module) + |> Enum.reject(&match?("elixir_" <> _, Atom.to_string(&1))) + + runtime_references = + runtime_references + |> List.delete(module) + + {compile_dispatches, runtime_dispatches} = Kernel.LexicalTracker.remote_dispatches(module) + + compile_dispatches = + compile_dispatches + |> Enum.reject(&match?("elixir_" <> _, Atom.to_string(elem(&1, 0)))) + + runtime_dispatches = + runtime_dispatches + |> Enum.to_list + + kind = detect_kind(module) + source = Path.relative_to(source, cwd) + external = get_external_resources(module, cwd) + + Agent.cast pid, fn {modules, sources} -> + source_external = case List.keyfind(sources, source, source(:source)) do + source(external: old_external) -> external ++ old_external + nil -> external + end + + module_sources = case List.keyfind(modules, module, module(:module)) do + module(sources: old_sources) -> [source | List.delete(old_sources, source)] + nil -> [source] + end - deps = Kernel.LexicalTracker.remotes(module) - |> 
List.delete(module) - |> :lists.usort - |> Enum.map(&Atom.to_string(&1)) - |> Enum.reject(&match?("elixir_" <> _, &1)) + new_module = module( + module: module, + kind: kind, + sources: module_sources, + beam: nil, # They are calculated when writing the manifest + binary: binary + ) - files = for file <- get_external_resources(module), - File.regular?(file), - relative = Path.relative_to(file, cwd), - Path.type(relative) == :relative, - do: relative + new_source = source( + source: source, + size: :filelib.file_size(source), + compile_references: compile_references, + runtime_references: runtime_references, + compile_dispatches: compile_dispatches, + runtime_dispatches: runtime_dispatches, + external: source_external + ) - Agent.cast pid, &:lists.keystore(beam, 1, &1, {beam, bin, source, deps, files, binary}) + modules = List.keystore(modules, module, module(:module), new_module) + sources = List.keystore(sources, source, source(:source), new_source) + {modules, sources} + end end - defp get_external_resources(module) do - module.__info__(:attributes)[:external_resource] || [] + defp detect_kind(module) do + protocol_metadata = Module.get_attribute(module, :protocol_impl) + + cond do + is_list(protocol_metadata) and protocol_metadata[:protocol] -> + {:impl, protocol_metadata[:protocol]} + is_list(Module.get_attribute(module, :protocol)) -> + :protocol + true -> + :module + end end - defp each_file(file) do - Mix.shell.info "Compiled #{file}" + defp get_external_resources(module, cwd) do + for file <- Module.get_attribute(module, :external_resource), + do: Path.relative_to(file, cwd) + end + + defp each_file(source) do + Mix.shell.info "Compiled #{source}" + end + + defp each_long_compilation(source, threshold) do + Mix.shell.info "Compiling #{source} (it's taking more than #{threshold}s)" end ## Resolution + defp update_stale_sources(sources, removed, changed) do + # Remove delete sources + sources = + Enum.reduce(removed, sources, &List.keydelete(&2, &1, source(:source))) + # Store empty sources for the changed ones as the compiler appends data + sources = + Enum.reduce(changed, sources, &List.keystore(&2, &1, source(:source), source(source: &1))) + sources + end + # This function receives the manifest entries and some source # files that have changed. It then, recursively, figures out - # all the files that changed (thanks to the dependencies) and - # return their sources as the remaining entries. - defp remove_stale_entries(all, []) do - {all, []} + # all the files that changed (via the module dependencies) and + # return the non-changed entries and the removed sources. 
+ defp update_stale_entries(modules, _sources, [], stale) when stale == %{} do + {modules, []} end - defp remove_stale_entries(all, changed) do - remove_stale_entries(all, :lists.usort(changed), [], []) + defp update_stale_entries(modules, sources, changed, stale) do + changed = Enum.into(changed, %{}, &{&1, true}) + remove_stale_entries(modules, sources, stale, changed) end - defp remove_stale_entries([{beam, module, source, _d, _f} = entry|t], changed, removed, acc) do - if source in changed do - File.rm(beam) - remove_stale_entries(t, changed, [module|removed], acc) + defp remove_stale_entries(modules, sources, old_stale, old_changed) do + {rest, new_stale, new_changed} = + Enum.reduce modules, {[], old_stale, old_changed}, &remove_stale_entry(&1, &2, sources) + + if map_size(new_stale) > map_size(old_stale) or + map_size(new_changed) > map_size(old_changed) do + remove_stale_entries(rest, sources, new_stale, new_changed) else - remove_stale_entries(t, changed, removed, [entry|acc]) + {rest, Map.keys(new_changed)} + end + end + + defp remove_stale_entry(module(module: module, beam: beam, sources: sources) = entry, + {rest, stale, changed}, sources_records) do + {compile_references, runtime_references} = + Enum.reduce(sources, {[], []}, fn source, {compile_acc, runtime_acc} -> + source(compile_references: compile_refs, runtime_references: runtime_refs) = + List.keyfind(sources_records, source, source(:source)) + {compile_refs ++ compile_acc, runtime_refs ++ runtime_acc} + end) + + cond do + # If I changed in disk or have a compile time reference to + # something stale, I need to be recompiled. + has_any_key?(changed, sources) or has_any_key?(stale, compile_references) -> + remove_and_purge(beam, module) + {rest, + Map.put(stale, module, true), + Enum.reduce(sources, changed, &Map.put(&2, &1, true))} + + # If I have a runtime references to something stale, + # I am stale too. + has_any_key?(stale, runtime_references) -> + {[entry | rest], Map.put(stale, module, true), changed} + + # Otherwise, we don't store it anywhere + true -> + {[entry | rest], stale, changed} end end - defp remove_stale_entries([], changed, removed, acc) do - # If any of the dependencies for the remaining entries - # were removed, get its source so we can remove them. - next_changed = for {_b, _m, source, deps, _f} <- acc, - Enum.any?(deps, &(&1 in removed)), - do: source + defp has_any_key?(map, enumerable) do + Enum.any?(enumerable, &Map.has_key?(map, &1)) + end + + defp stale_local_deps(manifest, modified) do + base = Path.basename(manifest) + for %{scm: scm, opts: opts} = dep <- Mix.Dep.cached(), + not scm.fetchable?, + Mix.Utils.last_modified(Path.join(opts[:build], base)) > modified, + path <- Mix.Dep.load_paths(dep), + beam <- Path.wildcard(Path.join(path, "*.beam")), + Mix.Utils.last_modified(beam) > modified, + do: {beam |> Path.basename |> Path.rootname |> String.to_atom, true}, + into: %{} + end - {acc, next} = remove_stale_entries(Enum.reverse(acc), next_changed) - {acc, next ++ changed} + defp remove_and_purge(beam, module) do + _ = File.rm(beam) + _ = :code.purge(module) + _ = :code.delete(module) end ## Manifest handling - # Reads the manifest returning the results as tuples. - # The beam files are read, removed and stored in memory. 
- defp read_manifest(manifest) do - case File.read(manifest) do - {:ok, contents} -> - Enum.reduce String.split(contents, "\n"), [], fn x, acc -> - case String.split(x, "\t") do - [beam, module, source|deps] -> - {deps, files} = - case Enum.split_while(deps, &(&1 != "Elixir")) do - {deps, ["Elixir"|files]} -> {deps, files} - {deps, _} -> {deps, []} - end - [{beam, module, source, deps, files}|acc] - _ -> - acc - end - end - {:error, _} -> - [] + # Similar to read_manifest, but supports data migration. + defp parse_manifest(manifest, compile_path) do + try do + manifest |> File.read!() |> :erlang.binary_to_term() + rescue + _ -> {[], []} + else + [@manifest_vsn | data] -> do_parse_manifest(data, compile_path) + _ -> {[], []} end end - # Writes the manifest separating entries by tabs. - defp write_manifest(_manifest, []) do - :ok + defp do_parse_manifest(data, compile_path) do + Enum.reduce(data, {[], []}, fn + module() = module, {modules, sources} -> + {[expand_beam_path(module, compile_path) | modules], sources} + source() = source, {modules, sources} -> + {modules, [source | sources]} + end) + end + + defp expand_beam_path(module(beam: beam) = module, compile_path) do + module(module, beam: Path.join(compile_path, beam)) end - defp write_manifest(manifest, entries) do - lines = Enum.map(entries, fn - {beam, module, source, deps, files, binary} -> - if binary, do: File.write!(beam, binary) - tail = deps ++ ["Elixir"] ++ files - [beam, module, source | tail] |> Enum.join("\t") + defp expand_beam_paths(modules, ""), do: modules + defp expand_beam_paths(modules, compile_path) do + Enum.map(modules, fn + module() = module -> + expand_beam_path(module, compile_path) + other -> + other end) + end + defp write_manifest(manifest, [], [], _compile_path, _timestamp) do + File.rm(manifest) + :ok + end + + defp write_manifest(manifest, modules, sources, compile_path, timestamp) do File.mkdir_p!(Path.dirname(manifest)) - File.write!(manifest, Enum.join(lines, "\n")) + + modules = + for module(binary: binary, module: module) = entry <- modules do + beam = Atom.to_string(module) <> ".beam" + if binary do + beam_path = Path.join(compile_path, beam) + File.write!(beam_path, binary) + File.touch!(beam_path, timestamp) + end + module(entry, binary: nil, beam: beam) + end + + manifest_data = + [@manifest_vsn | modules ++ sources] + |> :erlang.term_to_binary([:compressed]) + + File.write!(manifest, manifest_data) + File.touch!(manifest, timestamp) + + # Since Elixir is a dependency itself, we need to touch the lock + # so the current Elixir version, used to compile the files above, + # is properly stored. + Mix.Dep.ElixirSCM.update end end diff --git a/lib/mix/lib/mix/compilers/erlang.ex b/lib/mix/lib/mix/compilers/erlang.ex index 12fba983bdd..1ca4501b92f 100644 --- a/lib/mix/lib/mix/compilers/erlang.ex +++ b/lib/mix/lib/mix/compilers/erlang.ex @@ -2,11 +2,13 @@ defmodule Mix.Compilers.Erlang do @moduledoc false @doc """ - Compiles the files in `src_dirs` with given extensions into + Compiles the files in `mappings` with given extensions into the destination, automatically invoking the callback for each - stale input and output pair (or for all if `force` is true) and + stale input and output pair (or for all if `force` is `true`) and removing files that no longer have a source, while keeping the - manifest up to date. + `manifest` up to date. + + `mappings` should be a list of tuples in the form of `{src, dest}` paths. 
## Examples @@ -16,7 +18,7 @@ defmodule Mix.Compilers.Erlang do manifest = Path.join Mix.Project.manifest_path, ".compile.lfe" dest = Mix.Project.compile_path - compile manifest, [{"src", dest}], :lfe, :beam, opts[:force], fn + compile manifest, [{"src", dest}], :lfe, :beam, opts, fn input, output -> :lfe_comp.file(to_erl_file(input), [output_dir: Path.dirname(output)]) @@ -24,74 +26,118 @@ defmodule Mix.Compilers.Erlang do The command above will: - 1. look for files ending with the `lfe` extension in `src` - and their `beam` counterpart in `ebin` + 1. look for files ending with the `lfe` extension in `src` path + and their `beam` counterpart in `ebin` path - 2. for each stale file (or for all if `force` is true), + 2. for each stale file (or for all if `force` is `true`), invoke the callback passing the calculated input and output 3. update the manifest with the newly compiled outputs - 4. remove any output in the manifest that that does not + 4. remove any output in the manifest that does not have an equivalent source The callback must return `{:ok, mod}` or `:error` in case of error. An error is raised at the end if any of the files failed to compile. """ - def compile(manifest, mappings, src_ext, dest_ext, force, callback) do - files = for {src, dest} <- mappings do - extract_targets(src, src_ext, dest, dest_ext, force) - end |> Enum.concat - compile(manifest, files, callback) + def compile(manifest, mappings, src_ext, dest_ext, force, callback) when is_boolean(force) do + compile(manifest, mappings, src_ext, dest_ext, [force: force], callback) + end + + def compile(manifest, mappings, src_ext, dest_ext, opts, callback) do + force = opts[:force] + files = + for {src, dest} <- mappings do + extract_targets(src, src_ext, dest, dest_ext, force) + end |> Enum.concat + compile(manifest, files, src_ext, opts, callback) end @doc """ - Compiles the given src/dest tuples. + Compiles the given `mappings`. + + `mappings` should be a list of tuples in the form of `{src, dest}`. - A manifest file and a callback to be invoked for each src/dest pair - must be given. A src/dest pair where destination is nil is considered + A `manifest` file and a `callback` to be invoked for each src/dest pair + must be given. A src/dest pair where destination is `nil` is considered to be up to date and won't be (re-)compiled. """ - def compile(manifest, tuples, callback) do - stale = for {:stale, src, dest} <- tuples, do: {src, dest} + def compile(manifest, mappings, opts \\ [], callback) do + compile(manifest, mappings, :erl, opts, callback) + end + + defp compile(manifest, mappings, ext, opts, callback) do + stale = for {:stale, src, dest} <- mappings, do: {src, dest} # Get the previous entries from the manifest + timestamp = :calendar.universal_time() entries = read_manifest(manifest) # Files to remove are the ones in the manifest # but they no longer have a source removed = Enum.filter(entries, fn entry -> - not Enum.any?(tuples, fn {_status, _src, dest} -> dest == entry end) + not Enum.any?(mappings, fn {_status, _src, dest} -> dest == entry end) end) if stale == [] && removed == [] do :noop else - # Build the project structure so we can write down compiled files. - Mix.Project.build_structure + Mix.Utils.compiling_n(length(stale), ext) + Mix.Project.ensure_structure() + + # Let's prepend the newly created path so compiled files + # can be accessed still during compilation (for behaviours + # and what not). 
+ Code.prepend_path(Mix.Project.compile_path) # Remove manifest entries with no source Enum.each(removed, &File.rm/1) + verbose = opts[:verbose] # Compile stale files and print the results - results = for {input, output} <- stale do - interpret_result(input, callback.(input, output)) - end + results = + for {input, output} <- stale do + result = callback.(input, output) + + with {:ok, _} <- result do + File.touch!(output, timestamp) + verbose && Mix.shell.info "Compiled #{input}" + end + + result + end # Write final entries to manifest entries = (entries -- removed) ++ Enum.map(stale, &elem(&1, 1)) - write_manifest(manifest, :lists.usort(entries)) + write_manifest(manifest, :lists.usort(entries), timestamp) # Raise if any error, return :ok otherwise - if :error in results, do: raise CompileError + if :error in results do + Mix.raise "Encountered compilation errors" + end :ok end end @doc """ - Removes compiled files. + Ensures the native Erlang application is available. + """ + def ensure_application!(app, input) do + case Application.ensure_all_started(app) do + {:ok, _} -> + :ok + {:error, _} -> + Mix.raise "Could not compile #{inspect Path.relative_to_cwd(input)} because " <> + "the application \"#{app}\" could not be found. This may happen if " <> + "your package manager broke Erlang into multiple packages and may " <> + "be fixed by installing the missing \"erlang-dev\" and \"erlang-#{app}\" packages" + end + end + + @doc """ + Removes compiled files for the given `manifest`. """ def clean(manifest) do Enum.each read_manifest(manifest), &File.rm/1 @@ -99,11 +145,26 @@ defmodule Mix.Compilers.Erlang do end @doc """ - Converts the given file to a format accepted by + Converts the given `file` to a format accepted by the Erlang compilation tools. """ def to_erl_file(file) do - to_char_list(file) + to_charlist(file) + end + + @doc """ + Asserts that the `:erlc_paths` configuration option that many Mix tasks + rely on is valid. + + Raises a `Mix.Error` exception if the option is not valid, returns `:ok` + otherwise. + """ + def assert_valid_erlc_paths(erlc_paths) do + if is_list(erlc_paths) do + :ok + else + Mix.raise ":erlc_paths should be a list of paths, got: #{inspect(erlc_paths)}" + end end defp extract_targets(src_dir, src_ext, dest_dir, dest_ext, force) do @@ -125,14 +186,6 @@ defmodule Mix.Compilers.Erlang do artifact |> Path.basename |> Path.rootname end - defp interpret_result(file, result) do - case result do - {:ok, _} -> Mix.shell.info "Compiled #{file}" - :error -> :error - end - result - end - defp read_manifest(file) do case File.read(file) do {:ok, contents} -> String.split(contents, "\n") @@ -140,8 +193,9 @@ defmodule Mix.Compilers.Erlang do end end - defp write_manifest(file, entries) do + defp write_manifest(file, entries, timestamp) do Path.dirname(file) |> File.mkdir_p! File.write!(file, Enum.join(entries, "\n")) + File.touch!(file, timestamp) end end diff --git a/lib/mix/lib/mix/compilers/test.ex b/lib/mix/lib/mix/compilers/test.ex new file mode 100644 index 00000000000..94e8d4e1a5a --- /dev/null +++ b/lib/mix/lib/mix/compilers/test.ex @@ -0,0 +1,274 @@ +defmodule Mix.Compilers.Test do + @moduledoc false + + require Mix.Compilers.Elixir, as: CE + + import Record + + defrecordp :source, [ + source: nil, + compile_references: [], + runtime_references: [], + external: [] + ] + + @stale_manifest ".compile.test_stale" + @manifest_vsn :v1 + + @doc """ + Requires and runs test files. 
+ + It expects all of the test patterns, the test files that were matched for the + test patterns, the test paths, and the opts from the test task. + """ + def require_and_run(test_patterns, matched_test_files, test_paths, opts) do + stale = opts[:stale] + + {test_files_to_run, stale_manifest_pid, parallel_require_callbacks} = + if stale do + set_up_stale(matched_test_files, test_paths, opts) + else + {matched_test_files, nil, []} + end + + case test_files_to_run do + [] when stale -> + Mix.shell.info "No stale tests." + :noop + + [] when test_patterns == [] -> + Mix.shell.info "There are no tests to run" + :noop + + [] -> + Mix.shell.error "Test patterns did not match any file: " <> Enum.join(test_patterns, ", ") + :noop + + test_files -> + try do + task = Task.async(ExUnit, :run, []) + Kernel.ParallelRequire.files(test_files, parallel_require_callbacks) + ExUnit.Server.cases_loaded() + %{failures: failures} = results = Task.await(task, :infinity) + + if failures == 0 do + agent_write_manifest(stale_manifest_pid) + end + + {:ok, results} + after + agent_stop(stale_manifest_pid) + end + end + end + + defp set_up_stale(matched_test_files, test_paths, opts) do + manifest = manifest() + modified = Mix.Utils.last_modified(manifest) + all_sources = read_manifest() + + removed = + for source(source: source) <- all_sources, + not(source in matched_test_files), + do: source + + configs = Mix.Project.config_files + force = opts[:force] || Mix.Utils.stale?(configs, [manifest]) || test_helper_stale?(test_paths) + + changed = + if force do + # let's just require everything + matched_test_files + else + sources_mtimes = mtimes(all_sources) + + # Otherwise let's start with the new sources + for(source <- matched_test_files, + not List.keymember?(all_sources, source, source(:source)), + do: source) + ++ + # Plus the sources that have changed in disk + for(source(source: source, external: external) <- all_sources, + times = Enum.map([source | external], &Map.fetch!(sources_mtimes, &1)), + Mix.Utils.stale?(times, [modified]), + do: source) + end + + stale = MapSet.new(changed -- removed) + sources = update_stale_sources(all_sources, removed, changed) + + test_files_to_run = + sources + |> tests_with_changed_references() + |> MapSet.union(stale) + |> MapSet.to_list() + + if test_files_to_run == [] do + write_manifest(sources) + {[], nil, nil} + else + {:ok, pid} = Agent.start_link(fn -> sources end) + cwd = File.cwd!() + parallel_require_callbacks = [each_module: &each_module(pid, cwd, &1, &2, &3)] + {test_files_to_run, pid, parallel_require_callbacks} + end + end + + defp agent_write_manifest(nil), + do: :noop + + defp agent_write_manifest(pid) do + Agent.cast pid, fn sources -> + write_manifest(sources) + sources + end + end + + defp agent_stop(nil), + do: :noop + + defp agent_stop(pid) do + Agent.stop(pid, :normal, :infinity) + end + + ## Setup helpers + + defp test_helper_stale?(test_paths) do + test_paths + |> Enum.map(&Path.join(&1, "test_helper.exs")) + |> Mix.Utils.stale?([manifest()]) + end + + defp mtimes(sources) do + Enum.reduce(sources, %{}, fn source(source: source, external: external), map -> + Enum.reduce([source | external], map, fn file, map -> + Map.put_new_lazy(map, file, fn -> Mix.Utils.last_modified(file) end) + end) + end) + end + + defp update_stale_sources(sources, removed, changed) do + sources = + Enum.reject(sources, fn source(source: source) -> source in removed end) + sources = + Enum.reduce(changed, sources, &List.keystore(&2, &1, source(:source), source(source: &1))) + 
sources + end + + ## Manifest + + defp manifest, do: Path.join(Mix.Project.manifest_path, @stale_manifest) + + defp read_manifest() do + try do + [@manifest_vsn | sources] = + manifest() |> File.read!() |> :erlang.binary_to_term() + sources + rescue + _ -> [] + end + end + + defp write_manifest([]) do + manifest() + |> File.rm() + + :ok + end + + defp write_manifest(sources) do + manifest = manifest() + + manifest + |> Path.dirname() + |> File.mkdir_p!() + + manifest_data = + [@manifest_vsn | sources] + |> :erlang.term_to_binary([:compressed]) + + File.write!(manifest, manifest_data) + end + + ## Test changed dependency resolution + + defp tests_with_changed_references(test_sources) do + test_manifest = manifest() + [elixir_manifest] = Mix.Tasks.Compile.Elixir.manifests() + + if Mix.Utils.stale?([elixir_manifest], [test_manifest]) do + elixir_manifest_entries = + CE.read_manifest(elixir_manifest, Mix.Project.compile_path()) + |> Enum.group_by(&elem(&1, 0)) + + stale_modules = + for CE.module(module: module, beam: beam) <- elixir_manifest_entries.module, + Mix.Utils.stale?([beam], [test_manifest]), + do: module, + into: MapSet.new() + + stale_modules = find_all_dependant_on(stale_modules, elixir_manifest_entries.source, elixir_manifest_entries.module) + + for module <- stale_modules, + source(source: source, runtime_references: r, compile_references: c) <- test_sources, + module in r or module in c, + do: source, + into: MapSet.new() + else + MapSet.new() + end + end + + defp find_all_dependant_on(modules, sources, all_modules, resolved \\ MapSet.new()) do + new_modules = + for module <- modules, + module not in resolved, + dependant_module <- dependant_modules(module, all_modules, sources), + do: dependant_module, + into: modules + + if MapSet.size(new_modules) == MapSet.size(modules) do + new_modules + else + find_all_dependant_on(new_modules, sources, all_modules, modules) + end + end + + defp dependant_modules(module, modules, sources) do + for CE.source(source: source, runtime_references: r, compile_references: c) <- sources, + module in r or module in c, + CE.module(sources: sources, module: dependant_module) <- modules, + source in sources, + do: dependant_module + end + + ## ParallelRequire callback + + defp each_module(pid, cwd, source, module, _binary) do + {compile_references, runtime_references} = Kernel.LexicalTracker.remote_references(module) + external = get_external_resources(module, cwd) + source = Path.relative_to(source, cwd) + + Agent.cast pid, fn sources -> + external = + case List.keyfind(sources, source, source(:source)) do + source(external: old_external) -> external ++ old_external + nil -> external + end + + new_source = source( + source: source, + compile_references: compile_references, + runtime_references: runtime_references, + external: external + ) + + List.keystore(sources, source, source(:source), new_source) + end + end + + defp get_external_resources(module, cwd) do + for file <- Module.get_attribute(module, :external_resource), + do: Path.relative_to(file, cwd) + end +end diff --git a/lib/mix/lib/mix/config.ex b/lib/mix/lib/mix/config.ex index 0586997f2af..7900f5d210c 100644 --- a/lib/mix/lib/mix/config.ex +++ b/lib/mix/lib/mix/config.ex @@ -18,6 +18,16 @@ defmodule Mix.Config do Furthermore, this module provides functions like `read!/1`, `merge/2` and friends which help manipulate configurations in general. 
+ + Configuration set using `Mix.Config` will set the application env, so + that `Application.get_env/3` and other `Application` functions can be used + at run or compile time to retrieve or change the configuration. + + For example, the `:key1` value from application `:plug` (see above) can be + retrieved with: + + "value1" = Application.fetch_env!(:plug, :key1) + """ defmodule LoadError do @@ -33,13 +43,16 @@ defmodule Mix.Config do defmacro __using__(_) do quote do import Mix.Config, only: [config: 2, config: 3, import_config: 1] - var!(config, Mix.Config) = [] + {:ok, agent} = Mix.Config.Agent.start_link + var!(config_agent, Mix.Config) = agent end end @doc """ Configures the given application. + Keyword lists are always deep merged. + ## Examples The given `opts` are merged into the existing configuration @@ -58,49 +71,66 @@ defmodule Mix.Config do [log_level: :info, mode: :truncate, threshold: 1024] + This final configuration can be retrieved at run or compile time: + + Application.get_all_env(:lager) + """ defmacro config(app, opts) do quote do - var!(config, Mix.Config) = - Mix.Config.merge(var!(config, Mix.Config), [{unquote(app), unquote(opts)}]) + Mix.Config.Agent.merge var!(config_agent, Mix.Config), [{unquote(app), unquote(opts)}] end end @doc """ Configures the given key for the given application. + Keyword lists are always deep merged. + ## Examples The given `opts` are merged into the existing values for `key` in the given `app`. Conflicting keys are overridden by the - ones specified in `opts`. For example, the declaration below: + ones specified in `opts`. For example, given the two configurations + below: config :ecto, Repo, - log_level: :warn + log_level: :warn, + adapter: Ecto.Adapters.Postgres config :ecto, Repo, log_level: :info, pool_size: 10 - Will have a final value for `Repo` of: + the final value of the configuration for the `Repo` key in the `:ecto` + application will be: - [log_level: :info, pool_size: 10] + [log_level: :info, pool_size: 10, adapter: Ecto.Adapters.Postgres] + + This final value can be retrieved at runtime or compile time with: + + Application.get_env(:ecto, Repo) """ defmacro config(app, key, opts) do quote do - var!(config, Mix.Config) = - Mix.Config.merge(var!(config, Mix.Config), - [{unquote(app), [{unquote(key), unquote(opts)}]}], - fn _app, _key, v1, v2 -> Keyword.merge(v1, v2) end) + Mix.Config.Agent.merge var!(config_agent, Mix.Config), + [{unquote(app), [{unquote(key), unquote(opts)}]}] end end @doc ~S""" - Imports configuration from the given file. + Imports configuration from the given file or files. + + If `path_or_wildcard` is a wildcard, then all the files + matching that wildcard will be imported; if no file matches + the wildcard, no errors are raised. If `path_or_wildcard` is + not a wildcard but a path to a single file, then that file is + imported; in case the file doesn't exist, an error is raised. + This behaviour is analogous to the one for `read_wildcard!/1`. - The path is expected to be related to the directory the - current configuration file is on. + If path/wildcard is a relative path/wildcard, it will be expanded relatively + to the directory the current configuration file is in. 
## Examples @@ -110,45 +140,107 @@ defmodule Mix.Config do Or to import files from children in umbrella projects: - import_config "../apps/child/config/config.exs" + import_config "../apps/*/config/config.exs" """ - defmacro import_config(file) do + defmacro import_config(path_or_wildcard) do + loaded_paths_quote = + unless {:loaded_paths, Mix.Config} in __CALLER__.vars do + quote do + var!(loaded_paths, Mix.Config) = [__ENV__.file] + end + end + quote do - var!(config, Mix.Config) = - Mix.Config.merge(var!(config, Mix.Config), - Mix.Config.read!(Path.expand(unquote(file), __DIR__))) + unquote(loaded_paths_quote) + Mix.Config.Agent.merge( + var!(config_agent, Mix.Config), + Mix.Config.read_wildcard!(Path.expand(unquote(path_or_wildcard), __DIR__), var!(loaded_paths, Mix.Config)) + ) end end @doc """ Reads and validates a configuration file. + + `file` is the path to the configuration file to be read. If that file doesn't + exist or if there's an error loading it, a `Mix.Config.LoadError` exception + will be raised. + + `loaded_paths` is a list of configuration files that have been previously + read. If `file` exists in `loaded_paths`, a `Mix.Config.LoadError` exception + will be raised. """ - def read!(file) do + def read!(file, loaded_paths \\ []) do try do - {config, binding} = Code.eval_file(file) - config = - case List.keyfind(binding, {:config, Mix.Config}, 0) do - {_, value} -> value - nil -> config - end + if file in loaded_paths do + raise ArgumentError, message: "recursive load of #{file} detected" + end + + {config, binding} = Code.eval_string File.read!(file), [{{:loaded_paths, Mix.Config}, [file | loaded_paths]}], [file: file, line: 1] + + config = case List.keyfind(binding, {:config_agent, Mix.Config}, 0) do + {_, agent} -> get_config_and_stop_agent(agent) + nil -> config + end + validate!(config) config rescue e in [LoadError] -> reraise(e, System.stacktrace) - e -> raise LoadError, file: file, error: e + e -> reraise(LoadError, [file: file, error: e], System.stacktrace) end end + defp get_config_and_stop_agent(agent) do + config = Mix.Config.Agent.get(agent) + Mix.Config.Agent.stop(agent) + config + end + + @doc """ + Reads many configuration files given by wildcard into a single config. + + Raises an error if `path` is a concrete filename (with no wildcards) + but the corresponding file does not exist; if `path` matches no files, + no errors are raised. + + `loaded_paths` is a list of configuration files that have been previously + read. + """ + def read_wildcard!(path, loaded_paths \\ []) do + paths = if String.contains?(path, ~w(* ? [ {))do + Path.wildcard(path) + else + [path] + end + Enum.reduce(paths, [], &merge(&2, read!(&1, loaded_paths))) + end + @doc """ Persists the given configuration by modifying the configured applications environment. + + `config` should be a list of `{app, app_config}` tuples or a + `%{app => app_config}` map where `app` are the applications to + be configured and `app_config` are the configuration (as key-value + pairs) for each of those applications. + + Returns the configured applications. 
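  (A small illustration of `read_wildcard!/1` defined above, with hypothetical
  paths: a pattern that matches nothing simply yields an empty configuration,
  while a concrete path that does not exist raises.)

      Mix.Config.read_wildcard!("apps/*/config/config.exs")  #=> [] when nothing matches
      Mix.Config.read_wildcard!("config/missing.exs")        # raises Mix.Config.LoadError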
+ + ## Examples + + Mix.Config.persist(logger: [level: :error], my_app: [my_config: 1]) + #=> [:logger, :my_app] + """ def persist(config) do - for {app, kw} <- config, {k, v} <- kw do - :application.set_env(app, k, v, persistent: true) + for {app, kw} <- config do + for {k, v} <- kw do + Application.put_env(app, k, v, persistent: true) + end + app end - :ok end @doc """ @@ -191,34 +283,15 @@ defmodule Mix.Config do """ def merge(config1, config2) do Keyword.merge(config1, config2, fn _, app1, app2 -> - Keyword.merge(app1, app2) + Keyword.merge(app1, app2, &deep_merge/3) end) end - @doc """ - Merges two configurations. - - The configuration of each application is merged together - and a callback is invoked in case of conflicts receiving - the app, the conflicting key and both values. It must return - a value that will be used as part of the conflict resolution. - - ## Examples - - iex> Mix.Config.merge([app: [k: :v1]], [app: [k: :v2]], - ...> fn app, k, v1, v2 -> {app, k, v1, v2} end) - [app: [k: {:app, :k, :v1, :v2}]] - - """ - def merge(config1, config2, callback) do - Keyword.merge(config1, config2, fn app, app1, app2 -> - Keyword.merge(app1, app2, fn k, v1, v2 -> - if v1 == v2 do - v1 - else - callback.(app, k, v1, v2) - end - end) - end) + defp deep_merge(_key, value1, value2) do + if Keyword.keyword?(value1) and Keyword.keyword?(value2) do + Keyword.merge(value1, value2, &deep_merge/3) + else + value2 + end end end diff --git a/lib/mix/lib/mix/config/agent.ex b/lib/mix/lib/mix/config/agent.ex new file mode 100644 index 00000000000..2a55d198daa --- /dev/null +++ b/lib/mix/lib/mix/config/agent.ex @@ -0,0 +1,25 @@ +defmodule Mix.Config.Agent do + @moduledoc false + + @typep config :: Keyword.t + + @spec start_link() :: {:ok, pid} + def start_link do + Agent.start_link fn -> [] end + end + + @spec stop(pid) :: :ok + def stop(agent) do + Agent.stop(agent) + end + + @spec get(pid) :: config + def get(agent) do + Agent.get(agent, &(&1)) + end + + @spec merge(pid, config) :: config + def merge(agent, new_config) do + Agent.update(agent, &Mix.Config.merge(&1, new_config)) + end +end diff --git a/lib/mix/lib/mix/dep.ex b/lib/mix/lib/mix/dep.ex index 7848fa2cfa8..7d3e9680a4c 100644 --- a/lib/mix/lib/mix/dep.ex +++ b/lib/mix/lib/mix/dep.ex @@ -2,7 +2,8 @@ defmodule Mix.Dep do @moduledoc false @doc """ - The Mix.Dep a struct keeps information about your project dependencies. + The Mix.Dep struct keeps information about your project dependencies. + It contains: * `scm` - a module representing the source code management tool (SCM) @@ -22,7 +23,7 @@ defmodule Mix.Dep do * `top_level` - true if dependency was defined in the top-level project * `manager` - the project management, possible values: - `:rebar` | `:mix` | `:make` | `nil` + `:rebar` | `:rebar3` | `:mix` | `:make` | `nil` * `from` - path to the file where the dependency was defined @@ -49,18 +50,41 @@ defmodule Mix.Dep do defstruct scm: nil, app: nil, requirement: nil, status: nil, opts: [], deps: [], top_level: false, extra: [], manager: nil, from: nil + @type t :: %__MODULE__{ + scm: module, + app: atom, + requirement: String.t | Regex.t | nil, + status: atom, + opts: Keyword.t, + top_level: boolean, + manager: :rebar | :rebar3 | :mix | :make | nil, + from: String.t, + extra: term} + @doc """ - Returns all children dependencies for the current project, - as well as the defined apps in case of umbrella projects. - The children dependencies returned by this function were - not loaded yet. 
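  (For reference, these structs are built from the `deps` entries of a project's
  `mix.exs`; the names, version and URL below are only illustrative:)

      defp deps do
        [{:gettext, "~> 0.13"},
         {:my_dep, git: "https://github.com/me/my_dep.git", only: :test}]
      end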
+ Returns loaded dependencies from the cache for the current environment. - ## Exceptions + Because the dependencies are cached during deps.loadpaths, + their status may be outdated (for example, `:compile` did not + yet become `:ok`). Therefore it is recommended to not rely + on their status, also given they haven't been checked + against the lock. - This function raises an exception if any of the dependencies - provided in the project are in the wrong format. + If MIX_NO_DEPS is set, we return an empty list of dependencies + without loading them. """ - defdelegate children(otps), to: Mix.Dep.Loader + def cached do + cond do + System.get_env("MIX_NO_DEPS") in ~w(1 true) -> + [] + project = Mix.Project.get -> + key = {:cached_deps, Mix.env, project} + Mix.ProjectStack.read_cache(key) || + Mix.ProjectStack.write_cache(key, loaded(env: Mix.env)) + true -> + loaded(env: Mix.env) + end + end @doc """ Returns loaded dependencies recursively as a `Mix.Dep` struct. @@ -88,9 +112,12 @@ defmodule Mix.Dep do # Ensure all apps are atoms apps = to_app_names(given) - - # We need to keep the order of deps, loaded/1 properly orders them - deps = Enum.filter(all_deps, &(&1.app in apps)) + deps = + if opts[:include_children] do + get_deps_with_children(all_deps, apps) + else + get_deps(all_deps, apps) + end Enum.each apps, fn(app) -> unless Enum.any?(all_deps, &(&1.app == app)) do @@ -101,6 +128,30 @@ defmodule Mix.Dep do deps end + defp get_deps(all_deps, apps) do + Enum.filter(all_deps, &(&1.app in apps)) + end + + defp get_deps_with_children(all_deps, apps) do + deps = get_children(all_deps, apps) + apps = deps |> Enum.map(& &1.app) |> Enum.uniq + get_deps(all_deps, apps) + end + + defp get_children(_all_deps, []), do: [] + defp get_children(all_deps, apps) do + # Current deps + deps = get_deps(all_deps, apps) + + # Children apps + apps = for %{deps: children} <- deps, + %{app: app} <- children, + do: app + + # Current deps + children deps + deps ++ get_children(all_deps, apps) + end + @doc """ Runs the given `fun` inside the given dependency project by changing the current working directory and loading the given @@ -110,13 +161,15 @@ defmodule Mix.Dep do """ def in_dependency(dep, post_config \\ [], fun) - def in_dependency(%Mix.Dep{app: app, opts: opts}, config, fun) do + def in_dependency(%Mix.Dep{app: app, opts: opts, scm: scm}, config, fun) do # Set the app_path to be the one stored in the dependency. # This is important because the name of application in the # mix.exs file can be different than the actual name and we # choose to respect the one in the mix.exs - config = Keyword.merge(Mix.Project.deps_config, config) - config = Keyword.put(config, :app_path, opts[:build]) + config = + Keyword.merge(Mix.Project.deps_config, config) + |> Keyword.put(:app_path, opts[:build]) + |> Keyword.put(:build_scm, scm) env = opts[:env] || :prod old_env = Mix.env @@ -136,110 +189,148 @@ defmodule Mix.Dep do do: "ok" def format_status(%Mix.Dep{status: {:noappfile, path}}), - do: "could not find an app file at #{Path.relative_to_cwd(path)}, " <> - "this may happen when you specified the wrong application name in your deps " <> - "or if the dependency did not compile (which can be amended with `#{mix_env_var}mix deps.compile`)" + do: "could not find an app file at #{inspect(Path.relative_to_cwd(path))}. 
" <> + "This may happen if the dependency was not yet compiled, " <> + "or you specified the wrong application name in your deps, " <> + "or the dependency indeed has no app file (then you can pass app: false as option)" def format_status(%Mix.Dep{status: {:invalidapp, path}}), - do: "the app file at #{Path.relative_to_cwd(path)} is invalid" + do: "the app file at #{inspect(Path.relative_to_cwd(path))} is invalid" def format_status(%Mix.Dep{status: {:invalidvsn, vsn}}), do: "the app file contains an invalid version: #{inspect vsn}" + def format_status(%Mix.Dep{status: {:nosemver, vsn}, requirement: req}), + do: "the app file specified a non-Semantic Versioning format: #{inspect vsn}. Mix can only match the " <> + "requirement #{inspect req} against semantic versions. Please fix the application version " <> + "or use a regex as a requirement to match against any version" + def format_status(%Mix.Dep{status: {:nomatchvsn, vsn}, requirement: req}), do: "the dependency does not match the requirement #{inspect req}, got #{inspect vsn}" def format_status(%Mix.Dep{status: {:lockmismatch, _}}), - do: "lock mismatch: the dependency is out of date" + do: "lock mismatch: the dependency is out of date. To fetch locked version run \"mix deps.get\"" def format_status(%Mix.Dep{status: :lockoutdated}), - do: "lock outdated: the lock is outdated compared to the options in your mixfile" + do: "lock outdated: the lock is outdated compared to the options in your mixfile. To fetch locked version run \"mix deps.get\"" def format_status(%Mix.Dep{status: :nolock}), - do: "the dependency is not locked" + do: "the dependency is not locked. To generate the \"mix.lock\" file run \"mix deps.get\"" def format_status(%Mix.Dep{status: :compile}), - do: "the dependency build is outdated, please run `#{mix_env_var}mix deps.compile`" + do: "the dependency build is outdated, please run \"#{mix_env_var()}mix deps.compile\"" - def format_status(%Mix.Dep{app: app, status: {:divergedreq, other}} = dep) do - "the dependency #{app} defined\n" <> + def format_status(%Mix.Dep{app: app, status: {:divergedreq, vsn, other}} = dep) do + "the dependency #{app} #{vsn}\n" <> "#{dep_status(dep)}" <> "\n does not match the requirement specified\n" <> "#{dep_status(other)}" <> - "\n Ensure they match or specify one of the above in your #{inspect Mix.Project.get} deps and set `override: true`" + "\n Ensure they match or specify one of the above in your deps and set \"override: true\"" + end + + def format_status(%Mix.Dep{app: app, status: {:divergedonly, other}} = dep) do + recommendation = + if Keyword.has_key?(other.opts, :only) do + "Ensure you specify at least the same environments in :only in your dep" + else + "Remove the :only restriction from your dep" + end + + "the :only option for dependency #{app}\n" <> + "#{dep_status(dep)}" <> + "\n does not match the :only option calculated for\n" <> + "#{dep_status(other)}" <> + "\n #{recommendation}" end def format_status(%Mix.Dep{app: app, status: {:diverged, other}} = dep) do "different specs were given for the #{app} app:\n" <> "#{dep_status(dep)}#{dep_status(other)}" <> - "\n Ensure they match or specify one of the above in your #{inspect Mix.Project.get} deps and set `override: true`" + "\n Ensure they match or specify one of the above in your deps and set \"override: true\"" end def format_status(%Mix.Dep{app: app, status: {:overridden, other}} = dep) do "the dependency #{app} in #{Path.relative_to_cwd(dep.from)} is overriding a child dependency:\n" <> 
"#{dep_status(dep)}#{dep_status(other)}" <> - "\n Ensure they match or specify one of the above in your #{inspect Mix.Project.get} deps and set `override: true`" + "\n Ensure they match or specify one of the above in your deps and set \"override: true\"" end def format_status(%Mix.Dep{status: {:unavailable, _}, scm: scm}) do if scm.fetchable? do - "the dependency is not available, run `mix deps.get`" + "the dependency is not available, run \"mix deps.get\"" else "the dependency is not available" end end def format_status(%Mix.Dep{status: {:elixirlock, _}}), - do: "the dependency is built with an out-of-date elixir version, run `#{mix_env_var}mix deps.compile`" + do: "the dependency was built with an out-of-date Elixir version, run \"#{mix_env_var()}mix deps.compile\"" + + def format_status(%Mix.Dep{status: {:scmlock, _}}), + do: "the dependency was built with another SCM, run \"#{mix_env_var()}mix deps.compile\"" - defp dep_status(%Mix.Dep{app: app, requirement: req, opts: opts, from: from}) do - info = {app, req, Dict.drop(opts, [:dest, :lock, :env, :build])} + defp dep_status(%Mix.Dep{app: app, requirement: req, manager: manager, opts: opts, from: from}) do + opts = Keyword.drop(opts, [:dest, :build, :lock, :manager, :checkout]) + opts = opts ++ (if manager, do: [manager: manager], else: []) + info = if req, do: {app, req, opts}, else: {app, opts} "\n > In #{Path.relative_to_cwd(from)}:\n #{inspect info}\n" end @doc """ Checks the lock for the given dependency and update its status accordingly. """ - def check_lock(%Mix.Dep{scm: scm, app: app, opts: opts} = dep, lock) do - if rev = lock[app] do - opts = Keyword.put(opts, :lock, rev) - end - + def check_lock(%Mix.Dep{scm: scm, opts: opts} = dep) do if available?(dep) do case scm.lock_status(opts) do :mismatch -> - status = if rev, do: {:lockmismatch, rev}, else: :nolock - %{dep | status: status, opts: opts} + status = if rev = opts[:lock], do: {:lockmismatch, rev}, else: :nolock + %{dep | status: status} :outdated -> # Don't include the lock in the dependency if it is outdated %{dep | status: :lockoutdated} :ok -> - if vsn = old_elixir_lock(dep) do - %{dep | status: {:elixirlock, vsn}, opts: opts} - else - %{dep | opts: opts} - end + check_manifest(dep, opts[:build]) end else - %{dep | opts: opts} + dep + end + end + + defp check_manifest(%{scm: scm} = dep, build_path) do + vsn = {System.version, :erlang.system_info(:otp_release)} + + case Mix.Dep.ElixirSCM.read(build_path) do + {:ok, old_vsn, _} when old_vsn != vsn -> + %{dep | status: {:elixirlock, old_vsn}} + {:ok, _, old_scm} when old_scm != scm -> + %{dep | status: {:scmlock, old_scm}} + _ -> + dep end end @doc """ - Returns true if the dependency is ok. + Returns `true` if the dependency is ok. """ def ok?(%Mix.Dep{status: {:ok, _}}), do: true def ok?(%Mix.Dep{}), do: false @doc """ - Checks if a dependency is available. Available dependencies - are the ones that can be loaded. + Checks if a dependency is available. + + Available dependencies are the ones that can be loaded. + """ + def available?(%Mix.Dep{status: {:unavailable, _}}), do: false + def available?(dep), do: not diverged?(dep) + + @doc """ + Checks if a dependency has diverged. 
""" - def available?(%Mix.Dep{status: {:overridden, _}}), do: false - def available?(%Mix.Dep{status: {:diverged, _}}), do: false - def available?(%Mix.Dep{status: {:divergedreq, _}}), do: false - def available?(%Mix.Dep{status: {:unavailable, _}}), do: false - def available?(%Mix.Dep{}), do: true + def diverged?(%Mix.Dep{status: {:overridden, _}}), do: true + def diverged?(%Mix.Dep{status: {:diverged, _}}), do: true + def diverged?(%Mix.Dep{status: {:divergedreq, _}}), do: true + def diverged?(%Mix.Dep{status: {:divergedonly, _}}), do: true + def diverged?(%Mix.Dep{}), do: false @doc """ Formats a dependency for printing. @@ -261,8 +352,8 @@ defmodule Mix.Dep do """ def load_paths(%Mix.Dep{opts: opts} = dep) do build_path = Path.dirname(opts[:build]) - Enum.map source_paths(dep), fn path -> - Path.join [build_path, Path.basename(path), "ebin"] + Enum.map source_paths(dep), fn {_, base} -> + Path.join [build_path, base, "ebin"] end end @@ -270,40 +361,40 @@ defmodule Mix.Dep do Returns all source paths. Source paths are the directories that contains ebin files for a given - dependency. All managers, except rebar, have only one source path. + dependency. All managers, except `:rebar`, have only one source path. """ - def source_paths(%Mix.Dep{manager: :rebar, opts: opts, extra: extra}) do + def source_paths(%Mix.Dep{manager: :rebar, app: app, opts: opts, extra: extra}) do + sub_dirs = extra[:sub_dirs] || [] + dest = opts[:dest] + # Add root dir and all sub dirs with ebin/ directory - sub_dirs = Enum.map(extra[:sub_dirs] || [], fn path -> - Path.join(opts[:dest], path) - end) - - [opts[:dest] | sub_dirs] - |> Enum.map(&Path.wildcard(&1)) - |> Enum.concat - |> Enum.filter(fn p -> p |> Path.join("ebin") |> File.dir? end) + [{opts[:dest], Atom.to_string(app)}] ++ + for(sub_dir <- sub_dirs, + path <- Path.wildcard(Path.join(dest, sub_dir)), + File.dir?(Path.join(path, "ebin")), + do: {path, Path.basename(path)}) end - def source_paths(%Mix.Dep{opts: opts}) do - [opts[:dest]] + def source_paths(%Mix.Dep{app: app, opts: opts}) do + [{opts[:dest], Atom.to_string(app)}] end @doc """ - Return `true` if dependency is a mix project. + Returns `true` if dependency is a Mix project. """ def mix?(%Mix.Dep{manager: manager}) do manager == :mix end @doc """ - Return `true` if dependency is a rebar project. + Returns `true` if dependency is a Rebar project. """ def rebar?(%Mix.Dep{manager: manager}) do - manager == :rebar + manager in [:rebar, :rebar3] end @doc """ - Return `true` if dependency is a make project. + Returns `true` if dependency is a Make project. """ def make?(%Mix.Dep{manager: manager}) do manager == :make @@ -324,11 +415,4 @@ defmodule Mix.Dep do if is_binary(app), do: String.to_atom(app), else: app end end - - defp old_elixir_lock(%Mix.Dep{opts: opts}) do - old_vsn = Mix.Dep.Lock.elixir_vsn(opts[:build]) - if old_vsn && old_vsn != System.version do - old_vsn - end - end end diff --git a/lib/mix/lib/mix/dep/converger.ex b/lib/mix/lib/mix/dep/converger.ex index f8b2316fbf3..8253b8a9357 100644 --- a/lib/mix/lib/mix/dep/converger.ex +++ b/lib/mix/lib/mix/dep/converger.ex @@ -5,9 +5,9 @@ defmodule Mix.Dep.Converger do @moduledoc false @doc """ - Topsorts the given dependencies. + Topologically sorts the given dependencies. 
""" - def topsort(deps) do + def topological_sort(deps) do graph = :digraph.new try do @@ -16,8 +16,11 @@ defmodule Mix.Dep.Converger do end) Enum.each(deps, fn %Mix.Dep{app: app, deps: other_deps} -> - Enum.each(other_deps, fn %Mix.Dep{app: other_app} -> - :digraph.add_edge(graph, other_app, app) + Enum.each(other_deps, fn + %Mix.Dep{app: ^app} -> + Mix.raise "App #{app} lists itself as a dependency" + %Mix.Dep{app: other_app} -> + :digraph.add_edge(graph, other_app, app) end) end) @@ -26,7 +29,7 @@ defmodule Mix.Dep.Converger do Enum.find(deps, fn(%Mix.Dep{app: other_app}) -> app == other_app end) end) else - Mix.raise "Could not sort dependencies. There are cycles in the dependency graph." + Mix.raise "Could not sort dependencies. There are cycles in the dependency graph" end after :digraph.delete(graph) @@ -45,68 +48,87 @@ defmodule Mix.Dep.Converger do """ def converge(acc, lock, opts, callback) do {deps, acc, lock} = all(acc, lock, opts, callback) - {topsort(deps), acc, lock} + if remote = Mix.RemoteConverger.get, do: remote.post_converge() + {topological_sort(deps), acc, lock} end defp all(acc, lock, opts, callback) do - main = Mix.Dep.Loader.children(opts) + main = Mix.Dep.Loader.children() main = Enum.map(main, &(%{&1 | top_level: true})) apps = Enum.map(main, &(&1.app)) + lock_given? = !!lock + env = opts[:env] + + # If no lock was given, let's read one to fill in the deps + lock = lock || Mix.Dep.Lock.read + # Run converger for all dependencies, except remote # dependencies. Since the remote converger may be # lazily loaded, we need to check for it on every # iteration. - {deps, rest, lock} = - all(main, [], [], apps, callback, acc, lock, fn dep -> - if (converger = Mix.RemoteConverger.get) && - converger.remote?(dep) do + all(main, apps, callback, acc, lock, env, fn dep -> + if (remote = Mix.RemoteConverger.get) && remote.remote?(dep) do {:loaded, dep} else {:unloaded, dep, nil} end end) - # Run remote converger if one is available and rerun mix's - # converger with the new information - if converger = Mix.RemoteConverger.get do + # Run remote converger and rerun Mix's converger with the new information. + # Don't run the remote if deps didn't converge, if the remote is not + # available or if none of the deps are handled by the remote. + remote = Mix.RemoteConverger.get + diverged? = Enum.any?(deps, &Mix.Dep.diverged?/1) + use_remote? = !!remote and Enum.any?(deps, &remote.remote?/1) + + if not diverged? and use_remote? do + # Make sure there are no cycles before calling remote converge + topological_sort(deps) + # If there is a lock, it means we are doing a get/update # and we need to hit the remote converger which do external - # requests and what not. In case of deps.check, deps and so + # requests and what not. In case of deps.loadpaths, deps and so # on, there is no lock, so we won't hit this branch. - if lock do - lock = converger.converge(deps, lock) - end + lock = if lock_given?, do: remote.converge(deps, lock), else: lock deps = deps - |> Enum.reject(&converger.remote?(&1)) - |> Enum.into(HashDict.new, &{&1.app, &1}) + |> Enum.reject(&remote.remote?(&1)) + |> Enum.into(%{}, &{&1.app, &1}) - # In case there is no lock, we will read the current lock - # which is potentially stale. So converger.deps/2 needs to - # always check if the data it finds in the lock is actually - # valid. 
- lock_for_converger = lock || Mix.Dep.Lock.read - - all(main, [], [], apps, callback, rest, lock, fn dep -> - cond do - cached = deps[dep.app] -> + # In case no lock was given, we will use the local lock + # which is potentially stale. So remote.deps/2 needs to always + # check if the data it finds in the lock is actually valid. + {deps, rest, lock} = + all(main, apps, callback, rest, lock, env, fn dep -> + if cached = deps[dep.app] do {:loaded, cached} - true -> - {:unloaded, dep, converger.deps(dep, lock_for_converger)} - end - end) + else + {:unloaded, dep, remote.deps(dep, lock)} + end + end) + + {reject_non_fulfilled_optional(deps), rest, lock} else - {deps, rest, lock} + {reject_non_fulfilled_optional(deps), rest, lock} end end - # We traverse the tree of dependencies in a breadth- - # first fashion. The reason for this is that we converge + defp all(main, apps, callback, rest, lock, env, cache) do + {deps, rest, lock} = all(main, [], [], apps, callback, rest, lock, env, cache) + deps = Enum.reverse(deps) + # When traversing dependencies, we keep skipped ones to + # find conflicts. We remove them now after traversal. + {deps, _} = Mix.Dep.Loader.partition_by_env(deps, env) + {deps, rest, lock} + end + + # We traverse the tree of dependencies in a breadth-first + # fashion. The reason for this is that we converge # dependencies, but allow the parent to override any # dependency in the child. Consider this tree with - # dependencies `a`, `b`, etc and the order they are + # dependencies "a", "b", etc and the order they are # converged: # # * project @@ -118,10 +140,10 @@ defmodule Mix.Dep.Converger do # 6) f # 7) d # - # Notice that the `d` dependency exists as a child of `b` - # and child of `f`. In case the dependency is the same, + # Notice that the "d" dependency exists as a child of "b" + # and child of "f". In case the dependency is the same, # we proceed. However, if there is a conflict, for instance - # different git repositories is used as source in each, we + # different Git repositories are used as source in each, we # raise an exception. # # In order to solve such dependencies, we allow the project @@ -138,38 +160,48 @@ defmodule Mix.Dep.Converger do # 8) d # 4) d # - # Now, since `d` was specified in a parent project, no + # Now, since "d" was specified in a parent project, no # exception is going to be raised since d is considered - # to be the authorative source. - defp all([dep|t], acc, upper_breadths, current_breadths, callback, rest, lock, cache) do + # to be the authoritative source. + defp all([dep | t], acc, upper_breadths, current_breadths, callback, rest, lock, env, cache) do cond do new_acc = diverged_deps(acc, upper_breadths, dep) -> - all(t, new_acc, upper_breadths, current_breadths, callback, rest, lock, cache) + all(t, new_acc, upper_breadths, current_breadths, callback, rest, lock, env, cache) + Mix.Dep.Loader.skip?(dep, env) -> + # We still keep skipped dependencies around to detect conflicts. + # They must be rejected after every all iteration. + all(t, [dep | acc], upper_breadths, current_breadths, callback, rest, lock, env, cache) true -> - dep = + {dep, rest, lock} = case cache.(dep) do {:loaded, cached_dep} -> - cached_dep + {cached_dep, rest, lock} {:unloaded, dep, children} -> - {dep, rest, lock} = callback.(dep, rest, lock) + {dep, rest, lock} = callback.(put_lock(dep, lock), rest, lock) # After we invoke the callback (which may actually check out the # dependency), we load the dependency including its latest info # and children information. 
- dep = Mix.Dep.Loader.load(dep) - %{dep | deps: Enum.filter(dep.deps, &(!children || &1.app in children))} + {Mix.Dep.Loader.load(dep, children), rest, lock} end - dep = %{dep | deps: reject_non_fullfilled_optional(dep.deps, current_breadths)} - {acc, rest, lock} = all(t, [dep|acc], upper_breadths, current_breadths, callback, rest, lock, cache) - all(dep.deps, acc, current_breadths, Enum.map(dep.deps, &(&1.app)) ++ current_breadths, callback, rest, lock, cache) + {acc, rest, lock} = + all(t, [dep | acc], upper_breadths, current_breadths, callback, rest, lock, env, cache) + + deps = reject_non_fulfilled_optional(dep.deps, Enum.map(acc, & &1.app)) + new_breadths = Enum.map(deps, &(&1.app)) ++ current_breadths + all(deps, acc, current_breadths, new_breadths, callback, rest, lock, env, cache) end end - defp all([], acc, _upper, _current, _callback, rest, lock, _cache) do + defp all([], acc, _upper, _current, _callback, rest, lock, _env, _cache) do {acc, rest, lock} end + defp put_lock(%Mix.Dep{app: app} = dep, lock) do + put_in dep.opts[:lock], lock[app] + end + # Look for divergence in dependencies. # # If the same dependency is specified more than once, @@ -180,21 +212,26 @@ defmodule Mix.Dep.Converger do # diverges is in the upper breadth, in those cases we # also check for the override option and mark the dependency # as overridden instead of diverged. - defp diverged_deps(list, upper_breadths, dep) do - %Mix.Dep{app: app} = dep + defp diverged_deps(list, upper_breadths, %Mix.Dep{app: app} = dep) do in_upper? = app in upper_breadths {acc, match} = Enum.map_reduce list, false, fn(other, match) -> %Mix.Dep{app: other_app, opts: other_opts} = other + if other_app == app and other.top_level and dep.top_level do + Mix.shell.error "warning: the dependency #{inspect dep.app} is " <> + "duplicated at the top level, please remove one " <> + "of them" + end cond do app != other_app -> {other, match} in_upper? && other_opts[:override] -> - {other, true} + {other |> with_matching_only(dep, in_upper?), true} converge?(other, dep) -> - {with_matching_req(other, dep), true} + {other |> with_matching_only(dep, in_upper?) + |> with_matching_req(dep) |> merge_manager(dep, in_upper?), true} true -> tag = if in_upper?, do: :overridden, else: :diverged {%{other | status: {tag, dep}}, true} @@ -204,23 +241,101 @@ defmodule Mix.Dep.Converger do if match, do: acc end + defp with_matching_only(%{opts: other_opts} = other, %{opts: opts} = dep, in_upper?) do + if opts[:optional] do + other + else + with_matching_only(other, other_opts, dep, opts, in_upper?) + end + end + + # When in_upper is true + # + # When a parent dependency specifies :only that is a subset + # of a child dependency, we are going to abort as the parent + # dependency must explicitly outline a superset of child + # dependencies. + # + # We could resolve such conflicts automatically but, since + # the user has likely written only: :env in their mix.exs + # file, we decided to go with a more explicit approach of + # asking them to change it to avoid later surprises and + # headaches. 
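  # As a concrete (hypothetical) example of the rule above, the pair
  #
  #     {:foo, "~> 1.0", only: :test}           # in the parent project
  #     {:foo, "~> 1.0", only: [:dev, :test]}   # required by one of its deps
  #
  # diverges, because :dev is not covered by the parent's :only. Widening the
  # parent entry to only: [:dev, :test], or dropping :only there, converges.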
+ defp with_matching_only(other, other_opts, dep, opts, true) do + case Keyword.fetch(other_opts, :only) do + {:ok, other_only} -> + case Keyword.fetch(opts, :only) do + {:ok, only} -> + case List.wrap(only) -- List.wrap(other_only) do + [] -> other + _ -> %{other | status: {:divergedonly, dep}} + end + :error -> + %{other | status: {:divergedonly, dep}} + end + :error -> + other + end + end + + # When in_upper is false + # + # In this case, the two dependencies do not have a common path and + # only solution is to merge the environments. We have decided to + # perform it explicitly as, opposite to in_upper above, the + # dependencies are never really laid out in the parent tree. + defp with_matching_only(other, other_opts, _dep, opts, false) do + other_only = Keyword.get(other_opts, :only) + only = Keyword.get(opts, :only) + if other_only && only do + put_in other.opts[:only], Enum.uniq(List.wrap(other_only) ++ List.wrap(only)) + else + %{other | opts: Keyword.delete(other_opts, :only)} + end + end + defp converge?(%Mix.Dep{scm: scm1, opts: opts1}, %Mix.Dep{scm: scm2, opts: opts2}) do - scm1 == scm2 and scm1.equal?(opts1, opts2) + scm1 == scm2 and opts_equal?(opts1, opts2) and scm1.equal?(opts1, opts2) end - defp reject_non_fullfilled_optional(children, upper_breadths) do + defp opts_equal?(opts1, opts2) do + keys = ~w(app env compile)a + Enum.all?(keys, &(Keyword.fetch(opts1, &1) == Keyword.fetch(opts2, &1))) + end + + defp reject_non_fulfilled_optional(deps) do + apps = Enum.map(deps, & &1.app) + for dep <- deps do + update_in dep.deps, &reject_non_fulfilled_optional(&1, apps) + end + end + + defp reject_non_fulfilled_optional(children, upper_breadths) do Enum.reject children, fn %Mix.Dep{app: app, opts: opts} -> opts[:optional] && not(app in upper_breadths) end end + defp merge_manager(%{manager: other_manager} = other, %{manager: manager}, in_upper?) do + %{other | manager: sort_manager(other_manager, manager, in_upper?)} + end + + @managers [:mix, :rebar3, :rebar, :make] + + defp sort_manager(other_manager, manager, true) do + other_manager || manager + end + defp sort_manager(other_manager, manager, false) do + priority = @managers -- (@managers -- (List.wrap(other_manager) ++ List.wrap(manager))) + List.first(priority) || other_manager || manager + end + defp with_matching_req(%Mix.Dep{} = other, %Mix.Dep{} = dep) do case other.status do - {:ok, vsn} when not nil?(vsn) -> - if Mix.Dep.Loader.vsn_match?(dep.requirement, vsn, dep.app) do - other - else - %{other | status: {:divergedreq, dep}} + {:ok, vsn} when not is_nil(vsn) -> + case Mix.Dep.Loader.vsn_match(dep.requirement, vsn, dep.app) do + {:ok, true} -> other + _ -> %{other | status: {:divergedreq, vsn, dep}} end _ -> other diff --git a/lib/mix/lib/mix/dep/elixir_scm.ex b/lib/mix/lib/mix/dep/elixir_scm.ex new file mode 100644 index 00000000000..9f9597d8a14 --- /dev/null +++ b/lib/mix/lib/mix/dep/elixir_scm.ex @@ -0,0 +1,35 @@ +# Manifest file where we treat Elixir and SCMs as a dependency. 
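# (Illustration with hypothetical values: the stored term looks like
#  {:v2, {"1.5.0", '20'}, Mix.SCM.Git}, i.e. the manifest version, the
#  {Elixir version, OTP release} pair, and the SCM the dependency was built with.)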
+defmodule Mix.Dep.ElixirSCM do + @moduledoc false + @manifest ".compile.elixir_scm" + @manifest_vsn :v2 + + def manifest(manifest_path \\ Mix.Project.manifest_path) do + Path.join(manifest_path, @manifest) + end + + def update(manifest_path \\ Mix.Project.manifest_path) do + config = Mix.Project.config + File.mkdir_p!(manifest_path) + + manifest_data = + {@manifest_vsn, {System.version, :erlang.system_info(:otp_release)}, config[:build_scm]} + |> :erlang.term_to_binary() + + File.write!(manifest(manifest_path), manifest_data) + end + + def read(manifest_path \\ Mix.Project.manifest_path) do + case File.read(manifest(manifest_path)) do + {:ok, contents} -> + try do + {@manifest_vsn, vsn, scm} = :erlang.binary_to_term(contents) + {:ok, vsn, scm} + rescue + _ -> {:ok, {"1.0.0", '17'}, nil} + end + _ -> + :error + end + end +end diff --git a/lib/mix/lib/mix/dep/fetcher.ex b/lib/mix/lib/mix/dep/fetcher.ex index 9e42b762ad4..ec19e4bab47 100644 --- a/lib/mix/lib/mix/dep/fetcher.ex +++ b/lib/mix/lib/mix/dep/fetcher.ex @@ -7,7 +7,7 @@ defmodule Mix.Dep.Fetcher do @moduledoc false - import Mix.Dep, only: [format_dep: 1, check_lock: 2, available?: 1, ok?: 1] + import Mix.Dep, only: [format_dep: 1, check_lock: 1, available?: 1] @doc """ Fetches all dependencies. @@ -27,7 +27,7 @@ defmodule Mix.Dep.Fetcher do {apps, deps} = do_finalize(result, old_lock, opts) # Check if all given dependencies are loaded or fail - Mix.Dep.loaded_by_name(names, deps, opts) + _ = Mix.Dep.loaded_by_name(names, deps, opts) apps end @@ -46,12 +46,12 @@ defmodule Mix.Dep.Fetcher do end defp do_fetch(dep, acc, lock) do - %Mix.Dep{app: app, scm: scm, opts: opts} = dep = check_lock(dep, lock) + %Mix.Dep{app: app, scm: scm, opts: opts} = dep = check_lock(dep) cond do # Dependencies that cannot be fetched are always compiled afterwards not scm.fetchable? -> - {dep, [app|acc], lock} + {dep, [app | acc], lock} # If the dependency is not available or we have a lock mismatch out_of_date?(dep) -> @@ -65,7 +65,7 @@ defmodule Mix.Dep.Fetcher do end if new do - {dep, [app|acc], Map.put(lock, app, new)} + {dep, [app | acc], Map.put(lock, app, new)} else {dep, acc, lock} end @@ -86,9 +86,6 @@ defmodule Mix.Dep.Fetcher do # Let's get the loaded versions of deps deps = Mix.Dep.loaded_by_name(apps, all_deps, opts) - # Do not mark dependencies that are not available - deps = Enum.filter(deps, &available?/1) - # Note we only retrieve the parent dependencies of the updated # deps if all dependencies are available. This is because if a # dependency is missing, it could directly affect one of the @@ -98,32 +95,38 @@ defmodule Mix.Dep.Fetcher do # If there is any other dependency that is not ok, we include # it for compilation too, this is our best to try to solve the # maximum we can at each deps.get and deps.update. - if Enum.all?(all_deps, &available?/1) do - deps = with_depending(deps, all_deps) ++ - Enum.filter(all_deps, fn dep -> not ok?(dep) end) - end + deps = + if Enum.all?(all_deps, &available?/1) do + Enum.uniq_by(with_depending(deps, all_deps), &(&1.app)) + else + deps + end # Merge the new lock on top of the old to guarantee we don't # leave out things that could not be fetched and save it. - lock = Dict.merge(old_lock, new_lock) + lock = Map.merge(old_lock, new_lock) Mix.Dep.Lock.write(lock) - mark_as_fetched(deps) + + # See if any of the deps diverged and abort. 
+ show_diverged!(Enum.filter(all_deps, &Mix.Dep.diverged?/1)) + {apps, all_deps} end defp mark_as_fetched(deps) do # If the dependency is fetchable, we are going to write a .fetch # file to it. Each build, regardless of the environment and location, - # will compared against this .fetch file to know if the depednency + # will compared against this .fetch file to know if the dependency # needs recompiling. - for %Mix.Dep{scm: scm, opts: opts} <- deps, scm.fetchable? do - File.touch Path.join opts[:dest], ".fetch" + _ = for %Mix.Dep{scm: scm, opts: opts} <- deps, scm.fetchable? do + File.touch! Path.join opts[:dest], ".fetch" end + :ok end defp with_depending(deps, all_deps) do - (deps ++ do_with_depending(deps, all_deps)) |> Enum.uniq(&(&1.app)) + deps ++ do_with_depending(deps, all_deps) end defp do_with_depending([], _all_deps) do @@ -134,7 +137,7 @@ defmodule Mix.Dep.Fetcher do dep_names = Enum.map(deps, fn dep -> dep.app end) parents = Enum.filter all_deps, fn dep -> - Enum.any?(dep.deps, &(&1 in dep_names)) + Enum.any?(dep.deps, &(&1.app in dep_names)) end do_with_depending(parents, all_deps) ++ parents @@ -145,4 +148,17 @@ defmodule Mix.Dep.Fetcher do if is_binary(app), do: String.to_atom(app), else: app end) end + + defp show_diverged!([]), do: :ok + defp show_diverged!(deps) do + shell = Mix.shell + shell.error "Dependencies have diverged:" + + Enum.each deps, fn(dep) -> + shell.error "* #{Mix.Dep.format_dep dep}" + shell.error " #{Mix.Dep.format_status dep}" + end + + Mix.raise "Can't continue due to errors on dependencies" + end end diff --git a/lib/mix/lib/mix/dep/loader.ex b/lib/mix/lib/mix/dep/loader.ex index febf499b4d7..eba8b86feb6 100644 --- a/lib/mix/lib/mix/dep/loader.ex +++ b/lib/mix/lib/mix/dep/loader.ex @@ -4,90 +4,91 @@ defmodule Mix.Dep.Loader do @moduledoc false + import Mix.Dep, only: [ok?: 1, mix?: 1, rebar?: 1, make?: 1] + @doc """ Gets all direct children of the current `Mix.Project` as a `Mix.Dep` struct. Umbrella project dependencies are included as children. By default, it will filter all dependencies that does not match - current environment, behaviour can be overriden via options. - - ## Options + current environment, behaviour can be overridden via options. + """ + def children() do + mix_children([]) ++ Mix.Dep.Umbrella.unloaded + end - * `:env` - filter dependencies on given environments + @doc """ + Partitions loaded dependencies by environment. """ - def children(opts) do - from = Path.absname("mix.exs") - deps = Enum.map(Mix.Project.config[:deps] || [], &to_dep(&1, from)) - - # Filter deps not matching mix environment - if env = opts[:env] do - deps = - Enum.filter(deps, fn %Mix.Dep{opts: opts} -> - only = opts[:only] - if only, do: env in List.wrap(only), else: true - end) - end + def partition_by_env(deps, nil), do: {deps, []} + def partition_by_env(deps, env), do: Enum.split_with(deps, ¬ skip?(&1, env)) - deps ++ Mix.Dep.Umbrella.unloaded + @doc """ + Checks if a dependency must be skipped according to the environment. + """ + def skip?(_dep, nil), do: false + def skip?(%Mix.Dep{status: {:divergedonly, _}}, _), do: false + def skip?(%Mix.Dep{opts: opts}, env) do + only = opts[:only] + validate_only!(only) + only != nil and env not in List.wrap(only) end @doc """ Loads the given dependency information, including its latest status and children. 
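  (As an aside on the `skip?/2` helper above: a hypothetical dependency declared
  with `only: :test` is skipped in other environments, but never when no
  environment filter is given.)

      skip?(%Mix.Dep{opts: [only: :test]}, :dev)   #=> true
      skip?(%Mix.Dep{opts: [only: :test]}, :test)  #=> false
      skip?(%Mix.Dep{opts: [only: :test]}, nil)    #=> false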
""" - def load(dep) do - %Mix.Dep{manager: manager, scm: scm, opts: opts} = dep - dep = %{dep | status: scm_status(scm, opts)} - dest = opts[:dest] + def load(%Mix.Dep{manager: manager, scm: scm, opts: opts} = dep, children) do + manager = scm_manager(scm, opts) || + manager || + infer_manager(opts[:dest]) + + dep = %{dep | manager: manager, status: scm_status(scm, opts)} {dep, children} = cond do - not ok?(dep.status) -> + not ok?(dep) -> {dep, []} - manager == :rebar -> - rebar_dep(dep) + mix?(dep) -> + mix_dep(dep, children) - mix?(dest) -> - mix_dep(%{dep | manager: :mix}) + # If not an explicit Rebar or Mix dependency + # but came from Rebar, assume to be a Rebar dep. + rebar?(dep) -> + rebar_dep(dep, children, manager) - rebar?(dest) -> - rebar_dep(%{dep | manager: :rebar}) - - make?(dest) -> - {%{dep | manager: :make}, []} + make?(dep) -> + make_dep(dep) true -> {dep, []} end - %{validate_path(validate_app(dep)) | deps: children} + %{validate_app(dep) | deps: attach_only(children, opts)} end @doc """ Checks if a requirement from a dependency matches the given version. """ - def vsn_match?(nil, _actual, _app), - do: true - def vsn_match?(req, actual, app) do + def vsn_match(nil, _actual, _app), + do: {:ok, true} + def vsn_match(req, actual, app) do if Regex.regex?(req) do - actual =~ req + {:ok, actual =~ req} else case Version.parse(actual) do {:ok, version} -> case Version.parse_requirement(req) do {:ok, req} -> - Version.match?(version, req) + {:ok, Version.match?(version, req)} :error -> Mix.raise "Invalid requirement #{req} for app #{app}" end - :error -> - Mix.raise "The application #{app} specified a non Semantic Version #{actual}. " <> - "Mix can only match the requirement #{req} against Semantic Versions, to match against any " <> - "version, please use a regex as requirement" + {:error, :nosemver} end end end @@ -95,24 +96,37 @@ defmodule Mix.Dep.Loader do ## Helpers def to_dep(tuple, from, manager \\ nil) do - %{with_scm_and_app(tuple) | from: from, manager: manager} + %{opts: opts} = dep = with_scm_and_app(tuple) + %{dep | from: from, manager: opts[:manager] || manager} end - defp with_scm_and_app({app, opts}) when is_list(opts) do - with_scm_and_app({app, nil, opts}) + defp with_scm_and_app({app, opts} = original) when is_atom(app) and is_list(opts) do + with_scm_and_app(app, nil, opts, original) end - defp with_scm_and_app({app, req}) do - with_scm_and_app({app, req, []}) + defp with_scm_and_app({app, req} = original) when is_atom(app) do + if is_binary(req) or Regex.regex?(req) do + with_scm_and_app(app, req, [], original) + else + invalid_dep_format(original) + end end - defp with_scm_and_app({app, req, opts} = other) when is_atom(app) and is_list(opts) do - unless is_binary(req) or Regex.regex?(req) or nil?(req) do - invalid_dep_format(other) + defp with_scm_and_app({app, req, opts} = original) when is_atom(app) and is_list(opts) do + if is_binary(req) or Regex.regex?(req) do + with_scm_and_app(app, req, opts, original) + else + invalid_dep_format(original) end + end + defp with_scm_and_app(original) do + invalid_dep_format(original) + end + + defp with_scm_and_app(app, req, opts, original) do unless Keyword.keyword?(opts) do - invalid_dep_format(other) + invalid_dep_format(original) end bin_app = Atom.to_string(app) @@ -125,14 +139,16 @@ defmodule Mix.Dep.Loader do {scm, opts} = get_scm(app, opts) - unless scm do - Mix.Tasks.Local.Hex.maybe_install(app) - Mix.Tasks.Local.Hex.maybe_start() - {scm, opts} = get_scm(app, opts) - end + {scm, opts} = + if !scm && 
Mix.Hex.ensure_installed?(app) do + _ = Mix.Hex.start() + get_scm(app, opts) + else + {scm, opts} + end unless scm do - Mix.raise "Could not find a SCM for dependency #{inspect app} from #{inspect Mix.Project.get}" + Mix.raise "Could not find an SCM for dependency #{inspect app} from #{inspect Mix.Project.get}" end %Mix.Dep{ @@ -140,19 +156,24 @@ defmodule Mix.Dep.Loader do app: app, requirement: req, status: scm_status(scm, opts), - opts: opts} - end - - defp with_scm_and_app(other) do - invalid_dep_format(other) + opts: Keyword.put_new(opts, :env, :prod)} end defp get_scm(app, opts) do - Enum.find_value Mix.SCM.available, {nil, opts}, fn(scm) -> + Enum.find_value Mix.SCM.available, {nil, opts}, fn scm -> (new = scm.accepts_options(app, opts)) && {scm, new} end end + # Notice we ignore Make dependencies because the + # file based heuristic will always figure it out. + @scm_managers ~w(mix rebar rebar3)a + + defp scm_manager(scm, opts) do + managers = scm.managers(opts) + Enum.find(@scm_managers, &(&1 in managers)) + end + defp scm_status(scm, opts) do if scm.checked_out?(opts) do {:ok, nil} @@ -161,19 +182,17 @@ defmodule Mix.Dep.Loader do end end - defp ok?({:ok, _}), do: true - defp ok?(_), do: false - - defp mix?(dest) do - any_of?(dest, ["mix.exs"]) - end - - defp rebar?(dest) do - any_of?(dest, ["rebar", "rebar.config", "rebar.config.script"]) - end - - defp make?(dest) do - any_of?(dest, ["Makefile", "Makefile.win"]) + defp infer_manager(dest) do + cond do + any_of?(dest, ["mix.exs"]) -> + :mix + any_of?(dest, ["rebar", "rebar.config", "rebar.config.script", "rebar.lock"]) -> + :rebar3 + any_of?(dest, ["Makefile", "Makefile.win"]) -> + :make + true -> + nil + end end defp any_of?(dest, files) do @@ -196,60 +215,113 @@ defmodule Mix.Dep.Loader do requirement :: String.t | Regex.t opts :: Keyword.t + If you want to skip the requirement (not recommended), use ">= 0.0.0". """ end ## Fetching - defp mix_dep(%Mix.Dep{opts: opts} = dep) do + # We need to override the dependencies so they mirror + # the :only requirement in the parent. + defp attach_only(deps, opts) do + if only = opts[:only] do + Enum.map(deps, fn %{opts: opts} = dep -> + %{dep | opts: Keyword.put_new(opts, :only, only)} + end) + else + deps + end + end + + defp mix_dep(%Mix.Dep{opts: opts} = dep, nil) do Mix.Dep.in_dependency(dep, fn _ -> - config = Mix.Project.config - umbrella? = Mix.Project.umbrella? + opts = + if Mix.Project.umbrella? do + Keyword.put_new(opts, :app, false) + else + opts + end - if umbrella? do - opts = Keyword.put_new(opts, :app, false) - end + child_opts = + if opts[:from_umbrella] do + [] + else + [env: Keyword.fetch!(opts, :env)] + end + + deps = mix_children(child_opts) ++ Mix.Dep.Umbrella.unloaded + {%{dep | opts: opts}, deps} + end) + end + + # If we have a Mix dependency that came from a remote converger, + # we just use the dependencies given by the remote converger, + # we don't need to load the mixfile at all. We can only do this + # because umbrella projects are not supported in remotes. 
+ defp mix_dep(%Mix.Dep{opts: opts} = dep, children) do + from = Path.join(opts[:dest], "mix.exs") + deps = Enum.map(children, &to_dep(&1, from)) + {dep, deps} + end - if req = old_elixir_req(config) do - Mix.shell.error "warning: the dependency #{dep.app} requires Elixir #{inspect req} " <> - "but you are running on v#{System.version}" + defp rebar_dep(%Mix.Dep{app: app, opts: opts, extra: overrides} = dep, children, manager) do + config = + File.cd!(opts[:dest], fn -> Mix.Rebar.load_config(".") end) + + config = + Mix.Rebar.apply_overrides(app, config, overrides) + + deps = + if children do + from = Path.join(opts[:dest], "rebar.config") + # Pass the manager because deps of a Rebar project need + # to default to Rebar if we cannot chose a manager from + # files in the dependency + Enum.map(children, &to_dep(&1, from, manager)) + else + rebar_children(config, manager, opts[:dest]) end - dep = %{dep | manager: :mix, opts: opts, extra: [umbrella: umbrella?]} - {dep, children(env: opts[:env] || :prod)} - end) + {%{dep | extra: config}, deps} end - defp rebar_dep(%Mix.Dep{} = dep) do - Mix.Dep.in_dependency(dep, fn _ -> - rebar = Mix.Rebar.load_config(".") - extra = Dict.take(rebar, [:sub_dirs]) - dep = %{dep | manager: :rebar, extra: extra} - {dep, rebar_children(rebar)} - end) + defp make_dep(dep) do + {dep, []} + end + + defp validate_only!(only) do + for entry <- List.wrap(only), not is_atom(entry) do + Mix.raise "Expected :only in dependency to be an atom or a list of atoms, got: #{inspect only}" + end + only end - defp rebar_children(root_config) do - from = Path.absname("rebar.config") + defp mix_children(opts) do + from = Path.absname("mix.exs") + (Mix.Project.config[:deps] || []) + |> Enum.map(&to_dep(&1, from)) + |> partition_by_env(opts[:env]) + |> elem(0) + end + + defp rebar_children(root_config, manager, dest) do + from = Path.absname(Path.join(dest, "rebar.config")) Mix.Rebar.recur(root_config, fn config -> - Mix.Rebar.deps(config) |> Enum.map(&to_dep(&1, from, :rebar)) + overrides = overrides(manager, config) + config + |> Mix.Rebar.deps() + |> Enum.map(fn dep -> %{to_dep(dep, from, manager) | extra: overrides} end) end) |> Enum.concat end - defp validate_path(%Mix.Dep{scm: scm, manager: manager} = dep) do - if scm == Mix.SCM.Path and not manager in [:mix, nil] do - Mix.raise ":path option can only be used with mix projects, " <> - "invalid path dependency for #{inspect dep.app}" - else - dep - end - end + defp overrides(:rebar3, config), do: config[:overrides] || [] + defp overrides(_, _config), do: [] - defp validate_app(%Mix.Dep{opts: opts, requirement: req, app: app, status: status} = dep) do + defp validate_app(%Mix.Dep{opts: opts, requirement: req, app: app} = dep) do opts_app = opts[:app] cond do - not ok?(status) -> + not ok?(dep) -> dep recently_fetched?(dep) -> %{dep | status: :compile} @@ -265,7 +337,7 @@ defmodule Mix.Dep.Loader do defp recently_fetched?(%Mix.Dep{opts: opts, scm: scm}) do scm.fetchable? 
&& Mix.Utils.stale?([Path.join(opts[:dest], ".fetch")], - [Path.join(opts[:build], ".compile.lock")]) + [Path.join(opts[:build], ".compile.fetch")]) end defp app_status(app_path, app, req) do @@ -274,10 +346,10 @@ defmodule Mix.Dep.Loader do case List.keyfind(config, :vsn, 0) do {:vsn, actual} when is_list(actual) -> actual = IO.iodata_to_binary(actual) - if vsn_match?(req, actual, app) do - {:ok, actual} - else - {:nomatchvsn, actual} + case vsn_match(req, actual, app) do + {:ok, true} -> {:ok, actual} + {:ok, false} -> {:nomatchvsn, actual} + {:error, error} -> {error, actual} end {:vsn, actual} -> {:invalidvsn, actual} @@ -288,11 +360,4 @@ defmodule Mix.Dep.Loader do {:error, _} -> {:noappfile, app_path} end end - - defp old_elixir_req(config) do - req = config[:elixir] - if req && not Version.match?(System.version, req) do - req - end - end end diff --git a/lib/mix/lib/mix/dep/lock.ex b/lib/mix/lib/mix/dep/lock.ex index fb95a2999e2..3ca467bb44a 100644 --- a/lib/mix/lib/mix/dep/lock.ex +++ b/lib/mix/lib/mix/dep/lock.ex @@ -4,79 +4,70 @@ # deps.* tasks. We also keep the Elixir version in the manifest file. defmodule Mix.Dep.Lock do @moduledoc false + @manifest ".compile.lock" @doc """ Returns the manifest file for dependencies. - """ - def manifest(manifest_path \\ Mix.Project.manifest_path) do - Path.join(manifest_path, @manifest) - end - - @doc """ - Touches the manifest timestamp unless it is an umbrella application. - """ - def touch() do - unless Mix.Project.umbrella?, do: touch(Mix.Project.manifest_path) - end - - @doc """ - Touches the manifest timestamp and updates the elixir version - and mix environment information in the given path. - """ - def touch(manifest_path) do - File.mkdir_p!(manifest_path) - File.write!(Path.join(manifest_path, @manifest), System.version) - end - @doc """ - Returns the elixir version in the lock manifest unless is an umbrella app. + The manifest is used to check if the lockfile + itself is up to date. """ - def elixir_vsn() do - unless Mix.Project.umbrella?, do: elixir_vsn(Mix.Project.manifest_path) + def manifest(path \\ Mix.Project.manifest_path) do + Path.join(path, @manifest) end @doc """ - Returns the elixir version in the lock manifest in the given path. + Touches the manifest file to force recompilation. """ - def elixir_vsn(manifest_path) do - case File.read(manifest(manifest_path)) do - {:ok, contents} -> - contents - {:error, _} -> - nil - end + def touch_manifest do + path = Mix.Project.manifest_path + File.mkdir_p!(path) + File.touch!(manifest(path)) end @doc """ - Read the lockfile, returns a keyword list containing + Reads the lockfile, returns a map containing each app name and its current lock information. """ + @spec read() :: map def read() do - case File.read(lockfile) do + case File.read(lockfile()) do {:ok, info} -> - {value, _binding} = Code.eval_string(info) - value || %{} + assert_no_merge_conflicts_in_lockfile(lockfile(), info) + case Code.eval_string(info, [], file: lockfile()) do + {lock, _binding} when is_map(lock) -> lock + {_, _binding} -> %{} + end {:error, _} -> %{} end end @doc """ - Receives a keyword list and writes it as the latest lock. + Receives a map and writes it as the latest lock. """ + @spec write(map) :: :ok def write(map) do - unless map == read do + unless map == read() do lines = - for {app, rev} <- map, rev != nil do + for {app, rev} <- Enum.sort(map), rev != nil do ~s("#{app}": #{inspect rev, limit: :infinity}) end - File.write! lockfile, "%{" <> Enum.join(lines, ",\n ") <> "}\n" - touch + File.write! 
lockfile(), "%{" <> Enum.join(lines, ",\n ") <> "}\n" + touch_manifest() end + :ok end defp lockfile do Mix.Project.config[:lockfile] end + + defp assert_no_merge_conflicts_in_lockfile(lockfile, info) do + if String.contains?(info, ~w(<<<<<<< ======= >>>>>>>)) do + Mix.raise "Your #{lockfile} contains merge conflicts. Please resolve the conflicts " <> + "and run the command again" + end + end end diff --git a/lib/mix/lib/mix/dep/umbrella.ex b/lib/mix/lib/mix/dep/umbrella.ex index fc78fb17ed7..411cc4a30f6 100644 --- a/lib/mix/lib/mix/dep/umbrella.ex +++ b/lib/mix/lib/mix/dep/umbrella.ex @@ -7,59 +7,47 @@ defmodule Mix.Dep.Umbrella do def unloaded do config = Mix.Project.config - if apps_path = config[:apps_path] do - paths = Path.wildcard(Path.join(apps_path, "*")) - build = Mix.Project.build_path - - paths - |> Enum.filter(&File.dir?(&1)) - |> extract_umbrella - |> filter_umbrella(config[:apps]) - |> to_umbrella_dep(build) + if apps_paths = Mix.Project.apps_paths(config) do + env = Mix.env + from = Path.absname("mix.exs") + build = Mix.Project.build_path(config) + + for {app, path} <- apps_paths do + opts = [path: path, dest: Path.expand(path), from_umbrella: true, + env: env, build: Path.join([build, "lib", Atom.to_string(app)])] + %Mix.Dep{scm: Mix.SCM.Path, app: app, requirement: nil, + manager: :mix, status: {:ok, nil}, from: from, opts: opts} + end else [] end end + @doc """ + Gets all umbrella dependencies in the loaded format from cache (if available). + """ + def cached do + if project = Mix.Project.get do + key = {:umbrella_deps, Mix.env, project} + Mix.ProjectStack.read_cache(key) || Mix.ProjectStack.write_cache(key, loaded()) + else + loaded() + end + end + @doc """ Gets all umbrella dependencies in the loaded format. """ def loaded do - deps = unloaded + deps = unloaded() apps = Enum.map(deps, &(&1.app)) Enum.map(deps, fn umbrella_dep -> - umbrella_dep = Mix.Dep.Loader.load(umbrella_dep) + umbrella_dep = Mix.Dep.Loader.load(umbrella_dep, nil) deps = Enum.filter(umbrella_dep.deps, fn dep -> Mix.Dep.available?(dep) and dep.app in apps end) %{umbrella_dep | deps: deps} - end) |> Mix.Dep.Converger.topsort - end - - defp extract_umbrella(paths) do - for path <- paths do - app = path |> Path.basename |> String.downcase |> String.to_atom - {app, path} - end - end - - defp filter_umbrella(pairs, nil), do: pairs - defp filter_umbrella(pairs, apps) when is_list(apps) do - for {app, _} = pair <- pairs, app in apps, do: pair - end - - defp to_umbrella_dep(paths, build) do - Enum.map paths, fn({app, path}) -> - opts = [path: path, dest: Path.expand(path), from_umbrella: true, - env: Mix.env, build: Path.join([build, "lib", Atom.to_string(app)])] - %Mix.Dep{ - scm: Mix.SCM.Path, - app: app, - requirement: nil, - manager: :mix, - status: {:ok, nil}, - opts: opts} - end + end) |> Mix.Dep.Converger.topological_sort end end diff --git a/lib/mix/lib/mix/exceptions.ex b/lib/mix/lib/mix/exceptions.ex index c2fe48ab6af..f5819c5ea7b 100644 --- a/lib/mix/lib/mix/exceptions.ex +++ b/lib/mix/lib/mix/exceptions.ex @@ -1,23 +1,54 @@ defmodule Mix.NoTaskError do - defexception [:task, :message, :mix] + defexception [:task, :message, mix: true] def exception(opts) do task = opts[:task] - %Mix.NoTaskError{task: task, message: "The task #{task} could not be found"} + %Mix.NoTaskError{task: task, message: msg(task)} + end + + defp msg(task) do + msg = "The task #{inspect task} could not be found" + case did_you_mean(task) do + {mod, ^task, _score} -> + msg <> " because the module is named #{inspect mod} 
instead of " <> + "#{expected_mod_name(task)} as expected. " <> + "Please rename it and try again" + + {_mod, similar, score} when score > 0.8 -> + msg <> ". Did you mean #{inspect similar}?" + + _otherwise -> msg + end + end + + defp did_you_mean(task) do + Mix.Task.load_all # Ensure all tasks are loaded + Mix.Task.all_modules + |> Enum.map(&{&1, Mix.Task.task_name(&1)}) + |> Enum.reduce({nil, nil, 0}, &max_similar(&1, task, &2)) + end + + defp max_similar({mod, source}, target, {_, _, current} = best) do + score = String.jaro_distance(source, target) + if score < current, do: best, else: {mod, source, score} + end + + defp expected_mod_name(task) do + "Mix.Tasks." <> Mix.Utils.command_to_module_name(task) end end defmodule Mix.InvalidTaskError do - defexception [:task, :message, :mix] + defexception [:task, :message, mix: true] def exception(opts) do task = opts[:task] - %Mix.InvalidTaskError{task: task, message: "The task #{task} does not export run/1"} + %Mix.InvalidTaskError{task: task, message: "The task #{inspect task} does not export run/1"} end end defmodule Mix.ElixirVersionError do - defexception [:target, :expected, :actual, :message, :mix] + defexception [:target, :expected, :actual, :message, mix: true] def exception(opts) do target = opts[:target] @@ -30,10 +61,10 @@ defmodule Mix.ElixirVersionError do end defmodule Mix.NoProjectError do - defexception message: "Could not find a Mix.Project, please ensure a mix.exs file is available", - mix: nil + message = "Could not find a Mix.Project, please ensure you are running Mix in a directory with a mix.exs file" + defexception message: message, mix: true end defmodule Mix.Error do - defexception [:mix, :message] + defexception [:message, mix: true] end diff --git a/lib/mix/lib/mix/generator.ex b/lib/mix/lib/mix/generator.ex index 5f7d829139c..54d506ffb5f 100644 --- a/lib/mix/lib/mix/generator.ex +++ b/lib/mix/lib/mix/generator.ex @@ -2,18 +2,30 @@ defmodule Mix.Generator do @moduledoc """ Conveniences for working with paths and generating content. - All of those functions are verbose, in the sense they log - the action to be performed via `Mix.shell`. + All of these functions are verbose, in the sense they log + the action to be performed via `Mix.shell/0`. """ @doc """ Creates a file with the given contents. If the file already exists, asks for user confirmation. + + ## Options + + * `:force` - forces installation without a shell prompt. + + ## Examples + + iex> Mix.Generator.create_file ".gitignore", "_build\ndeps\n" + * creating .gitignore + :ok + """ - def create_file(path, contents) when is_binary(path) do - Mix.shell.info "%{green}* creating%{reset} #{Path.relative_to_cwd path}" + @spec create_file(Path.t, iodata, Keyword.t) :: any + def create_file(path, contents, opts \\ []) when is_binary(path) do + Mix.shell.info [:green, "* creating ", :reset, Path.relative_to_cwd(path)] - if overwriting?(path) do + if opts[:force] || Mix.Utils.can_write?(path) do File.mkdir_p!(Path.dirname(path)) File.write!(path, contents) end @@ -21,30 +33,25 @@ defmodule Mix.Generator do @doc """ Creates a directory if one does not exist yet. + + This function does nothing if the given directory already exists; in this + case, it still logs the directory creation. 
+ + ## Examples + + iex> Mix.Generator.create_directory "path/to/dir" + * creating path/to/dir + :ok + """ + @spec create_directory(Path.t) :: any def create_directory(path) when is_binary(path) do - Mix.shell.info "%{green}* creating%{reset} #{Path.relative_to_cwd path}" + Mix.shell.info [:green, "* creating ", :reset, Path.relative_to_cwd(path)] File.mkdir_p! path end - defp overwriting?(path) do - if File.exists?(path) do - full = Path.expand(path) - Mix.shell.yes?(Path.relative_to_cwd(full) <> " already exists, overwrite?") - else - true - end - end - - @doc false - defmacro from_file(path) do - quote do - File.read! Path.expand(unquote(path), __ENV__.file) - end - end - @doc """ - Embed a template given by `contents` into the current module. + Embeds a template given by `contents` into the current module. It will define a private function with the `name` followed by `_template` that expects assigns as arguments. @@ -54,16 +61,24 @@ defmodule Mix.Generator do template using the `@` macro. For more information, check `EEx.SmartEngine`. + + ## Examples + + defmodule Mix.Tasks.MyTask do + require Mix.Generator + Mix.Generator.embed_template(:log, "Log: <%= @log %>") + end + """ defmacro embed_template(name, contents) do - quote bind_quoted: binding do + quote bind_quoted: binding() do contents = case contents do [from_file: file] -> @file file File.read!(file) c when is_binary(c) -> - @file {__ENV__.file, __ENV__.line+1} + @file {__ENV__.file, __ENV__.line + 1} c _ -> raise ArgumentError, "expected string or from_file: file" @@ -78,10 +93,18 @@ defmodule Mix.Generator do Embeds a text given by `contents` into the current module. It will define a private function with the `name` followed by - `_text` that expects no argument. + `_text` that expects no arguments. + + ## Examples + + defmodule Mix.Tasks.MyTask do + require Mix.Generator + Mix.Generator.embed_text(:error, "There was an error!") + end + """ defmacro embed_text(name, contents) do - quote bind_quoted: binding do + quote bind_quoted: binding() do contents = case contents do [from_file: f] -> File.read!(f) diff --git a/lib/mix/lib/mix/hex.ex b/lib/mix/lib/mix/hex.ex new file mode 100644 index 00000000000..5332f52f90d --- /dev/null +++ b/lib/mix/lib/mix/hex.ex @@ -0,0 +1,78 @@ +defmodule Mix.Hex do + @moduledoc false + @hex_requirement ">= 0.14.0" + @hex_mirror "https://repo.hex.pm/" + + @doc """ + Returns `true` if `Hex` is loaded or installed. Otherwise returns `false`. + """ + @spec ensure_installed?(atom) :: boolean + def ensure_installed?(app) do + if Code.ensure_loaded?(Hex) do + true + else + shell = Mix.shell + shell.info "Could not find Hex, which is needed to build dependency #{inspect app}" + + if shell.yes?("Shall I install Hex? (if running non-interactively, use \"mix local.hex --force\")") do + Mix.Tasks.Local.Hex.run ["--force"] + else + false + end + end + end + + @doc """ + Returns `true` if it has the required `Hex`. If an update is performed, it then exits. + Otherwise returns `false` without updating anything. + """ + @spec ensure_updated?() :: boolean + def ensure_updated?() do + cond do + not Code.ensure_loaded?(Hex) -> + false + not Version.match?(Hex.version, @hex_requirement) -> + Mix.shell.info "Mix requires Hex #{@hex_requirement} but you have #{Hex.version}" + + if Mix.shell.yes?("Shall I abort the current command and update Hex?") do + Mix.Tasks.Local.Hex.run ["--force"] + exit({:shutdown, 0}) + end + + false + true -> + true + end + end + + @doc """ + Ensures `Hex` is started. 
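  A sketch of how the helpers in this module are typically sequenced by a
  task that needs Hex; the dependency name and the exact ordering are
  illustrative assumptions, not taken from the patch:

      if Mix.Hex.ensure_installed?(:my_dep) and Mix.Hex.ensure_updated?() do
        Mix.Hex.start()
      end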
+ """ + def start do + try do + Hex.start + catch + kind, reason -> + stacktrace = System.stacktrace + Mix.shell.error "Could not start Hex. Try fetching a new version with " <> + "\"mix local.hex\" or uninstalling it with \"mix archive.uninstall hex.ez\"" + :erlang.raise(kind, reason, stacktrace) + end + end + + @doc """ + Returns the URL to the Hex mirror. + """ + def mirror do + System.get_env("HEX_MIRROR") || cdn() || @hex_mirror + end + + # TODO: Remove by 1.4 + defp cdn do + if cdn = System.get_env("HEX_CDN") do + Mix.shell.error "warning: the HEX_CDN environment variable has been deprecated " <> + "in favor of HEX_MIRROR" + cdn + end + end +end diff --git a/lib/mix/lib/mix/local.ex b/lib/mix/lib/mix/local.ex index 2d8ccb0e9be..d9e2332c117 100644 --- a/lib/mix/lib/mix/local.ex +++ b/lib/mix/lib/mix/local.ex @@ -1,31 +1,58 @@ defmodule Mix.Local do @moduledoc false + @public_keys_html "https://repo.hex.pm/installs/public_keys.html" + + @type item :: :archive | :escript + @doc """ - The path for local archives. + Returns the name for an archive or an escript, based on the project config. + + ## Examples + + iex> Mix.Local.name_for(:archive, [app: "foo", version: "0.1.0"]) + "foo-0.1.0.ez" + + iex> Mix.Local.name_for(:escript, [escript: [name: "foo"]]) + "foo" - It checks the `MIX_ARCHIVES` variable or it uses the - "archives" directory inside `Mix.Utils.mix_home/0`. + """ + @spec name_for(item, Keyword.t) :: String.t + def name_for(:archive, project) do + version = if version = project[:version], do: "-#{version}" + "#{project[:app]}#{version}.ez" + end + + def name_for(:escript, project) do + case get_in(project, [:escript, :name]) do + nil -> project[:app] + name -> name + end |> to_string() + end - Since archives are specific to Elixir versions, it is - expected from build tools to swap the `MIX_ARCHIVES` - variable to different locations based on a particular - Elixir installation. + @doc """ + The path for local archives or escripts. """ - def archives_path do - System.get_env("MIX_ARCHIVES") || - Path.join(Mix.Utils.mix_home, "archives") + @spec path_for(item) :: String.t + def path_for(:archive) do + System.get_env("MIX_ARCHIVES") || Path.join(Mix.Utils.mix_home, "archives") + end + + def path_for(:escript) do + Path.join(Mix.Utils.mix_home, "escripts") end @doc """ - Append archives paths into Erlang code path. + Appends archives paths into Erlang code path. """ def append_archives do - Enum.each(archives_ebin, &Code.append_path(&1)) + archives = archives_ebins() + Enum.each(archives, &check_elixir_version_in_ebin/1) + Enum.each(archives, &Code.append_path/1) end @doc """ - Append mix paths into Erlang code path. + Appends Mix paths into Erlang code path. """ def append_paths do Enum.each(Mix.Utils.mix_paths, &Code.append_path(&1)) @@ -34,23 +61,108 @@ defmodule Mix.Local do @doc """ Returns all tasks in local archives. """ - def all_tasks, do: Mix.Task.load_tasks(archives_ebin) + def archives_tasks do + Mix.Task.load_tasks(archives_ebins()) + end @doc """ - Returns paths of all archive files matching given - application name. + Returns the name of an archive given a path. """ - def archive_files(name) do - archives(name, ".ez") ++ archives(name, "-*.ez") + def archive_name(path) do + path # "foo/bar/baz-0.1.0.ez" + |> Path.basename # "baz-0.1.0.ez" + |> Path.rootname(".ez") # "baz-0.1.0" + end + + @doc """ + Returns the ebin path of an archive. 
+ """ + def archive_ebin(path) do + Path.join [path, archive_name(path), "ebin"] + end + + defp archives_ebins do + path = path_for(:archive) + case File.ls(path) do + {:ok, entries} -> Enum.map(entries, &archive_ebin(Path.join(path, &1))) + {:error, _} -> [] + end + end + + @doc """ + Checks Elixir version requirement stored in the ebin directory + and print a warning if it is not satisfied. + """ + def check_elixir_version_in_ebin(ebin) do + elixir = ebin |> Path.dirname |> Path.join(".elixir") |> String.to_charlist + case :erl_prim_loader.get_file(elixir) do + {:ok, req, _} -> + unless Version.match?(System.version, req) do + archive = ebin |> Path.dirname |> Path.basename + Mix.shell.error "warning: the archive #{archive} requires Elixir #{inspect req} " <> + "but you are running on v#{System.version}" + end + :ok + :error -> + :ok + end + end + + @doc """ + Fetches the given signed CSV files, verifies and returns the matching + Elixir version, artifact version and artifact's checksum. + + Used to install both Rebar and Hex from S3. + """ + def find_matching_versions_from_signed_csv!(name, path) do + csv = read_path!(name, path) + + signature = + read_path!(name, path <> ".signed") + |> String.replace("\n", "") + |> Base.decode64! + + if Mix.PublicKey.verify csv, :sha512, signature do + csv + |> parse_csv + |> find_latest_eligible_version + else + Mix.raise "Could not install #{name} because Mix could not verify authenticity " <> + "of metadata file at #{inspect(path)}. This may happen because a proxy or some " <> + "entity is interfering with the download or because you don't have a " <> + "public key to verify the download.\n\nYou may try again later or check " <> + "if a new public key has been released in our public keys page: #{@public_keys_html}" + end + end + + defp read_path!(name, path) do + case Mix.Utils.read_path(path) do + {:ok, contents} -> contents + {:remote, message} -> + Mix.raise """ + #{message} + + Could not install #{name} because Mix could not download metadata at #{path}. + """ + end + end + + defp parse_csv(body) do + body + |> :binary.split("\n", [:global, :trim]) + |> Enum.map(&:binary.split(&1, ",", [:global, :trim])) end - defp archives(name, suffix) do - Mix.Local.archives_path - |> Path.join(name <> suffix) - |> Path.wildcard + defp find_latest_eligible_version(entries) do + {:ok, current_version} = Version.parse(System.version) + entries + |> Enum.reverse + |> Enum.find_value(entries, &find_version(&1, current_version)) end - defp archives_ebin do - Path.join(archives_path, "*.ez") |> Path.wildcard |> Enum.map(&Mix.Archive.ebin/1) + defp find_version([artifact_version, digest | versions], current_version) do + if version = Enum.find(versions, &Version.compare(&1, current_version) != :gt) do + {version, artifact_version, digest} + end end end diff --git a/lib/mix/lib/mix/local/installer.ex b/lib/mix/lib/mix/local/installer.ex new file mode 100644 index 00000000000..0f5e2cc9e2e --- /dev/null +++ b/lib/mix/lib/mix/local/installer.ex @@ -0,0 +1,327 @@ +defmodule Mix.Local.Installer do + @moduledoc false + + # This module implements pieces of functionality shared + # by the archive- and escript-related tasks. + + @typedoc """ + Installs types supported by `Mix.Local.Installer`. 
+ + * `:project` - installs the current Mix project's artifact + * `:local` - installs the artifact located at `path` + * `:url` - installs the artifact retrievable at `url` + * `:fetcher` - builds and install the artifact generated by the `dep_spec` + + """ + @type install_spec :: + :project | + {:local, path :: Path.t} | + {:url, url :: binary} | + {:fetcher, dep_spec :: tuple} + + @doc """ + Checks that the `install_spec` and `opts` are supported by the respective module. + """ + @callback check_install_spec(install_spec, opts :: Keyword.t) :: :ok | {:error, String.t} + + @doc """ + Returns a list of already installed version of the same artifact. + """ + @callback find_previous_versions(basename :: String.t) :: [Path.t] + + @doc """ + Builds a local artifact either from a remote dependency or for + the current project. + """ + @callback build(install_spec, opts :: Keyword.t) :: Path.t + + @doc """ + The installation itself. + """ + @callback install(basename :: String.t, contents :: binary, previous :: [Path.t]) :: :ok + + @doc """ + Common implementation of installation for archives and escripts. + + Relies on a few callbacks provided by respective callback modules + for customizing certain steps in the installation process. + """ + @spec install(module, OptionParser.argv, Keyword.t) :: boolean + def install(module, argv, switches) do + {opts, args} = OptionParser.parse!(argv, strict: switches) + + install_spec = + case parse_args(args, opts) do + {:error, message} -> Mix.raise message <> "\n\n" <> usage(module) + install_spec -> install_spec + end + + case module.check_install_spec(install_spec, opts) do + :ok -> :noop + {:error, message} -> Mix.raise message <> "\n\n" <> usage(module) + end + + case install_spec do + {:fetcher, dep_spec} -> + if opts[:sha512] do + Mix.raise "--sha512 is not supported when installing from git/github/hex\n\n" <> usage(module) + end + + fetch dep_spec, fn _ -> + local_install(module, module.build(install_spec, opts), opts) + end + + {path_or_url, src} when path_or_url in [:local, :url] -> + local_install(module, src, opts) + + :project -> + local_install(module, module.build(install_spec, opts), opts) + end + end + + defp task(module) do + Mix.Utils.module_name_to_command(module, 2) + end + + defp usage(module) do + "For more information run \"mix help #{task(module)}\"" + end + + defp local_path?(url_or_path) do + File.regular?(url_or_path) + end + + defp file_url?(url_or_path) do + URI.parse(url_or_path).scheme in ["http", "https"] + end + + defp local_install(module, src, opts) do + basename = Path.basename(URI.parse(src).path) + previous_files = module.find_previous_versions(basename) + + if opts[:force] || should_install?(src, previous_files) do + case Mix.Utils.read_path(src, opts) do + {:ok, binary} -> + module.install(basename, binary, previous_files) + + :badpath -> + Mix.raise "Expected #{inspect src} to be a URL or a local file path" + + {:local, message} -> + Mix.raise message + + {kind, message} when kind in [:remote, :checksum] -> + Mix.raise """ + #{message} + + Could not run #{task(module)} for: + + #{src} + + Please download the contents above manually to your current directory and run: + + mix #{task(module)} ./#{basename} + """ + end + + true + else + false + end + end + + defp should_install?(src, previous_files) do + message = case previous_files do + [] -> + "Are you sure you want to install #{inspect src}?" + [file] -> + "Found existing entry: #{file}\n" <> + "Are you sure you want to replace it with #{inspect src}?" 
+ files -> + "Found existing entries: #{Enum.map_join(files, ", ", &Path.basename/1)}\n" <> + "Are you sure you want to replace them with #{inspect src}?" + end + Mix.shell.yes?(message) + end + + @doc """ + Receives `argv` and `opts` from options parsing and returns an `install_spec`. + """ + @spec parse_args([String.t], Keyword.t) :: install_spec + def parse_args(argv, opts) + + def parse_args([], _opts) do + :project + end + + def parse_args([url_or_path], _opts) do + cond do + local_path?(url_or_path) -> {:local, url_or_path} + file_url?(url_or_path) -> {:url, url_or_path} + true -> {:error, "Expected #{inspect url_or_path} to be a URL or a local file path"} + end + end + + def parse_args(["github" | rest], opts) do + [repo | rest] = rest + url = "https://github.com/#{repo}.git" + parse_args(["git", url] ++ rest, opts) + end + + def parse_args(["git", url], opts) do + parse_args(["git", url, "branch", "master"], opts) + end + + def parse_args(["git", url, ref_type, ref], opts) do + case ref_to_config(ref_type, ref) do + {:error, error} -> + {:error, error} + + git_config -> + git_opts = git_config ++ [git: url, submodules: opts[:submodules]] + app_name = + if opts[:app] do + opts[:app] + else + "new package" + end + + {:fetcher, {String.to_atom(app_name), git_opts}} + end + end + + def parse_args(["git" | [_url | rest]], _opts) do + {:error, "received invalid git checkout spec: #{Enum.join(rest, " ")}"} + end + + def parse_args(["hex", package_name], opts) do + parse_args(["hex", package_name, ">= 0.0.0"], opts) + end + + def parse_args(["hex", package_name, version], opts) do + app_name = + if opts[:app] do + opts[:app] + else + package_name + end + + {:fetcher, {String.to_atom(app_name), version, hex: String.to_atom(package_name)}} + end + + def parse_args(["hex" | [_package_name | rest]], _opts) do + {:error, "received invalid Hex package spec: #{Enum.join(rest, " ")}"} + end + + defp ref_to_config("branch", branch), do: [branch: branch] + + defp ref_to_config("tag", tag), do: [tag: tag] + + defp ref_to_config("ref", ref), do: [ref: ref] + + defp ref_to_config(ref_type, _) do + {:error, "expected one of \"branch\", \"tag\", or \"ref\". Got: \"#{ref_type}\""} + end + + @doc """ + A common implementation for uninstalling archives and scripts. + """ + @spec uninstall(Path.t, String.t, OptionParser.argv) :: Path.t | nil + def uninstall(root, listing, argv) do + {_, argv, _} = OptionParser.parse(argv) + + if name = List.first(argv) do + path = Path.join(root, name) + cond do + not File.exists?(path) -> + Mix.shell.error "Could not find a local artifact named #{inspect name}. We found:" + Mix.Task.rerun(listing) + nil + should_uninstall?(path) -> + File.rm_rf!(path) + path + true -> + nil + end + else + Mix.raise "No argument was given to uninstall command" + end + end + + defp should_uninstall?(path) do + Mix.shell.yes?("Are you sure you want to uninstall #{path}?") + end + + @doc """ + Fetches `dep_spec` with `in_fetcher` and then runs `in_package`. + + Generates a new mix project in a temporary directory with the given `dep_spec` + added to a mix.exs. Then, `in_fetcher` is executed in the fetcher project. By + default, this fetches the dependency, but you can provide an `in_fetcher` + during test or for other purposes. After the `in_fetcher` is executed, + `in_package` is executed in the now (presumably) fetched package, with the + package's config overridden with the deps_path and lockfile of the fetcher + package. Also, the Mix env is set to :prod. 
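  A usage sketch; the package name and the task run inside the fetched
  package are illustrative:

      Mix.Local.Installer.fetch {:my_pkg, ">= 0.0.0", hex: :my_pkg}, fn _mixfile ->
        Mix.Task.run("archive.build", [])
      end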
+ """ + @spec fetch(tuple, ((atom) -> any), ((atom) -> any)) :: any + def fetch(dep_spec, in_fetcher \\ &in_fetcher/1, in_package) do + with_tmp_dir fn tmp_path -> + File.mkdir_p!(tmp_path) + + File.write! Path.join(tmp_path, "mix.exs"), """ + defmodule Mix.Local.Installer.Fetcher.Mixfile do + use Mix.Project + + def project do + [app: Mix.Local.Installer.Fetcher, + version: "1.0.0", + deps: [#{inspect dep_spec}]] + end + end + """ + + with_mix_env_prod fn -> + Mix.Project.in_project(Mix.Local.Installer.Fetcher, tmp_path, in_fetcher) + + package_name = elem(dep_spec, 0) + package_name_string = Atom.to_string(package_name) + package_path = Path.join([tmp_path, "deps", package_name_string]) + post_config = [ + deps_path: Path.join(tmp_path, "deps"), + lockfile: Path.join(tmp_path, "mix.lock") + ] + + Mix.Project.in_project(package_name, package_path, post_config, in_package) + end + end + after + :code.purge(Mix.Local.Installer.Fetcher) + :code.delete(Mix.Local.Installer.Fetcher) + end + + defp in_fetcher(_mixfile) do + Mix.Task.run("deps.get", []) + end + + defp with_tmp_dir(fun) do + unique = :crypto.strong_rand_bytes(4) |> Base.url_encode64(padding: false) + tmp_path = Path.join(System.tmp_dir!(), "mix-local-installer-fetcher-" <> unique) + + try do + fun.(tmp_path) + after + File.rm_rf(tmp_path) + end + end + + defp with_mix_env_prod(fun) do + previous_env = Mix.env() + + try do + Mix.env(:prod) + fun.() + after + Mix.env(previous_env) + end + end +end diff --git a/lib/mix/lib/mix/project.ex b/lib/mix/lib/mix/project.ex index 3ff1cf7bac1..09d4e63a05f 100644 --- a/lib/mix/lib/mix/project.ex +++ b/lib/mix/lib/mix/project.ex @@ -1,29 +1,90 @@ defmodule Mix.Project do @moduledoc """ - Defines and manipulate Mix projects. + Defines and manipulates Mix projects. - In order to configure Mix, a developer needs to use - `Mix.Project` in a module and define a function named - `project` that returns a keyword list with configuration. + A Mix project is defined by calling `use Mix.Project` in a module, usually + placed in `mix.exs`: - defmodule MyApp do + defmodule MyApp.Mixfile do use Mix.Project def project do [app: :my_app, - vsn: "0.6.0"] + version: "0.6.0"] end end - After being defined, the configuration for this project can be read - as `Mix.Project.config/0`. Notice that `config/0` won't fail if a - project is not defined; this allows many mix tasks to work - even without a project. + ## Configuration - In case the developer needs a project or wants to access a special - function in the project, he/she can call `Mix.Project.get!/0` - which fails with `Mix.NoProjectError` in case a project is not + In order to configure Mix, the module that `use`s `Mix.Project` should export + a `project/0` function that returns a keyword list representing configuration + for the project. + + This configuration can be read using `Mix.Project.config/0`. Note that + `config/0` won't fail if a project is not defined; this allows many Mix tasks + to work without a project. + + If a task requires a project to be defined or needs to access a + special function within the project, the task can call `Mix.Project.get!/0` + which fails with `Mix.NoProjectError` in the case a project is not defined. + + There isn't a comprehensive list of all the options that can be returned by + `project/0` since many Mix tasks define their own options that they read from + this configuration. For example, look at the "Configuration" section in the + documentation for the `Mix.Tasks.Compile` task. 
+ + These are a few options that are not used by just one Mix task (and will thus + be documented here): + + * `:build_per_environment` - if `true`, builds will be *per-environment*. If + `false`, builds will go in `_build/shared` regardless of the Mix + environment. Defaults to `true`. + + * `:aliases` - a list of task aliases. For more information, check out the + "Aliases" section in the documentation for the `Mix` module. Defaults to + `[]`. + + * `:config_path` - a string representing the path of the main config + file. See `config_files/0` for more information. Defaults to + `"config/config.exs"`. + + * `:default_task` - a string representing the default task to be run by + `mix` when no task is specified. Defaults to `"run"`. + + * `:deps` - a list of dependencies of this project. Refer to the + documentation for the `Mix.Tasks.Deps` task for more information. Defaults + to `[]`. + + * `:deps_path` - directory where dependencies are stored. Also see + `deps_path/1`. Defaults to `"deps"`. + + * `:lockfile` - the name of the lockfile used by the `mix deps.*` family of + tasks. Defaults to `"mix.lock"`. + + * `:preferred_cli_env` - a keyword list of `{task, env}` tuples where `task` + is the task name as an atom (for example, `:"deps.get"`) and `env` is the + preferred environment (for example, `:test`). This option overrides what + specified by the tasks with the `@preferred_cli_env` attribute (see the + docs for `Mix.Task`). Defaults to `[]`. + + For more options, keep an eye on the documentation for single Mix tasks; good + examples are the `Mix.Tasks.Compile` task and all the specific compiler tasks + (such as `Mix.Tasks.Compile.Elixir` or `Mix.Tasks.Compile.Erlang`). + + Note that sometimes the same configuration option is mentioned in the + documentation for different tasks; this is just because it's common for many + tasks to read and use the same configuration option (for example, + `:erlc_paths` is used by `mix compile.erlang`, `mix compile.yecc`, and other + tasks). + + ## Erlang projects + + Mix can be used to manage Erlang projects that don't have any Elixir code. To + ensure Mix tasks work correctly for an Erlang project, `language: :erlang` has + to be part of the configuration returned by `project/0`. This setting also + makes sure Elixir is not added as a dependency to the generated `.app` file or + to the escript generated with `mix escript.build`, and so on. """ @doc false @@ -33,8 +94,6 @@ defmodule Mix.Project do end end - @private_config [:build_path, :app_path] - # Invoked after each Mix.Project is compiled. @doc false def __after_compile__(env, _binary) do @@ -44,20 +103,19 @@ defmodule Mix.Project do # Push a project onto the project stack. # Only the top of the stack can be accessed. 
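  # A hedged sketch, not part of the original patch, of a mix.exs that pulls
  # together several of the options documented in the moduledoc above; the app
  # name, version, dependency, alias and preferred environment are illustrative:
  defmodule MyApp.Mixfile do
    use Mix.Project

    def project do
      [app: :my_app,
       version: "0.1.0",
       build_per_environment: true,
       default_task: "run",
       config_path: "config/config.exs",
       deps_path: "deps",
       lockfile: "mix.lock",
       preferred_cli_env: ["some.task": :test],
       aliases: [ci: ["deps.get", "compile", "test"]],
       deps: [{:my_dep, "~> 1.0", only: :dev}]]
    end
  end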
@doc false - def push(atom, file \\ nil) when is_atom(atom) do + def push(atom, file \\ nil, app \\ nil) when is_atom(atom) do file = file || (atom && List.to_string(atom.__info__(:compile)[:source])) - config = default_config + config = ([app: app] ++ default_config()) |> Keyword.merge(get_project_config(atom)) - |> Keyword.drop(@private_config) case Mix.ProjectStack.push(atom, config, file) do :ok -> :ok {:error, other} when is_binary(other) -> Mix.raise "Trying to load #{inspect atom} from #{inspect file}" <> - " but another project with the same name was already defined at #{inspect other}" + " but another project with the same name was already defined at #{inspect other}" end end @@ -70,21 +128,25 @@ defmodule Mix.Project do # The configuration that is pushed down to dependencies. @doc false def deps_config(config \\ config()) do - [build_path: build_path(config), + [build_embedded: config[:build_embedded], build_per_environment: config[:build_per_environment], - deps_path: deps_path(config)] + consolidate_protocols: false, + deps_path: deps_path(config), + env_path: build_path(config)] end @doc """ Retrieves the current project if there is one. - Otherwise `nil` is returned. It may happen in cases - there is no mixfile in the current directory. + If there is no current project, `nil` is returned. This + may happen in casesthere is no mixfile in the current + directory. - If you expect a project to be defined, i.e. it is a + If you expect a project to be defined, i.e., it is a requirement of the current task, you should call `get!/0` instead. """ + @spec get() :: module | nil def get do case Mix.ProjectStack.peek do %{name: name} -> name @@ -98,32 +160,34 @@ defmodule Mix.Project do This is usually called by tasks that need additional functions on the project to be defined. Since such tasks usually depend on a project being defined, this - function raises `Mix.NoProjectError` in case no project - is available. + function raises a `Mix.NoProjectError` exception in + case no project is available. """ + @spec get!() :: module | no_return def get! do - get || Mix.raise Mix.NoProjectError, [] + get() || raise Mix.NoProjectError, [] end @doc """ Returns the project configuration. If there is no project defined, it still returns a keyword - list with default values. This allows many mix tasks to work + list with default values. This allows many Mix tasks to work without the need for an underlying project. Note this configuration is cached once the project is - pushed into the stack. Calling it multiple times won't + pushed onto the stack. Calling it multiple times won't cause it to be recomputed. - Do not use `Mix.Project.config` to rely on runtime configuration. + Do not use `Mix.Project.config/0` to find the runtime configuration. Use it only to configure aspects of your project (like compilation directories) and not your application runtime. """ + @spec config() :: Keyword.t def config do case Mix.ProjectStack.peek do %{config: config} -> config - _ -> default_config + _ -> default_config() end end @@ -133,33 +197,103 @@ defmodule Mix.Project do This function is usually used in compilation tasks to trigger a full recompilation whenever such configuration files change. - By default it includes the mix.exs file, the lock manifest and - all config files in the `config` directory. + It returns the `mix.exs` file, the lock manifest, and all config + files in the `config` directory that do not start with a trailing + period (for example, `.my_config.exs`). 
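  A sketch of a typical return value; the exact paths depend on the project
  and environment:

      Mix.Project.config_files()
      #=> ["/path/to/project/_build/dev/lib/my_app/.compile.lock",
      #    "/path/to/project/mix.exs",
      #    "config/config.exs"]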
""" + @spec config_files() :: [Path.t] def config_files do [Mix.Dep.Lock.manifest] ++ case Mix.ProjectStack.peek do %{config: config, file: file} -> configs = - (config[:config_path] || "config/config.exs") + config[:config_path] |> Path.dirname - |> Path.join("*.*") + |> Path.join("**/*.*") |> Path.wildcard |> Enum.reject(&String.starts_with?(Path.basename(&1), ".")) - [file|configs] + [file | configs] _ -> [] end end @doc """ - Returns `true` if project is an umbrella project. + Returns `true` if `config` is the configuration for an umbrella project. + + When called with no arguments, tells whether the current project is + an umbrella project. """ - def umbrella? do + @spec umbrella?() :: boolean + def umbrella?(config \\ config()) do config[:apps_path] != nil end @doc """ + Returns a map with the umbrella child applications paths. + + These paths are based on the `:apps_path` and `:apps` configurations. + + If the given project configuration identifies an umbrella project, the return + value is a map of `app => path` where `app` is a child app of the umbrella and + `path` is its path relative to the root of the umbrella project. + + If the given project configuration does not identify an umbrella project, + `nil` is returned. + + ## Examples + + Mix.Project.apps_paths() + #=> %{my_app1: "apps/my_app1", my_app2: "apps/my_app2"} + + """ + @spec apps_paths() :: %{atom => Path.t} | nil + def apps_paths(config \\ config()) do + if apps_path = config[:apps_path] do + key = {:apps_paths, Mix.Project.get!} + Mix.ProjectStack.read_cache(key) || + Mix.ProjectStack.write_cache(key, + config[:apps] |> umbrella_apps(apps_path) |> to_apps_paths(apps_path)) + end + end + + defp umbrella_apps(nil, apps_path) do + case File.ls(apps_path) do + {:ok, apps} -> Enum.map(apps, &String.to_atom/1) + {:error, _} -> [] + end + end + defp umbrella_apps(apps, _apps_path) when is_list(apps) do + apps + end + + defp to_apps_paths(apps, apps_path) do + for app <- apps, + path = path_with_mix_exs_otherwise_warn(app, apps_path), + do: {app, path}, + into: %{} + end + + defp path_with_mix_exs_otherwise_warn(app, apps_path) do + path = Path.join(apps_path, Atom.to_string(app)) + cond do + File.regular?(Path.join(path, "mix.exs")) -> + path + + File.dir?(path) -> + Mix.shell.error "warning: path #{inspect Path.relative_to_cwd(path)} is a directory but " <> + "it has no mix.exs. Mix won't consider this directory as part of your " <> + "umbrella application. Please add a \"mix.exs\" or set the \":apps\" key " <> + "in your umbrella configuration with all relevant apps names as atoms" + nil + + true -> + # If it is a stray file, we just ignore it. + nil + end + end + + @doc ~S""" Runs the given `fun` inside the given project. This function changes the current working directory and @@ -168,17 +302,35 @@ defmodule Mix.Project do A `post_config` can be passed that will be merged into the project configuration. + + `fun` is called with the module name of the given `Mix.Project`. + The return value of this function is the return value of `fun`. 
+ + ## Examples + + Mix.Project.in_project :my_app, "/path/to/my_app", fn module -> + "Mixfile is: #{inspect module}" + end + #=> "Mixfile is: MyApp.Mixfile" + """ + @spec in_project(atom, Path.t, Keyword.t, (module -> result)) :: result when result: term def in_project(app, path, post_config \\ [], fun) def in_project(app, ".", post_config, fun) do - cached = load_project(app, post_config) - result = try do + cached = try do + load_project(app, post_config) + rescue + any -> + Mix.shell.error "Error while loading project #{inspect app} at #{File.cwd!}" + reraise any, System.stacktrace + end + + try do fun.(cached) after Mix.Project.pop end - result end def in_project(app, path, post_config, fun) do @@ -188,7 +340,9 @@ defmodule Mix.Project do end @doc """ - Returns the path to store dependencies for this project. + Returns the path where dependencies are stored for the given project. + + If no configuration is given, the one for the current project is used. The returned path will be expanded. @@ -198,12 +352,31 @@ defmodule Mix.Project do #=> "/path/to/project/deps" """ + @spec deps_path(Keyword.t) :: Path.t def deps_path(config \\ config()) do Path.expand config[:deps_path] end @doc """ - Returns the build path for this project. + Returns the full path of all dependencies as a map. + + ## Examples + + Mix.Project.deps_paths + #=> %{foo: "deps/foo", bar: "custom/path/dep"} + + """ + @spec deps_paths() :: %{optional(atom) => Path.t} + def deps_paths do + Enum.reduce Mix.Dep.cached(), %{}, fn + %{app: app, opts: opts}, acc -> Map.put acc, app, opts[:dest] + end + end + + @doc """ + Returns the build path for the given project. + + If no configuration is given, the one for the current project is used. The returned path will be expanded. @@ -212,8 +385,8 @@ defmodule Mix.Project do Mix.Project.build_path #=> "/path/to/project/_build/shared" - If :build_per_environment is set to true (the default), it - will create a new build per environment: + If `:build_per_environment` is set to `true`, it will create a new build per + environment: Mix.env #=> :dev @@ -221,20 +394,31 @@ defmodule Mix.Project do #=> "/path/to/project/_build/dev" """ + @spec build_path(Keyword.t) :: Path.t def build_path(config \\ config()) do - config[:build_path] || if config[:build_per_environment] do - Path.expand("_build/#{Mix.env}") - else - Path.expand("_build/shared") + config[:env_path] || env_path(config) + end + + defp env_path(config) do + build = config[:build_path] || "_build" + + case config[:build_per_environment] do + true -> + Path.expand("#{build}/#{Mix.env}") + false -> + Path.expand("#{build}/shared") + other -> + Mix.raise "The :build_per_environment option should be a boolean, got: #{inspect(other)}" end end @doc """ - The path to store manifests. + Returns the path where manifests are stored. - By default they are stored in the app path - inside the build directory but it may be changed - in future releases. + By default they are stored in the app path inside + the build directory. Umbrella applications have + the manifest path set to the root of the build directory. + Directories may be changed in future releases. The returned path will be expanded. 
@@ -244,8 +428,14 @@ defmodule Mix.Project do #=> "/path/to/project/_build/shared/lib/app" """ + @spec manifest_path(Keyword.t) :: Path.t def manifest_path(config \\ config()) do - app_path(config) + config[:app_path] || + if app = config[:app] do + Path.join([build_path(config), "lib", Atom.to_string(app)]) + else + build_path(config) + end end @doc """ @@ -259,12 +449,13 @@ defmodule Mix.Project do #=> "/path/to/project/_build/shared/lib/app" """ + @spec app_path(Keyword.t) :: Path.t def app_path(config \\ config()) do config[:app_path] || cond do app = config[:app] -> Path.join([build_path(config), "lib", Atom.to_string(app)]) config[:apps_path] -> - raise "Trying to access app_path for an umbrella project but umbrellas have no app" + raise "trying to access Mix.Project.app_path for an umbrella project but umbrellas have no app" true -> Mix.raise "Cannot access build without an application name, " <> "please ensure you are in a directory with a mix.exs file and it defines " <> @@ -273,28 +464,55 @@ defmodule Mix.Project do end @doc """ - Returns the paths this project compiles to. + Returns the paths the given project compiles to. + + If no configuration is given, the one for the current project will be used. The returned path will be expanded. ## Examples Mix.Project.compile_path - #=> "/path/to/project/_build/shared/lib/app/priv" + #=> "/path/to/project/_build/shared/lib/app/ebin" """ + @spec compile_path(Keyword.t) :: Path.t def compile_path(config \\ config()) do Path.join(app_path(config), "ebin") end @doc """ - Builds the project structure for the current application. + Returns the path where protocol consolidations are stored. + + The returned path will be expanded. + + ## Examples + + Mix.Project.consolidation_path + #=> "/path/to/project/_build/dev/consolidated" + + """ + def consolidation_path(config \\ config()) do + Path.join(build_path(config), "consolidated") + end + + @doc """ + Compiles the given project. + """ + @spec compile([term], Keyword.t) :: term + def compile(args, _config \\ []) do + Mix.Task.run "compile", args + end + + @doc """ + Builds the project structure for the given application. ## Options * `:symlink_ebin` - symlink ebin instead of copying it """ + @spec build_structure(Keyword.t, Keyword.t) :: :ok def build_structure(config \\ config(), opts \\ []) do app = app_path(config) File.mkdir_p!(app) @@ -302,64 +520,100 @@ defmodule Mix.Project do source = Path.expand("ebin") target = Path.join(app, "ebin") - cond do + _ = cond do opts[:symlink_ebin] -> - Mix.Utils.symlink_or_copy(source, target) + _ = symlink_or_copy(config, source, target) match?({:ok, _}, :file.read_link(target)) -> - File.rm_rf!(target) + _ = File.rm_rf!(target) File.mkdir_p!(target) true -> File.mkdir_p!(target) end - Mix.Utils.symlink_or_copy(Path.expand("include"), Path.join(app, "include")) - Mix.Utils.symlink_or_copy(Path.expand("priv"), Path.join(app, "priv")) + _ = symlink_or_copy(config, Path.expand("include"), Path.join(app, "include")) + _ = symlink_or_copy(config, Path.expand("priv"), Path.join(app, "priv")) :ok end + defp symlink_or_copy(config, source, target) do + if config[:build_embedded] do + if File.exists?(source) do + File.rm_rf!(target) + File.cp_r!(source, target) + end + else + Mix.Utils.symlink_or_copy(source, target) + end + end + @doc """ - Returns all load paths for this project. + Ensures the project structure for the given project exists. + + In case it does exist, it is a no-op. Otherwise, it is built. 
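  A usage sketch (the option shown is illustrative):

      Mix.Project.ensure_structure(Mix.Project.config(), symlink_ebin: true)
      #=> :ok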
+ """ + @spec ensure_structure(Keyword.t, Keyword.t) :: :ok + def ensure_structure(config \\ config(), opts \\ []) do + if File.exists?(app_path(config)) do + :ok + else + build_structure(config, opts) + end + end + + @doc """ + Returns all load paths for the given project. """ - def load_paths do - if umbrella? do + @spec load_paths(Keyword.t) :: [Path.t] + def load_paths(config \\ config()) do + if umbrella?(config) do [] else - [compile_path] + [compile_path(config)] end end # Loads mix.exs in the current directory or loads the project from the - # mixfile cache and pushes the project to the project stack. + # mixfile cache and pushes the project onto the project stack. defp load_project(app, post_config) do Mix.ProjectStack.post_config(post_config) - if cached = Mix.ProjectStack.read_cache(app) do + if cached = Mix.ProjectStack.read_cache({:app, app}) do {project, file} = cached - push(project, file) + push(project, file, app) project else file = Path.expand("mix.exs") - old_proj = get - - if File.regular?(file) do - Code.load_file(file) - end - - new_proj = get - - if old_proj == new_proj do - file = "nofile" - new_proj = nil - push new_proj, file - end + old_proj = get() + + {new_proj, file} = + if File.regular?(file) do + try do + Code.compiler_options(relative_paths: false) + _ = Code.load_file(file) + get() + after + Code.compiler_options(relative_paths: true) + else + ^old_proj -> Mix.raise "Could not find a Mix project at #{file}" + new_proj -> {new_proj, file} + end + else + push(nil, file, app) + {nil, "nofile"} + end - Mix.ProjectStack.write_cache(app, {new_proj, file}) + Mix.ProjectStack.write_cache({:app, app}, {new_proj, file}) new_proj end end defp default_config do - [build_per_environment: true, + [aliases: [], + build_embedded: false, + build_per_environment: true, + build_scm: Mix.SCM.Path, + config_path: "config/config.exs", + consolidate_protocols: true, default_task: "run", deps: [], deps_path: "deps", @@ -368,9 +622,11 @@ defmodule Mix.Project do erlc_include_path: "include", erlc_options: [:debug_info], lockfile: "mix.lock", - preferred_cli_env: %{"test" => :test}] + preferred_cli_env: [], + start_permanent: false] end + @private_config [:app_path, :build_scm, :env_path] defp get_project_config(nil), do: [] - defp get_project_config(atom), do: atom.project + defp get_project_config(atom), do: atom.project |> Keyword.drop(@private_config) end diff --git a/lib/mix/lib/mix/project_stack.ex b/lib/mix/lib/mix/project_stack.ex index ba8344055e9..8f4be415635 100644 --- a/lib/mix/lib/mix/project_stack.ex +++ b/lib/mix/lib/mix/project_stack.ex @@ -1,16 +1,15 @@ defmodule Mix.ProjectStack do - # Keeps the project stack. @moduledoc false @timeout 30_000 @typep file :: binary @typep config :: Keyword.t - @typep project :: {module, config, file} + @typep project :: %{name: module, config: config, file: file} @spec start_link :: {:ok, pid} def start_link() do - initial = %{stack: [], post_config: [], cache: HashDict.new} + initial = %{stack: [], post_config: [], cache: %{}} Agent.start_link fn -> initial end, name: __MODULE__ end @@ -19,27 +18,45 @@ defmodule Mix.ProjectStack do get_and_update fn %{stack: stack} = state -> # Consider the first children to always have io_done # because we don't need to print anything unless another - # project talks ahold of the shell. + # project takes ahold of the shell. io_done? 
= stack == [] config = Keyword.merge(config, state.post_config) project = %{name: module, config: config, file: file, pos: length(stack), - recursing?: false, io_done: io_done?, tasks: HashSet.new} + recursing?: false, io_done: io_done?, configured_applications: []} cond do file = find_project_named(module, stack) -> {{:error, file}, state} true -> - {:ok, %{state | post_config: [], stack: [project|state.stack]}} + {:ok, %{state | post_config: [], stack: [project | state.stack]}} end end end + @spec configured_applications([atom]) :: :ok + def configured_applications(apps) do + cast fn state -> + update_in state.stack, fn + [h | t] -> [%{h | configured_applications: apps} | t] + [] -> [] + end + end + end + + @spec configured_applications() :: [atom] + def configured_applications() do + get fn + %{stack: [h | _]} -> h.configured_applications + %{stack: []} -> [] + end + end + @spec pop() :: project | nil def pop do get_and_update fn %{stack: stack} = state -> case stack do - [h|t] -> {take(h), %{state | stack: t}} + [h | t] -> {take(h), %{state | stack: t}} [] -> {nil, state} end end @@ -49,12 +66,31 @@ defmodule Mix.ProjectStack do def peek do get fn %{stack: stack} -> case stack do - [h|_] -> take(h) + [h | _] -> take(h) [] -> nil end end end + @doc """ + Runs the given function in the recursing root. + """ + def root(fun) do + {top, file} = + get_and_update fn %{stack: stack} = state -> + {top, [mid | bottom]} = Enum.split_while(stack, &(not &1.recursing?)) + {{top, mid.file}, %{state | stack: [%{mid | recursing?: false} | bottom]}} + end + + try do + File.cd! Path.dirname(file), fun + after + cast fn %{stack: [mid | bottom]} = state -> + %{state | stack: top ++ [%{mid | recursing?: true} | bottom]} + end + end + end + @spec post_config(config) :: :ok def post_config(config) do cast fn state -> @@ -62,18 +98,18 @@ defmodule Mix.ProjectStack do end end - @spec print_app?() :: boolean - def print_app? do + @spec printable_app_name() :: atom | nil + def printable_app_name do get_and_update fn %{stack: stack} = state -> case stack do [] -> - {false, state} - [%{io_done: true}|_] -> - {false, state} - [h|t] -> + {nil, state} + [%{io_done: true} | _] -> + {nil, state} + [h | t] -> h = %{h | io_done: true} t = Enum.map(t, &%{&1 | io_done: false}) - {has_app?(h), %{state | stack: [h|t]}} + {h.config[:app], %{state | stack: [h | t]}} end end end @@ -86,36 +122,33 @@ defmodule Mix.ProjectStack do end @doc """ - Enables the recursion for the project at the top of the stack. - Returns true if recursion was enabled or false if the project - already had recursion enabled or there is no project in the stack. + Enables the recursion for the project at the top of the stack + during the given function. """ - @spec enable_recursion :: boolean - def enable_recursion do - get_and_update fn %{stack: stack} = state -> - case stack do - [h|t] -> - {not h.recursing?, %{state | stack: [%{h | recursing?: true}|t]}} - _ -> - {false, state} + @spec recur((... -> result)) :: result when result: var + def recur(fun) do + cast fn %{stack: [h | t]} = state -> + %{state | stack: [%{h | recursing?: true} | t]} + end + + try do + fun.() + after + cast fn %{stack: [h | t]} = state -> + %{state | stack: [%{h | recursing?: false} | t]} end end end @doc """ - Disables the recursion for the project in the stack. - Returns true if recursion was disabled or false if there - is no project or recursion was not enabled. + Returns the module that started the recursion. + + Returns nil if not recursive. 
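  A sketch of how this pairs with `recur/1`; the project module name is
  illustrative:

      Mix.ProjectStack.recur(fn ->
        Mix.ProjectStack.recursing()
        #=> MyUmbrella.Mixfile
      end)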
""" - @spec disable_recursion :: boolean - def disable_recursion do - get_and_update fn %{stack: stack} = state -> - case stack do - [h|t] -> - {h.recursing?, %{state | stack: [%{h | recursing?: false}|t]}} - _ -> - {false, state} - end + @spec recursing :: module | nil + def recursing do + get fn %{stack: stack} -> + Enum.find_value stack, & &1.recursing? and &1.name end end @@ -129,14 +162,15 @@ defmodule Mix.ProjectStack do @spec write_cache(term, term) :: :ok def write_cache(key, value) do cast fn state -> - %{state | cache: Dict.put(state.cache, key, value)} + %{state | cache: Map.put(state.cache, key, value)} end + value end @spec clear_cache :: :ok def clear_cache do cast fn state -> - %{state | cache: HashDict.new} + %{state | cache: %{}} end end @@ -151,10 +185,6 @@ defmodule Mix.ProjectStack do Map.take(h, [:name, :config, :file, :pos]) end - defp has_app?(%{config: config}) do - config[:app] - end - defp get_and_update(fun) do Agent.get_and_update __MODULE__, fun, @timeout end diff --git a/lib/mix/lib/mix/public_key.ex b/lib/mix/lib/mix/public_key.ex new file mode 100644 index 00000000000..396f4af5a9b --- /dev/null +++ b/lib/mix/lib/mix/public_key.ex @@ -0,0 +1,59 @@ +defmodule Mix.PublicKey do + @moduledoc false + + @in_memory_key """ + -----BEGIN PUBLIC KEY----- + MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAslPz1mAfyAvRv8W8xOdv + HQMbDJkDKfRhsL4JBGwGH7qw0xh+TbaUlNaM3pF+i8VUjS/4FeXjT/OAUEAHu5Y2 + rBVlx00QcH8Dpbyf+H73XiCs0MXnTSecqDgzx6i6NMi8knklHT7yHySHtuuPmPuN + Po8QTKolCKftwPE/iNDeyZfwufd+hTCoCQdoTVcB01SElfNtvKRtoKbx35q80IPr + rOcGsALmr58+bWqCTY/51kFeRxzrPJ5LdcLU/AebyWddD4IUfPDxk16jTiCagMWA + JPSwo8NUrWDIBbD+rEUp06y0ek276rG5Tzm/3Bma56RN/u6nAqBTBE8F2Hu2QBKj + lQIDAQAB + -----END PUBLIC KEY----- + """ + + @doc """ + Returns the filesystem path for public keys. + """ + def public_keys_path, do: Path.join(Mix.Utils.mix_home, "public_keys") + + @doc """ + Returns all public keys as a list. + """ + def public_keys do + path = public_keys_path() + + [{"in-memory public key for Elixir v#{System.version}", @in_memory_key}] ++ + case File.ls(path) do + {:ok, keys} -> Enum.map(keys, &{&1, File.read!(Path.join(path, &1))}) + {:error, _} -> [] + end + end + + @doc """ + Decodes a public key and raises if the key is invalid. + """ + def decode!(id, key) do + [rsa_public_key] = :public_key.pem_decode(key) + :public_key.pem_entry_decode(rsa_public_key) + rescue + _ -> + Mix.raise """ + Could not decode public key: #{id}. The public key contents are shown below. + + #{key} + + Public keys must be valid and be in the PEM format + """ + end + + @doc """ + Verifies the given binary has the proper signature using the system public keys. + """ + def verify(binary, hash, signature) do + Enum.any? public_keys(), fn {id, key} -> + :public_key.verify binary, hash, signature, decode!(id, key) + end + end +end diff --git a/lib/mix/lib/mix/rebar.ex b/lib/mix/lib/mix/rebar.ex index 3f2b2931029..96f3c482206 100644 --- a/lib/mix/lib/mix/rebar.ex +++ b/lib/mix/lib/mix/rebar.ex @@ -2,34 +2,42 @@ defmodule Mix.Rebar do @moduledoc false @doc """ - Returns the path supposed to host the local copy of rebar. + Returns the path supposed to host the local copy of `rebar`. """ - def local_rebar_path, do: Path.join(Mix.Utils.mix_home, "rebar") + def local_rebar_path(manager) do + Path.join(Mix.Utils.mix_home, Atom.to_string(manager)) + end @doc """ - Returns the path to the global copy of rebar, if one exists. 
+ Returns the path to the global copy of `rebar`, defined by the + environment variables `MIX_REBAR` or `MIX_REBAR3`. """ - def global_rebar_cmd do - wrap_cmd System.find_executable("rebar") + def global_rebar_cmd(manager) do + env = manager_to_env(manager) + if cmd = System.get_env(env) do + wrap_cmd(cmd) + end end @doc """ - Returns the path to the local copy of rebar, if one exists. + Returns the path to the local copy of `rebar`, if one exists. """ - def local_rebar_cmd do - rebar = local_rebar_path - wrap_cmd(if File.regular?(rebar), do: rebar) + def local_rebar_cmd(manager) do + cmd = local_rebar_path(manager) + if File.regular?(cmd) do + wrap_cmd(cmd) + end end @doc """ - Returns the path to the available rebar command. + Returns the path to the available `rebar` command. """ - def rebar_cmd do - global_rebar_cmd || local_rebar_cmd + def rebar_cmd(manager) do + global_rebar_cmd(manager) || local_rebar_cmd(manager) end @doc """ - Loads the rebar.config and evaluates rebar.config.script if it + Loads `rebar.config` and evaluates `rebar.config.script` if it exists in the given directory. """ def load_config(dir) do @@ -43,7 +51,7 @@ defmodule Mix.Rebar do [] {:error, error} -> reason = :file.format_error(error) - Mix.raise "Error consulting rebar config #{config_path}: #{reason}" + Mix.raise "Error consulting Rebar config #{inspect config_path}: #{reason}" end if File.exists?(script_path) do @@ -54,12 +62,34 @@ defmodule Mix.Rebar do end @doc """ - Parses the dependencies in given rebar.config to Mix's dependency format. + Serializes a Rebar config to a term file. + """ + def serialize_config(config) do + Enum.map(config, &[:io_lib.print(&1) | ".\n"]) + end + + @doc """ + Updates Rebar configuration to be more suitable for dependencies. + + Drops `warnings_as_errors` from `erl_opts`. + """ + def dependency_config(config) do + Enum.map(config, fn + {:erl_opts, opts} -> + {:erl_opts, List.delete(opts, :warnings_as_errors)} + other -> + other + end) + end + + @doc """ + Parses the dependencies in given `rebar.config` to Mix's dependency format. """ def deps(config) do + # We don't have to handle rebar3 profiles because dependencies + # are always in the default profile which cannot be customized if deps = config[:deps] do - deps_dir = config[:deps_dir] || "deps" - Enum.map(deps, &parse_dep(&1, deps_dir)) + Enum.map(deps, &parse_dep/1) else [] end @@ -67,68 +97,95 @@ defmodule Mix.Rebar do @doc """ Runs `fun` for the given config and for each `sub_dirs` in the - given rebar config. + given Rebar config. + + `sub_dirs` is only supported in Rebar 2. In Rebar 3, the equivalent + to umbrella apps cannot be used as dependencies, so we don't need + to worry about such cases in Mix. 
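  A sketch of how this is combined with `load_config/1` and `deps/1` to
  collect dependencies from the root config and every sub directory (the
  path is illustrative):

      "deps/my_rebar_dep"
      |> Mix.Rebar.load_config()
      |> Mix.Rebar.recur(&Mix.Rebar.deps/1)
      |> Enum.concat()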
""" - def recur(config, fun) when is_binary(config) do - recur(load_config(config), fun) + def recur(config, fun) do + subs = + (config[:sub_dirs] || []) + |> Enum.flat_map(&Path.wildcard(&1)) + |> Enum.filter(&File.dir?(&1)) + |> Enum.flat_map(&recur(load_config(&1), fun)) + + [fun.(config) | subs] end - def recur(config, fun) do - subs = (config[:sub_dirs] || []) - |> Enum.map(&Path.wildcard(&1)) - |> Enum.concat - |> Enum.filter(&File.dir?(&1)) - |> Enum.map(&recur(&1, fun)) - |> Enum.concat + # Translate a rebar dependency declaration to a mix declaration + # From http://www.rebar3.org/docs/dependencies#section-declaring-dependencies + defp parse_dep(app) when is_atom(app) do + {app, ">= 0.0.0"} + end - [fun.(config)|subs] + defp parse_dep({app, req}) when is_list(req) do + {app, List.to_string(req)} end - defp parse_dep({app, req}, deps_dir) do - {app, compile_req(req), [path: Path.join(deps_dir, Atom.to_string(app))]} + defp parse_dep({app, source}) when is_tuple(source) do + parse_dep({app, nil, source, []}) end - defp parse_dep({app, req, source}, deps_dir) do - parse_dep({app, req, source, []}, deps_dir) + defp parse_dep({app, req, source}) do + parse_dep({app, req, source, []}) end - defp parse_dep({app, req, source, opts}, _deps_dir) do + defp parse_dep({app, req, source, opts}) do + source = parse_source(source) + + compile = + if :proplists.get_value(:raw, opts, false), + do: [compile: false], + else: [] + + {app, compile_req(req), source ++ compile} + end + + defp parse_source({:pkg, pkg}) do + [hex: pkg] + end + defp parse_source(source) do [scm, url | source] = Tuple.to_list(source) - mix_opts = [{scm, to_string(url)}] ref = case source do - [""|_] -> [branch: "HEAD"] - [{:branch, branch}|_] -> [branch: to_string(branch)] - [{:tag, tag}|_] -> [tag: to_string(tag)] - [ref|_] -> [ref: to_string(ref)] - _ -> [] + ["" | _] -> [branch: "HEAD"] + [{:branch, branch} | _] -> [branch: to_string(branch)] + [{:tag, tag} | _] -> [tag: to_string(tag)] + [{:ref, ref} | _] -> [ref: to_string(ref)] + [ref | _] -> [ref: to_string(ref)] + _ -> [] end - mix_opts = mix_opts ++ ref - - if :proplists.get_value(:raw, opts, false) do - mix_opts = mix_opts ++ [compile: false] - end - - {app, compile_req(req), mix_opts} + [{scm, to_string(url)}] ++ ref end - defp parse_dep(app, deps_dir) do - parse_dep({app, ".*"}, deps_dir) + defp compile_req(nil) do + ">= 0.0.0" end defp compile_req(req) do - case Regex.compile to_string(req) do - {:ok, re} -> - re - {:error, reason} -> - Mix.raise "Unable to compile version regex: \"#{req}\", #{reason}" + req = List.to_string(req) + + case Version.parse_requirement(req) do + {:ok, _} -> + req + :error -> + case Regex.compile(req) do + {:ok, re} -> + re + {:error, reason} -> + Mix.raise "Unable to compile version regex: #{inspect req}, #{reason}" + end end end + defp manager_to_env(:rebar), do: "MIX_REBAR" + defp manager_to_env(:rebar3), do: "MIX_REBAR3" + defp eval_script(script_path, config) do - script = Path.basename(script_path) |> String.to_char_list + script = Path.basename(script_path) |> String.to_charlist result = File.cd!(Path.dirname(script_path), fn -> :file.script(script, eval_binds(CONFIG: config, SCRIPT: script)) @@ -139,26 +196,66 @@ defmodule Mix.Rebar do config {:error, error} -> reason = :file.format_error(error) - Mix.shell.error("Error evaluating rebar config script #{script_path}: #{reason}") - Mix.shell.error("You may solve this issue by adding rebar as a dependency to your project") - Mix.shell.error("Any dependency defined in the script 
won't be available " <> + Mix.shell.error("Error evaluating Rebar config script #{script_path}:#{reason}") + Mix.shell.error("Any dependencies defined in the script won't be available " <> "unless you add them to your Mix project") config end end defp eval_binds(binds) do - Enum.reduce(binds, :erl_eval.new_bindings, fn ({k, v}, binds) -> + Enum.reduce(binds, :erl_eval.new_bindings, fn({k, v}, binds) -> :erl_eval.add_binding(k, v, binds) end) end - defp wrap_cmd(nil), do: nil defp wrap_cmd(rebar) do - if match?({:win32, _}, :os.type) and not String.ends_with?(rebar,".cmd") do - "escript.exe #{rebar}" - else - rebar + cond do + not match?({:win32, _}, :os.type) -> + rebar + String.ends_with?(rebar, ".cmd") -> + "\"#{String.replace(rebar, "/", "\\")}\"" + true -> + "escript.exe \"#{rebar}\"" end end + + @doc """ + Applies the given overrides for app config. + """ + def apply_overrides(app, config, overrides) do + # Inefficient. We want the order we get here though. + config = + Enum.reduce(overrides, config, fn + {:override, overrides}, config -> + Enum.reduce(overrides, config, fn {key, value}, config -> + Keyword.put(config, key, value) + end) + _, config -> + config + end) + + config = + Enum.reduce(overrides, config, fn + {:override, ^app, overrides}, config -> + Enum.reduce(overrides, config, fn {key, value}, config -> + Keyword.put(config, key, value) + end) + _, config -> + config + end) + + config = + Enum.reduce(overrides, config, fn + {:add, ^app, overrides}, config -> + Enum.reduce(overrides, config, fn {key, value}, config -> + old_value = Keyword.get(config, key, []) + Keyword.put(config, key, value ++ old_value) + end) + _, config -> + config + end) + + Keyword.update(config, :overrides, overrides, &(overrides ++ &1)) + end end diff --git a/lib/mix/lib/mix/remote_converger.ex b/lib/mix/lib/mix/remote_converger.ex index e9de940ffa6..c228460d251 100644 --- a/lib/mix/lib/mix/remote_converger.ex +++ b/lib/mix/lib/mix/remote_converger.ex @@ -1,42 +1,46 @@ defmodule Mix.RemoteConverger do @moduledoc false - use Behaviour # A remote converger returns updated dependencies with - # extra information that can be used during mix's converging. + # extra information that can be used during Mix's converging. # Useful for things like external package managers @doc """ - Return `true` if given dependency is handled by + Returns `true` if given dependency is handled by remote converger. """ - defcallback remote?(Mix.Dep.t) :: boolean + @callback remote?(Mix.Dep.t) :: boolean @doc """ - Run the remote converger. + Runs the remote converger. Return updated lock. """ - defcallback converge([Mix.Dep.t], map) :: map + @callback converge([Mix.Dep.t], map) :: map @doc """ Returns child dependencies the converger has for the dependency. This list should filter the loaded children. """ - defcallback deps(Mix.Dep.t, map) :: [atom] + @callback deps(Mix.Dep.t, map) :: [atom] + @doc """ + Called after all convergers have run so that the remote + converger can perform clean up. + """ + @callback post_converge() :: any @doc """ - Get registered remote converger. + Gets registered remote converger. """ def get do - Application.get_env(:mix, :remote_converger) + Mix.State.get(:remote_converger) end @doc """ - Register a remote converger. + Registers a remote converger. 
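  A sketch of how a package manager plugs in; the module name is
  illustrative:

      Mix.RemoteConverger.register(MyPackageManager.RemoteConverger)
      Mix.RemoteConverger.get()
      #=> MyPackageManager.RemoteConverger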
""" def register(mod) when is_atom(mod) do - Application.put_env(:mix, :remote_converger, mod) + Mix.State.put(:remote_converger, mod) end end diff --git a/lib/mix/lib/mix/scm.ex b/lib/mix/lib/mix/scm.ex index f83df6ec052..b5d1571c5bc 100644 --- a/lib/mix/lib/mix/scm.ex +++ b/lib/mix/lib/mix/scm.ex @@ -1,22 +1,20 @@ defmodule Mix.SCM do - use Behaviour - - @type opts :: Keyword.t - @moduledoc """ This module provides helper functions and defines the - behaviour required by any SCM used by mix. + behaviour required by any source code manager (SCM) used by Mix. """ + @type opts :: Keyword.t + @doc """ Returns a boolean if the dependency can be fetched or it is meant to be previously available in the filesystem. - Local dependencies (i.e. non fetchable ones) are automatically + Local dependencies (i.e. non-fetchable ones) are automatically recompiled every time the parent project is compiled. """ - defcallback fetchable? :: boolean + @callback fetchable? :: boolean @doc """ Returns a string representing the SCM. This is used @@ -24,7 +22,7 @@ defmodule Mix.SCM do so the amount of information should be concise and easy to spot. """ - defcallback format(opts) :: String.t + @callback format(opts) :: String.t @doc """ Returns a string representing the SCM. This is used @@ -34,7 +32,7 @@ defmodule Mix.SCM do If nil is returned, it means no lock information is available. """ - defcallback format_lock(opts) :: String.t | nil + @callback format_lock(opts) :: String.t | nil @doc """ This behaviour function receives a keyword list of `opts` @@ -47,15 +45,15 @@ defmodule Mix.SCM do Each registered SCM will be asked if they consume this dependency, receiving `[github: "foo/bar"]` as argument. Since this option makes sense for the Git SCM, it will return an update list of options - while other SCMs would simply return nil. + while other SCMs would simply return `nil`. """ - defcallback accepts_options(app :: atom, opts) :: opts | nil + @callback accepts_options(app :: atom, opts) :: opts | nil @doc """ This behaviour function returns a boolean if the dependency is available. """ - defcallback checked_out?(opts) :: boolean + @callback checked_out?(opts) :: boolean @doc """ This behaviour function checks out dependencies. @@ -67,7 +65,7 @@ defmodule Mix.SCM do It must return the current lock. """ - defcallback checkout(opts) :: any + @callback checkout(opts) :: any @doc """ This behaviour function updates dependencies. It may be @@ -79,12 +77,14 @@ defmodule Mix.SCM do It must return the current lock. """ - defcallback update(opts) :: any + @callback update(opts) :: any @doc """ This behaviour function checks the status of the lock. In particular, it checks if the revision stored in the lock - is the same as the repository it is currently in. It may return: + is the same as the repository it is currently in. + + It may return: * `:mismatch` - if the lock doesn't match and we need to simply move to the latest lock @@ -102,37 +102,42 @@ defmodule Mix.SCM do structural check is required. A structural mismatch should always return `:outdated`. """ - defcallback lock_status(opts) :: :mismatch | :outdated | :ok + @callback lock_status(opts) :: :mismatch | :outdated | :nolock | :ok @doc """ - Receives two options and must return true if they refer to the + Receives two options and must return `true` if they refer to the same repository. The options are guaranteed to belong to the same SCM. 
""" - defcallback equal?(opts1 :: opts, opts2 :: opts) :: boolean + @callback equal?(opts1 :: opts, opts2 :: opts) :: boolean + + @doc """ + Returns the usable managers for the dependency. This can be used + if the SCM has extra knowledge of the dependency, otherwise it + should return an empty list. + """ + @callback managers(opts) :: [atom] @doc """ Returns all available SCMs. Each SCM is tried in order until a matching one is found. """ def available do - {:ok, scm} = Application.fetch_env(:mix, :scm) + {:ok, scm} = Mix.State.fetch(:scm) scm end @doc """ - Prepend the given SCM module to the list of available SCMs. + Prepends the given SCM module to the list of available SCMs. """ def prepend(mod) when is_atom(mod) do - available = Enum.reject(available(), &(&1 == mod)) - Application.put_env(:mix, :scm, [mod|available]) + Mix.State.prepend(:scm, mod) end @doc """ - Append the given SCM module to the list of available SCMs. + Appends the given SCM module to the list of available SCMs. """ def append(mod) when is_atom(mod) do - available = Enum.reject(available(), &(&1 == mod)) - Application.put_env(:mix, :scm, available ++ [mod]) + Mix.State.append(:scm, mod) end end diff --git a/lib/mix/lib/mix/scm/git.ex b/lib/mix/lib/mix/scm/git.ex index 686c95ec050..0c5844e1570 100644 --- a/lib/mix/lib/mix/scm/git.ex +++ b/lib/mix/lib/mix/scm/git.ex @@ -25,108 +25,180 @@ defmodule Mix.SCM.Git do end def accepts_options(_app, opts) do + opts = + opts + |> Keyword.put(:checkout, opts[:dest]) + |> sparse_opts() + cond do gh = opts[:github] -> - opts |> Keyword.delete(:github) |> Keyword.put(:git, "git://github.com/#{gh}.git") + opts + |> Keyword.delete(:github) + |> Keyword.put(:git, "https://github.com/#{gh}.git") + |> validate_git_options opts[:git] -> opts + |> validate_git_options true -> nil end end def checked_out?(opts) do - # Are we inside a git repository? - File.regular?(Path.join(opts[:dest], ".git/HEAD")) + # Are we inside a Git repository? + opts[:checkout] + |> Path.join(".git/HEAD") + |> File.regular? end def lock_status(opts) do - assert_git + assert_git!() + lock = opts[:lock] - case opts[:lock] do - {:git, lock_repo, lock_rev, lock_opts} -> - File.cd!(opts[:dest], fn -> - rev_info = get_rev_info - cond do - lock_repo != opts[:git] -> :outdated - lock_opts != get_lock_opts(opts) -> :outdated - lock_rev != rev_info[:rev] -> :mismatch - lock_repo != rev_info[:origin] -> :outdated - true -> :ok + cond do + lock_rev = get_lock_rev(lock, opts) -> + File.cd!(opts[:checkout], fn -> + %{origin: origin, rev: rev} = get_rev_info() + if get_lock_repo(lock) == origin and lock_rev == rev do + :ok + else + :mismatch end end) - nil -> + is_nil(lock) -> :mismatch - _ -> + true -> :outdated end end def equal?(opts1, opts2) do - opts1[:git] == opts2[:git] && + opts1[:git] == opts2[:git] and get_lock_opts(opts1) == get_lock_opts(opts2) end - def checkout(opts) do - assert_git - - path = opts[:dest] - location = opts[:git] + def managers(_opts) do + [] + end + def checkout(opts) do + assert_git!() + path = opts[:checkout] File.rm_rf!(path) - command = ~s(git clone --no-checkout --progress "#{location}" "#{path}") - - run_cmd_or_raise(command) - File.cd! path, fn -> do_checkout(opts) end + File.mkdir_p!(path) + File.cd!(path, fn -> + git!("init --quiet") + git!("--git-dir=.git remote add origin \"#{opts[:git]}\"") + checkout(path, opts) + end) end def update(opts) do - assert_git + assert_git!() + path = opts[:checkout] + File.cd! path, fn -> checkout(path, opts) end + end - File.cd! 
opts[:dest], fn -> - # Ensures origin is set the lock repo - location = opts[:git] - update_origin(location) + defp checkout(_path, opts) do + # Set configuration + sparse_toggle(opts) + update_origin(opts[:git]) - command = "git --git-dir=.git fetch --force" + # Fetch external data + [ + "--git-dir=.git fetch --force --quiet", + progress_switch(git_version()), + tags_switch(opts[:tag]) + ] + |> IO.iodata_to_binary() + |> git!() - if {1, 7, 1} <= git_version() do - command = command <> " --progress" - end + # Migrate the Git repo + rev = get_lock_rev(opts[:lock], opts) || get_opts_rev(opts) + git!("--git-dir=.git checkout --quiet #{rev}") - if opts[:tag] do - command = command <> " --tags" - end + if opts[:submodules] do + git!("--git-dir=.git submodule update --init --recursive") + end + + # Get the new repo lock + get_lock(opts) + end - run_cmd_or_raise(command) - do_checkout(opts) + defp sparse_opts(opts) do + if opts[:sparse] do + dest = Path.join(opts[:dest], opts[:sparse]) + Keyword.put(opts, :dest, dest) + else + opts end end + defp sparse_toggle(opts) do + cond do + sparse = opts[:sparse] -> + sparse_check(git_version()) + git!("--git-dir=.git config core.sparsecheckout true") + File.mkdir_p!(".git/info") + File.write!(".git/info/sparse-checkout", sparse) + File.exists?(".git/info/sparse-checkout") -> + File.write!(".git/info/sparse-checkout", "*") + git!("--git-dir=.git read-tree -mu HEAD") + git!("--git-dir=.git config core.sparsecheckout false") + File.rm(".git/info/sparse-checkout") + true -> + :ok + end + end + + defp sparse_check(version) do + unless {1, 7, 4} <= version do + version = version |> Tuple.to_list |> Enum.join(".") + Mix.raise "Git >= 1.7.4 is required to use sparse checkout. " <> + "You are running version #{version}" + end + end + + defp progress_switch(version) do + if {1, 7, 1} <= version, do: " --progress", else: "" + end + + defp tags_switch(nil), do: "" + defp tags_switch(_), do: " --tags" + ## Helpers - defp do_checkout(opts) do - ref = get_lock_rev(opts[:lock]) || get_opts_rev(opts) - run_cmd_or_raise "git --git-dir=.git checkout --quiet #{ref}" + defp validate_git_options(opts) do + err = "You should specify only one of branch, ref or tag, and only once. 
" <> + "Error on Git dependency: #{opts[:git]}" + validate_single_uniq(opts, [:branch, :ref, :tag], err) + end - if opts[:submodules] do - run_cmd_or_raise "git --git-dir=.git submodule update --init --recursive" + defp validate_single_uniq(opts, take, error) do + case Keyword.take(opts, take) do + [] -> opts + [_] -> opts + _ -> Mix.raise error end - - get_lock(opts) end defp get_lock(opts) do - rev_info = get_rev_info - {:git, opts[:git], rev_info[:rev], get_lock_opts(opts)} + %{rev: rev} = get_rev_info() + {:git, opts[:git], rev, get_lock_opts(opts)} end - defp get_lock_rev({:git, _repo, lock, _opts}) when is_binary(lock), do: lock - defp get_lock_rev(_), do: nil + defp get_lock_repo({:git, repo, _, _}), do: repo + + defp get_lock_rev({:git, repo, lock, lock_opts}, opts) when is_binary(lock) do + if repo == opts[:git] and lock_opts == get_lock_opts(opts) do + lock + end + end + defp get_lock_rev(_, _), do: nil defp get_lock_opts(opts) do - lock_opts = Enum.find_value [:branch, :ref, :tag], &List.keyfind(opts, &1, 0) - lock_opts = List.wrap(lock_opts) + lock_opts = Keyword.take(opts, [:branch, :ref, :tag, :sparse]) + if opts[:submodules] do lock_opts ++ [submodules: true] else @@ -144,52 +216,64 @@ defmodule Mix.SCM.Git do defp get_rev_info do destructure [origin, rev], - System.cmd('git --git-dir=.git config remote.origin.url && git --git-dir=.git rev-parse --verify --quiet HEAD') + :os.cmd('git --git-dir=.git config remote.origin.url && git --git-dir=.git rev-parse --verify --quiet HEAD') |> IO.iodata_to_binary |> String.split("\n", trim: true) - [ origin: origin, rev: rev ] + %{origin: origin, rev: rev} end defp update_origin(location) do - System.cmd('git --git-dir=.git config remote.origin.url #{location}') + git!(~s(--git-dir=.git config remote.origin.url "#{location}")) + :ok end - defp run_cmd_or_raise(command) do - if Mix.shell.cmd(command) != 0 do - Mix.raise "Command `#{command}` failed" + defp git!(command) do + if Mix.shell.cmd("git " <> command) != 0 do + Mix.raise "Command \"git #{command}\" failed" end - true + :ok end - defp assert_git do - case Application.fetch_env(:mix, :git_available) do + defp assert_git! do + case Mix.State.fetch(:git_available) do {:ok, true} -> :ok :error -> - if :os.find_executable('git') == false do - Mix.raise "Error fetching/updating Git repository: the `git` " <> + if System.find_executable("git") do + Mix.State.put(:git_available, true) + else + Mix.raise "Error fetching/updating Git repository: the \"git\" " <> "executable is not available in your PATH. Please install " <> "Git on this machine or pass --no-deps-check if you want to " <> "run a previously built application on a system without Git." 
- else - Application.put_env(:mix, :git_available, true) end end end - defp git_version do - case Application.fetch_env(:mix, :git_version) do + def git_version do + case Mix.State.fetch(:git_version) do {:ok, version} -> version :error -> - "git version " <> version = String.strip System.cmd("git --version") - version = String.split(version, ".") - |> Enum.take(3) - |> Enum.map(&String.to_integer(&1)) - |> List.to_tuple + version = + :os.cmd('git --version') + |> IO.iodata_to_binary + |> parse_version - Application.put_env(:mix, :git_version, version) + Mix.State.put(:git_version, version) version end end + + defp parse_version("git version " <> version) do + String.split(version, ".") + |> Enum.take(3) + |> Enum.map(&to_integer/1) + |> List.to_tuple + end + + defp to_integer(string) do + {int, _} = Integer.parse(string) + int + end end diff --git a/lib/mix/lib/mix/scm/path.ex b/lib/mix/lib/mix/scm/path.ex index 7f544d748bf..6a7173f8b72 100644 --- a/lib/mix/lib/mix/scm/path.ex +++ b/lib/mix/lib/mix/scm/path.ex @@ -17,7 +17,7 @@ defmodule Mix.SCM.Path do def accepts_options(app, opts) do cond do raw = opts[:path] -> - Keyword.put opts, :dest, Path.expand(raw) + Keyword.put(opts, :dest, Path.expand(raw)) opts[:in_umbrella] -> path = "../#{app}" @@ -42,6 +42,11 @@ defmodule Mix.SCM.Path do opts1[:dest] == opts2[:dest] end + def managers(_opts) do + [] + end + + @spec checkout(list) :: no_return def checkout(opts) do path = Path.relative_to_cwd opts[:dest] Mix.raise "Cannot checkout path dependency, expected a dependency at #{path}" diff --git a/lib/mix/lib/mix/shell.ex b/lib/mix/lib/mix/shell.ex index 6a65f83bab9..b26b03e43c0 100644 --- a/lib/mix/lib/mix/shell.ex +++ b/lib/mix/lib/mix/shell.ex @@ -1,59 +1,96 @@ defmodule Mix.Shell do @moduledoc """ - Defines Mix.Shell contract. + Defines `Mix.Shell` contract. """ - use Behaviour - @doc """ - Informs the given message. + Prints the given message to the shell. """ - defcallback info(message :: String.t) :: any + @callback info(message :: IO.ANSI.ansidata) :: any @doc """ - Warns about the given error message. + Prints the given error to the shell. """ - defcallback error(message :: String.t) :: any + @callback error(message :: IO.ANSI.ansidata) :: any @doc """ Prompts the user for input. """ - defcallback prompt(message :: String.t) :: String.t + @callback prompt(message :: String.t) :: String.t @doc """ - Asks the user for confirmation. + Prompts the user for confirmation. """ - defcallback yes?(message :: String.t) :: boolean + @callback yes?(message :: String.t) :: boolean @doc """ - Executes the given command and returns - its exit status. + Executes the given command and returns its exit status. + """ + @callback cmd(command :: String.t) :: integer + + @doc """ + Executes the given command and returns its exit status. + + ## Options + + * `:print_app` - when `false`, does not print the app name + when the command outputs something + + * `:stderr_to_stdout` - when `false`, does not redirect + stderr to stdout + + * `:quiet` - when `true`, do not print the command output + + * `:env` - environment options to the executed command + """ - defcallback cmd(command :: String.t) :: integer + @callback cmd(command :: String.t, options :: Keyword.t) :: integer @doc """ - Prints the current application to shell if + Prints the current application to the shell if it was not printed yet. """ - defcallback print_app() :: any + @callback print_app() :: any @doc """ - Returns if we should print application name to shell. + Returns the printable app name. 
+ + This function returns the current application name, + but only if the application name should be printed. Calling this function automatically toggles its value - to false. + to `false` until the current project is re-entered. The + goal is to avoid printing the application name + multiple times. """ - def print_app? do - Mix.ProjectStack.print_app? + def printable_app_name do + Mix.ProjectStack.printable_app_name end @doc """ An implementation of the command callback that is shared across different shells. """ - def cmd(command, callback) do + def cmd(command, options \\ [], callback) do + env = validate_env(Keyword.get(options, :env, [])) + + args = + if Keyword.get(options, :stderr_to_stdout, true) do + [:stderr_to_stdout] + else + [] + end + + callback = + if Keyword.get(options, :quiet, false) do + fn x -> x end + else + callback + end + port = Port.open({:spawn, shell_command(command)}, - [:stream, :binary, :exit_status, :hide, :use_stdio, :stderr_to_stdout]) + [:stream, :binary, :exit_status, :hide, :use_stdio, {:env, env} | args]) + do_cmd(port, callback) end @@ -74,16 +111,27 @@ defmodule Mix.Shell do {:unix, _} -> command = command |> String.replace("\"", "\\\"") - |> :binary.bin_to_list + |> String.to_charlist 'sh -c "' ++ command ++ '"' {:win32, osname} -> - command = :binary.bin_to_list(command) + command = '"' ++ String.to_charlist(command) ++ '"' case {System.get_env("COMSPEC"), osname} do - {nil, :windows} -> 'command.com /c ' ++ command - {nil, _} -> 'cmd /c ' ++ command - {cmd, _} -> '#{cmd} /c ' ++ command + {nil, :windows} -> 'command.com /s /c ' ++ command + {nil, _} -> 'cmd /s /c ' ++ command + {cmd, _} -> '#{cmd} /s /c ' ++ command end end end + + defp validate_env(enum) do + Enum.map enum, fn + {k, nil} -> + {String.to_charlist(k), false} + {k, v} -> + {String.to_charlist(k), String.to_charlist(v)} + other -> + raise ArgumentError, "invalid environment key-value #{inspect other}" + end + end end diff --git a/lib/mix/lib/mix/shell/io.ex b/lib/mix/lib/mix/shell/io.ex index 48631000aa6..e001983feaa 100644 --- a/lib/mix/lib/mix/shell/io.ex +++ b/lib/mix/lib/mix/shell/io.ex @@ -1,18 +1,19 @@ defmodule Mix.Shell.IO do @moduledoc """ This is Mix's default shell. + It simply prints messages to stdio and stderr. """ @behaviour Mix.Shell @doc """ - Prints the currently running application if it + Prints the current application to the shell if it was not printed yet. """ def print_app do - if Mix.Shell.print_app? do - IO.puts "==> #{Mix.Project.config[:app]}" + if name = Mix.Shell.printable_app_name do + IO.puts "==> #{name}" end end @@ -20,49 +21,53 @@ defmodule Mix.Shell.IO do Executes the given command and prints its output to stdout as it comes. """ - def cmd(command) do - Mix.Shell.cmd(command, &IO.write(&1)) + def cmd(command, opts \\ []) do + print_app? = Keyword.get(opts, :print_app, true) + Mix.Shell.cmd(command, opts, fn data -> + if print_app?, do: print_app() + IO.write(data) + end) end @doc """ - Writes a message to the shell followed by new line. + Prints the given message to the shell followed by a newline. """ def info(message) do - print_app - IO.puts IO.ANSI.escape(message) + print_app() + IO.puts IO.ANSI.format message end @doc """ - Writes an error message to the shell followed by new line. + Prints the given error to the shell followed by a newline. 
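A brief usage sketch of the shell calls described above (the command and option values are made up for illustration):

    Mix.shell.info([:green, "compiled successfully"])
    Mix.shell.error("something went wrong")
    Mix.shell.cmd("echo hello", print_app: false, quiet: true)
    #=> 0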
""" def error(message) do - print_app - IO.puts :stderr, IO.ANSI.escape "%{red,bright}#{message}" + print_app() + IO.puts :stderr, IO.ANSI.format(red(message)) end @doc """ - Writes a message shell followed by prompting the user for - input. Input will be consumed until enter is pressed. + Prints a message and prompts the user for input. + + Input will be consumed until Enter is pressed. """ def prompt(message) do - print_app - IO.gets IO.ANSI.escape(message <> " ") + print_app() + IO.gets(message <> " ") end @doc """ - Receives a message and asks the user if he wants to proceed. - He must press enter or type anything that matches the a "yes" - regex `~r/^Y(es)?$/i`. + Prints a message and asks the user if they want to proceed. + + The user must press Enter or type one of "y", "yes", "Y", "YES" or + "Yes". """ def yes?(message) do - print_app - got_yes? IO.gets(message <> IO.ANSI.escape(" [Yn] ")) + print_app() + answer = IO.gets(message <> " [Yn] ") + is_binary(answer) and String.trim(answer) in ["", "y", "Y", "yes", "YES", "Yes"] end - defp got_yes?(answer) when is_binary(answer) do - answer =~ ~r/^(Y(es)?)?$/i + defp red(message) do + [:red, :bright, message] end - - # The io server may return :eof or :error - defp got_yes?(_), do: false end diff --git a/lib/mix/lib/mix/shell/process.ex b/lib/mix/lib/mix/shell/process.ex index 1194979156e..e1b2c84240c 100644 --- a/lib/mix/lib/mix/shell/process.ex +++ b/lib/mix/lib/mix/shell/process.ex @@ -1,24 +1,40 @@ defmodule Mix.Shell.Process do @moduledoc """ - This is a Mix shell that uses the current process mailbox - for communication instead of IO. + Mix shell that uses the current process mailbox for communication. - When a developer calls `info("hello")`, the following - message will be sent to the current process: + + This module provides a Mix shell implementation that uses + the current process mailbox for communication instead of IO. + + As an example, when `Mix.shell.info("hello")` is called, + the following message will be sent to the calling process: {:mix_shell, :info, ["hello"]} This is mainly useful in tests, allowing us to assert - if given messages were received or not. Since we need - to guarantee a clean slate between tests, there - is also a `flush/1` function responsible for flushing all - `:mix_shell` related messages from the process inbox. + if given messages were received or not instead of performing + checks on some captured IO. Since we need to guarantee a clean + slate between tests, there is also a `flush/1` function + responsible for flushing all `:mix_shell` related messages + from the process inbox. + + ## Examples + + Mix.shell.info "hello" + receive do {:mix_shell, :info, [msg]} -> msg end + #=> "hello" + + send self(), {:mix_shell_input, :prompt, "Pretty cool"} + Mix.shell.prompt?("How cool was that?!") + #=> "Pretty cool" + """ @behaviour Mix.Shell @doc """ - Flush all `:mix_shell` and `:mix_shell_input` messages from the current process. + Flushes all `:mix_shell` and `:mix_shell_input` messages from the current process. + If a callback is given, it is invoked for each received message. ## Examples @@ -40,12 +56,12 @@ defmodule Mix.Shell.Process do end @doc """ - Prints the currently running application if it + Prints the current application if it was not printed yet. """ def print_app do - if Mix.Shell.print_app? 
do - send self, {:mix_shell, :info, ["==> #{Mix.Project.config[:app]}"]} + if name = Mix.Shell.printable_app_name do + send self(), {:mix_shell, :info, ["==> #{name}"]} end end @@ -53,9 +69,11 @@ defmodule Mix.Shell.Process do Executes the given command and forwards its messages to the current process. """ - def cmd(command) do - Mix.Shell.cmd(command, fn(data) -> - send self, {:mix_shell, :run, [data]} + def cmd(command, opts \\ []) do + print_app? = Keyword.get(opts, :print_app, true) + Mix.Shell.cmd(command, opts, fn(data) -> + if print_app?, do: print_app() + send self(), {:mix_shell, :run, [data]} end) end @@ -63,55 +81,78 @@ defmodule Mix.Shell.Process do Forwards the message to the current process. """ def info(message) do - print_app - send self, {:mix_shell, :info, [IO.ANSI.escape(message, false)]} + print_app() + send self(), {:mix_shell, :info, [format(message)]} end @doc """ - Forwards the message to the current process. + Forwards the error to the current process. """ def error(message) do - print_app - send self, {:mix_shell, :error, [IO.ANSI.escape(message, false)]} + print_app() + send self(), {:mix_shell, :error, [format(message)]} + end + + defp format(message) do + message |> IO.ANSI.format(false) |> IO.iodata_to_binary end @doc """ Forwards the message to the current process. + It also checks the inbox for an input message matching: {:mix_shell_input, :prompt, value} If one does not exist, it will abort since there was no shell - process inputs given. Value must be a string. + process inputs given. `value` must be a string. + + ## Examples + + The following will answer with `"Meg"` to the prompt + `"What's your name?"`: + + # The response is sent before calling prompt/1 so that prompt/1 can read it + send self(), {:mix_shell_input, :prompt, "Meg"} + Mix.shell.prompt("What's your name?") + """ def prompt(message) do - print_app - send self, {:mix_shell, :prompt, [IO.ANSI.escape(message, false)]} + print_app() + send self(), {:mix_shell, :prompt, [message]} receive do {:mix_shell_input, :prompt, response} -> response after - 0 -> raise "No shell process input given for prompt/1" + 0 -> raise "no shell process input given for prompt/1" end end @doc """ Forwards the message to the current process. + It also checks the inbox for an input message matching: {:mix_shell_input, :yes?, value} If one does not exist, it will abort since there was no shell - process inputs given. Value must be `true` or `false`. + process inputs given. `value` must be `true` or `false`. + + ## Example + + # Send the response to self() first so that yes?/1 will be able to read it + send self(), {:mix_shell_input, :yes?, true} + Mix.shell.yes?("Are you sure you want to continue?") + """ def yes?(message) do - print_app - send self, {:mix_shell, :yes?, [IO.ANSI.escape(message, false)]} + print_app() + send self(), {:mix_shell, :yes?, [message]} receive do {:mix_shell_input, :yes?, response} -> response after - 0 -> raise "No shell process input given for yes?/1" + 0 -> raise "no shell process input given for yes?/1" end end end diff --git a/lib/mix/lib/mix/shell/quiet.ex b/lib/mix/lib/mix/shell/quiet.ex new file mode 100644 index 00000000000..0bb04ffcddd --- /dev/null +++ b/lib/mix/lib/mix/shell/quiet.ex @@ -0,0 +1,48 @@ +defmodule Mix.Shell.Quiet do + @moduledoc """ + This is Mix's default shell when the `MIX_QUIET` environment + variable is set. + + It's just like `Mix.Shell.IO`, but prints far less. 
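For example, a script could opt into this shell explicitly; normally setting `MIX_QUIET` selects it automatically (a sketch):

    Mix.shell(Mix.Shell.Quiet)
    Mix.shell.info("this message is dropped")
    Mix.shell.cmd("echo hello")   # runs the command, discards its output, returns the exit status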
+ """ + + @behaviour Mix.Shell + + @doc """ + Prints the current application if it + was not printed yet. + """ + defdelegate print_app, to: Mix.Shell.IO + + @doc """ + Executes the given command quietly without outputting anything. + """ + def cmd(command, opts \\ []) do + Mix.Shell.cmd(command, opts, fn data -> data end) + end + + @doc """ + Writes nothing to the shell. + """ + def info(_message), do: nil + + @doc """ + Prints the error to the shell followed by a newline. + """ + defdelegate error(message), to: Mix.Shell.IO + + @doc """ + Prints a message and prompts the user for input. + + Input will be consumed until Enter is pressed. + """ + defdelegate prompt(message), to: Mix.Shell.IO + + @doc """ + Prints a message and asks the user if they want to proceed. + + The user must press Enter or type one of "y", "yes", "Y", "YES" or + "Yes". + """ + defdelegate yes?(message), to: Mix.Shell.IO +end diff --git a/lib/mix/lib/mix/state.ex b/lib/mix/lib/mix/state.ex new file mode 100644 index 00000000000..906ef5758f1 --- /dev/null +++ b/lib/mix/lib/mix/state.ex @@ -0,0 +1,34 @@ +defmodule Mix.State do + @moduledoc false + @name __MODULE__ + + def start_link() do + Agent.start_link(__MODULE__, :init, [], [name: @name]) + end + + def init() do + %{shell: Mix.Shell.IO, + env: String.to_atom(System.get_env("MIX_ENV") || "dev"), + scm: [Mix.SCM.Git, Mix.SCM.Path]} + end + + def fetch(key) do + Agent.get(@name, Map, :fetch, [key]) + end + + def get(key, default \\ nil) do + Agent.get(@name, Map, :get, [key, default]) + end + + def put(key, value) do + Agent.update(@name, Map, :put, [key, value]) + end + + def prepend(key, value) do + Agent.update(@name, Map, :update, [key, [value], &[value | List.delete(&1, value)]]) + end + + def append(key, value) do + Agent.update(@name, Map, :update, [key, [value], &(List.delete(&1, value) ++ [value])]) + end +end diff --git a/lib/mix/lib/mix/task.ex b/lib/mix/lib/mix/task.ex index e5eb304dcc9..9f8eca9918b 100644 --- a/lib/mix/lib/mix/task.ex +++ b/lib/mix/lib/mix/task.ex @@ -1,6 +1,4 @@ defmodule Mix.Task do - use Behaviour - @moduledoc """ A simple module that provides conveniences for creating, loading and manipulating tasks. @@ -9,40 +7,49 @@ defmodule Mix.Task do in a module starting with `Mix.Tasks.` and defining the `run/1` function: - defmodule Mix.Tasks.Hello do + defmodule Mix.Tasks.Echo do use Mix.Task - def run(_) do - IO.puts "hello" + def run(args) do + Mix.shell.info Enum.join(args, " ") end end - The `run/1` function will receive all arguments passed + The `run/1` function will receive a list of all arguments passed to the command line. ## Attributes - There are a couple attributes available in Mix tasks to + There are a few attributes available in Mix tasks to configure them in Mix: - * `@shortdoc` - makes the task public with a short description that appears - on `mix help` - * `@recursive` - run the task recursively in umbrella projects + * `@shortdoc` - makes the task public with a short description that appears on `mix help` + * `@recursive` - runs the task recursively in umbrella projects + * `@preferred_cli_env` - recommends environment to run task. It is used in absence of + a Mix project recommendation, or explicit `MIX_ENV`, and it only works for tasks + in the current project. `@preferred_cli_env` is not loaded from dependencies as + we need to know the environment before dependencies are loaded. + + ## Documentation + Users can read the documentation for public Mix tasks by running `mix help my_task`. 
+ The documentation that will be shown is the `@moduledoc` of the task's module. """ + @type task_name :: String.t | atom + @type task_module :: atom + @doc """ A task needs to implement `run` which receives a list of command line args. """ - defcallback run([binary]) :: any + @callback run([binary]) :: any @doc false defmacro __using__(_opts) do quote do - Enum.each [:shortdoc, :recursive], + Enum.each [:shortdoc, :recursive, :preferred_cli_env], &Module.register_attribute(__MODULE__, &1, persist: true) - @behaviour Mix.Task end end @@ -50,50 +57,66 @@ defmodule Mix.Task do @doc """ Loads all tasks in all code paths. """ + @spec load_all() :: [task_module] def load_all, do: load_tasks(:code.get_path) @doc """ Loads all tasks in the given `paths`. """ - def load_tasks(paths) do - Enum.reduce(paths, [], fn(path, matches) -> - {:ok, files} = :erl_prim_loader.list_dir(path |> to_char_list) - Enum.reduce(files, matches, &match_tasks/2) - end) + @spec load_tasks([List.Chars.t]) :: [task_module] + def load_tasks(dirs) do + # We may get duplicate modules because we look through the + # entire load path so make sure we only return unique modules. + + for(dir <- dirs, + file <- safe_list_dir(to_charlist(dir)), + mod = task_from_path(file), + do: mod) + |> Enum.uniq end - @re_pattern Regex.re_pattern(~r/Elixir\.Mix\.Tasks\..*\.beam$/) + defp safe_list_dir(path) do + case :erl_prim_loader.list_dir(path) do + {:ok, paths} -> paths + {:error, _} -> [] + end + end - defp match_tasks(filename, modules) do - if :re.run(filename, @re_pattern, [capture: :none]) == :match do - mod = :filename.rootname(filename, '.beam') |> List.to_atom - if Code.ensure_loaded?(mod), do: [mod | modules], else: modules - else - modules + @prefix_size byte_size("Elixir.Mix.Tasks.") + @suffix_size byte_size(".beam") + + defp task_from_path(filename) do + base = Path.basename(filename) + part = byte_size(base) - @prefix_size - @suffix_size + + case base do + <<"Elixir.Mix.Tasks.", rest::binary-size(part), ".beam">> -> + mod = :"Elixir.Mix.Tasks.#{rest}" + ensure_task?(mod) && mod + _ -> + nil end end @doc """ - Returns all loaded tasks. + Returns all loaded task modules. Modules that are not yet loaded won't show up. Check `load_all/0` if you want to preload all tasks. """ + @spec all_modules() :: [task_module] def all_modules do - Enum.reduce :code.all_loaded, [], fn({module, _}, acc) -> - case Atom.to_char_list(module) do - 'Elixir.Mix.Tasks.' ++ _ -> - if is_task?(module), do: [module|acc], else: acc - _ -> - acc - end - end + for {module, _} <- :code.all_loaded, + task?(module), + do: module end @doc """ Gets the moduledoc for the given task `module`. + Returns the moduledoc or `nil`. """ + @spec moduledoc(task_module) :: String.t | nil def moduledoc(module) when is_atom(module) do case Code.get_docs(module, :moduledoc) do {_line, moduledoc} -> moduledoc @@ -103,8 +126,10 @@ defmodule Mix.Task do @doc """ Gets the shortdoc for the given task `module`. + Returns the shortdoc or `nil`. """ + @spec shortdoc(task_module) :: String.t | nil def shortdoc(module) when is_atom(module) do case List.keyfind module.__info__(:attributes), :shortdoc, 0 do {:shortdoc, [shortdoc]} -> shortdoc @@ -114,8 +139,11 @@ defmodule Mix.Task do @doc """ Checks if the task should be run recursively for all sub-apps in - umbrella projects. Returns `true`, `false` or `:both`. + umbrella projects. + + Returns `true` or `false`. 
""" + @spec recursive(task_module) :: boolean def recursive(module) when is_atom(module) do case List.keyfind module.__info__(:attributes), :recursive, 0 do {:recursive, [setting]} -> setting @@ -123,21 +151,57 @@ defmodule Mix.Task do end end + @doc """ + Gets preferred CLI environment for the task. + + Returns environment (for example, `:test`, or `:prod`), or `nil`. + """ + @spec preferred_cli_env(task_name) :: atom | nil + def preferred_cli_env(task) when is_atom(task) or is_binary(task) do + case get(task) do + nil -> nil + module -> + case List.keyfind module.__info__(:attributes), :preferred_cli_env, 0 do + {:preferred_cli_env, [setting]} -> setting + _ -> nil + end + end + end + @doc """ Returns the task name for the given `module`. """ - def task_name(module) do + @spec task_name(task_module) :: task_name + def task_name(module) when is_atom(module) do Mix.Utils.module_name_to_command(module, 2) end @doc """ - Receives a task name and retrieves the task module. - Returns nil if the task cannot be found. + Checks if an alias called `task` exists. + + For more information about task aliasing, take a look at the "Aliasing" + section in the docs for `Mix`. """ + @spec alias?(task_name) :: boolean + def alias?(task) when is_binary(task) do + alias?(String.to_atom(task)) + end + + def alias?(task) when is_atom(task) do + Mix.Project.config[:aliases][task] + end + + @doc """ + Receives a task name and returns the task module if found. + + Otherwise returns `nil` in case the module + exists, but it isn't a task or cannot be found. + """ + @spec get(task_name) :: task_module | nil def get(task) do - case Mix.Utils.command_to_module(task, Mix.Tasks) do - {:module, module} -> module - {:error, _} -> nil + case fetch(task) do + {:ok, module} -> module + {:error, _} -> nil end end @@ -148,17 +212,25 @@ defmodule Mix.Task do * `Mix.NoTaskError` - raised if the task could not be found * `Mix.InvalidTaskError` - raised if the task is not a valid `Mix.Task` - """ + @spec get!(task_name) :: task_module | no_return def get!(task) do - if module = get(task) do - if is_task?(module) do + case fetch(task) do + {:ok, module} -> module - else - Mix.raise Mix.InvalidTaskError, task: task - end - else - Mix.raise Mix.NoTaskError, task: task + {:error, :invalid} -> + raise Mix.InvalidTaskError, task: task + {:error, :not_found} -> + raise Mix.NoTaskError, task: task + end + end + + defp fetch(task) when is_binary(task) or is_atom(task) do + case Mix.Utils.command_to_module(to_string(task), Mix.Tasks) do + {:module, module} -> + if task?(module), do: {:ok, module}, else: {:error, :invalid} + {:error, _} -> + {:error, :not_found} end end @@ -168,70 +240,189 @@ defmodule Mix.Task do If the task was not yet invoked, it runs the task and returns the result. - If the task was already invoked, it does not run the task - again and simply aborts with `:noop`. + If there is an alias with the same name, the alias + will be invoked instead of the original task. - It may raise an exception if the task was not found - or it is invalid. Check `get!/1` for more information. + If the task or alias were already invoked, it does not + run them again and simply aborts with `:noop`. + + It may raise an exception if an alias or a task can't + be found or the task is invalid. Check `get!/1` for more + information. 
""" - def run(task, args \\ []) do - task = to_string(task) + @spec run(task_name, [any]) :: any + def run(task, args \\ []) - if Mix.TasksServer.run_task(task, Mix.Project.get) do - module = get!(task) + def run(task, args) when is_atom(task) do + run(Atom.to_string(task), args) + end - recur module, fn proj -> - Mix.TasksServer.put_task(task, proj) - module.run(args) - end - else - :noop + def run(task, args) when is_binary(task) do + proj = Mix.Project.get + alias = Mix.Project.config[:aliases][String.to_atom(task)] + + cond do + alias && Mix.TasksServer.run({:alias, task, proj}) -> + res = run_alias(List.wrap(alias), args, :ok) + Mix.TasksServer.put({:task, task, proj}) + res + Mix.TasksServer.run({:task, task, proj}) -> + run_task(proj, task, args) + true -> + :noop + end + end + + defp run_task(proj, task, args) do + if Mix.debug?, do: output_task_debug_info(task, args, proj) + + # 1. If the task is available, we run it. + # 2. Otherwise we look for it in dependencies. + # 3. Finally, we compile the current project in hope it is available. + module = + get_task_or_run(proj, task, fn -> Mix.Task.run("deps.loadpaths") end) || + get_task_or_run(proj, task, fn -> Mix.Project.compile([]) end) || + get!(task) + + recursive = recursive(module) + + cond do + recursive && Mix.Project.umbrella? -> + Mix.ProjectStack.recur fn -> + recur(fn _ -> run(task, args) end) + end + + not recursive && Mix.ProjectStack.recursing() -> + Mix.ProjectStack.root(fn -> run(task, args) end) + + true -> + Mix.TasksServer.put({:task, task, proj}) + try do + module.run(args) + rescue + e in OptionParser.ParseError -> + Mix.raise "Could not invoke task #{inspect task}: " <> Exception.message(e) + end end end + defp output_task_debug_info(task, args, proj) do + Mix.shell.info("** Running mix " <> task_to_string(task, args) <> project_to_string(proj)) + end + + defp project_to_string(nil), do: "" + defp project_to_string(proj), do: " (inside #{inspect proj})" + + defp task_to_string(task, []), do: task + defp task_to_string(task, args), do: task <> " " <> Enum.join(args, " ") + + defp get_task_or_run(proj, task, fun) do + cond do + module = get(task) -> + module + proj -> + fun.() + nil + true -> + nil + end + end + + defp run_alias([h | t], alias_args, _res) when is_binary(h) do + [task | args] = OptionParser.split(h) + res = Mix.Task.run task, join_args(args, alias_args, t) + run_alias(t, alias_args, res) + end + + defp run_alias([h | t], alias_args, _res) when is_function(h, 1) do + res = h.(join_args([], alias_args, t)) + run_alias(t, alias_args, res) + end + + defp run_alias([], _alias_task, res) do + res + end + + defp join_args(args, alias_args, []), do: args ++ alias_args + defp join_args(args, _alias_args, _), do: args + @doc """ Clears all invoked tasks, allowing them to be reinvoked. + + This operation is not recursive. """ + @spec clear :: :ok def clear do - Mix.TasksServer.clear_tasks + Mix.TasksServer.clear end @doc """ - Reenables a given task so it can be executed again down the stack. If - an umbrella project reenables a task it is reenabled for all sub projects. + Reenables a given task so it can be executed again down the stack. + + Both alias and the regular stack are reenabled when this function + is called. + + If an umbrella project reenables a task, it is reenabled for all + child projects. 
""" - def reenable(task) do - task = to_string(task) - module = get!(task) + @spec reenable(task_name) :: :ok + def reenable(task) when is_binary(task) or is_atom(task) do + task = to_string(task) + proj = Mix.Project.get + recursive = (module = get(task)) && recursive(module) - recur module, fn project -> - Mix.TasksServer.delete_task(task, project) + Mix.TasksServer.delete_many([{:task, task, proj}, + {:alias, task, proj}]) + + cond do + recursive && Mix.Project.umbrella? -> + recur fn proj -> + Mix.TasksServer.delete_many([{:task, task, proj}, + {:alias, task, proj}]) + end + + proj = !recursive && Mix.ProjectStack.recursing() -> + Mix.TasksServer.delete_many([{:task, task, proj}, + {:alias, task, proj}]) + + true -> + :ok end - end - defp recur(module, fun) do - umbrella? = Mix.Project.umbrella? - recursive = recursive(module) + :ok + end - if umbrella? && recursive && Mix.ProjectStack.enable_recursion do - # Get all dependency configuration but not the deps path - # as we leave the control of the deps path still to the - # umbrella child. - config = Mix.Project.deps_config |> Keyword.delete(:deps_path) - res = for %Mix.Dep{app: app, opts: opts} <- Mix.Dep.Umbrella.loaded do - Mix.Project.in_project(app, opts[:path], config, fun) - end - Mix.ProjectStack.disable_recursion - res - else - fun.(Mix.Project.get) + defp recur(fun) do + # Get all dependency configuration but not the deps path + # as we leave the control of the deps path still to the + # umbrella child. + config = Mix.Project.deps_config |> Keyword.delete(:deps_path) + for %Mix.Dep{app: app, opts: opts} <- Mix.Dep.Umbrella.cached do + Mix.Project.in_project(app, opts[:path], config, fun) end end + @doc """ + Reruns `task` with the given arguments. + + This function reruns the given task; to do that, it first re-enables the task + and then runs it as normal. + """ + @spec rerun(task_name, [any]) :: any + def rerun(task, args \\ []) do + reenable(task) + run(task, args) + end + @doc """ Returns `true` if given module is a task. """ - def is_task?(module) do - function_exported?(module, :run, 1) + @spec task?(task_module) :: boolean + def task?(module) when is_atom(module) do + match?('Elixir.Mix.Tasks.' ++ _, Atom.to_charlist(module)) and ensure_task?(module) + end + + defp ensure_task?(module) do + Code.ensure_loaded?(module) and function_exported?(module, :run, 1) end end diff --git a/lib/mix/lib/mix/tasks/app.start.ex b/lib/mix/lib/mix/tasks/app.start.ex index 974710da12d..5718f2144b7 100644 --- a/lib/mix/lib/mix/tasks/app.start.ex +++ b/lib/mix/lib/mix/tasks/app.start.ex @@ -1,48 +1,158 @@ defmodule Mix.Tasks.App.Start do use Mix.Task - @recursive true + # Do not mark this task as recursive as it is + # responsible for loading consolidated protocols. + @shortdoc "Starts all registered apps" @moduledoc """ - Starts all registered apps. If no apps key exists, - it starts the current application. + Starts all registered apps. + + The application is started by default as temporary. In case + `:start_permanent` is set to `true` in your project configuration + or the `--permanent` flag is given, it is started as permanent, + which guarantees the node will shutdown if the application + crashes permanently. + + ## Configuration + + * `:start_permanent` - the application and all of its children + applications are started in permanent mode. Defaults to `false`. + + * `:consolidate_protocols` - when `true`, loads consolidated + protocols before start. The default value is `true`. 
+ + * `:elixir` - matches the current Elixir version against the + given requirement ## Command line options - * `--force` - force compilation regardless of compilation times - * `--no-compile` - do not compile even if files require compilation - * `--no-deps-check` - do not check dependencies - * `--no-elixir-version-check` - - do not check elixir version - * `--no-start` - do not start applications after compilation + * `--force` - forces compilation regardless of compilation times + * `--temporary` - starts the application as temporary + * `--permanent` - starts the application as permanent + * `--no-compile` - does not compile even if files require compilation + * `--no-protocols` - does not load consolidated protocols + * `--no-archives-check` - does not check archives + * `--no-deps-check` - does not check dependencies + * `--no-elixir-version-check` - does not check Elixir version + * `--no-start` - does not start applications after compilation """ + @spec run(OptionParser.argv) :: :ok def run(args) do - {opts, _, _} = OptionParser.parse(args) + Mix.Project.get! + config = Mix.Project.config - Mix.Task.run "deps.loadpaths", args + {opts, _, _} = OptionParser.parse args, switches: [permanent: :boolean, temporary: :boolean] Mix.Task.run "loadpaths", args - unless opts[:no_compile] do - Mix.Task.run "compile", args + unless "--no-compile" in args do + Mix.Project.compile(args, config) + end + + unless "--no-protocols" in args do + path = Path.join(Mix.Project.build_path(config), "consolidated") + + if config[:consolidate_protocols] && File.dir?(path) do + Code.prepend_path(path) + Enum.each(File.ls!(path), &load_protocol/1) + end + end + + # Stop Logger when starting the application as it is + # up to the application to decide if it should be restarted + # or not. + # + # Mix should not depend directly on Logger so check that it's loaded. 
+ logger = Process.whereis(Logger) + if logger do + Logger.App.stop end - unless opts[:no_start] do - start(Mix.Project.config[:app]) + if "--no-start" in args do + # Start Logger again if the application won't be starting it + if logger do + :ok = Logger.App.start + end + else + start(Mix.Project.config, opts) end + + :ok end @doc false - def start(app) do - if app do - case Application.ensure_all_started(app) do - {:ok, _} -> :ok - {:error, {app, reason}} -> - Mix.raise "Could not start application #{app}: " <> - Application.format_error(reason) + def start(config, opts) do + apps = + cond do + Mix.Project.umbrella?(config) -> + for %Mix.Dep{app: app} <- Mix.Dep.Umbrella.cached, do: app + app = config[:app] -> + [app] + true -> + [] end - else - :error + + type = type(config, opts) + Enum.each apps, &ensure_all_started(&1, type) + + # If there is a build path, we will let the application + # that owns the build path do the actual check + unless config[:build_path] do + check_configured() + end + + :ok + end + + defp ensure_all_started(app, type) do + case Application.ensure_all_started(app, type) do + {:ok, _} -> :ok + {:error, {app, reason}} -> + Mix.raise "Could not start application #{app}: " <> + Application.format_error(reason) + end + end + + @doc false + def type(config, opts) do + cond do + opts[:temporary] -> :temporary + opts[:permanent] -> :permanent + config[:start_permanent] -> :permanent + true -> :temporary + end + end + + defp check_configured() do + configured = Mix.ProjectStack.configured_applications + loaded = for {app, _, _} <- Application.loaded_applications(), do: app + _ = for app <- configured -- loaded, + :code.lib_dir(app) == {:error, :bad_name} do + Mix.shell.error """ + You have configured application #{inspect app} in your configuration + file, but the application is not available. + + This usually means one of: + + 1. You have not added the application as a dependency in a mix.exs file. + + 2. You are configuring an application that does not really exist. + + Please ensure #{inspect app} exists or remove the configuration. + """ + end + :ok + end + + defp load_protocol(file) do + case file do + "Elixir." <> _ -> + module = file |> Path.rootname |> String.to_atom + :code.purge(module) + :code.delete(module) + _ -> + :ok end end end diff --git a/lib/mix/lib/mix/tasks/app.tree.ex b/lib/mix/lib/mix/tasks/app.tree.ex new file mode 100644 index 00000000000..23bfdd50bc2 --- /dev/null +++ b/lib/mix/lib/mix/tasks/app.tree.ex @@ -0,0 +1,89 @@ +defmodule Mix.Tasks.App.Tree do + use Mix.Task + + @shortdoc "Prints the application tree" + @recursive true + + @moduledoc """ + Prints the application tree. + + mix app.tree --exclude logger --exclude elixir + + If no application is given, it uses the current application defined + in the `mix.exs` file. + + ## Command line options + + * `--exclude` - exclude applications which you do not want to see printed. + `kernel`, `stdlib` and `compiler` are always excluded from the tree. + + * `--format` - Can be set to one of either: + + * `pretty` - uses Unicode codepoints for formatting the tree. + This is the default except on Windows. + + * `plain` - does not use Unicode codepoints for formatting the tree. + This is the default on Windows. + + * `dot` - produces a DOT graph description of the application tree + in `app_tree.dot` in the current directory. + Warning: this will overwrite any previously generated file. 
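For example, to render the application tree as a DOT graph while also excluding `logger`:

    mix app.tree --format dot --exclude logger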
+ """ + + @default_excluded [:kernel, :stdlib, :compiler] + + @spec run(OptionParser.argv) :: :ok + def run(args) do + Mix.Task.run "compile" + + {app, opts} = + case OptionParser.parse!(args, strict: [exclude: :keep, format: :string]) do + {opts, []} -> + app = Mix.Project.config[:app] || Mix.raise("no application given and none found in mix.exs file") + {app, opts} + {opts, [app]} -> + {String.to_atom(app), opts} + end + + excluded = Keyword.get_values(opts, :exclude) |> Enum.map(&String.to_atom/1) + excluded = @default_excluded ++ excluded + + callback = fn {type, app} -> + load(app) + {{app, type(type)}, children_for(app, excluded)} + end + + if opts[:format] == "dot" do + Mix.Utils.write_dot_graph!("app_tree.dot", "application tree", + [{:normal, app}], callback, opts) + """ + Generated "app_tree.dot" in the current directory. To generate a PNG: + + dot -Tpng app_tree.dot -o app_tree.png + + For more options see http://www.graphviz.org/. + """ + |> String.trim_trailing + |> Mix.shell.info + else + Mix.Utils.print_tree([{:normal, app}], callback, opts) + end + end + + defp load(app) do + case Application.load(app) do + :ok -> :ok + {:error, {:already_loaded, ^app}} -> :ok + _ -> Mix.raise("could not find application #{app}") + end + end + + defp children_for(app, excluded) do + apps = Application.spec(app, :applications) -- excluded + included_apps = Application.spec(app, :included_applications) -- excluded + Enum.map(apps, &{:normal, &1}) ++ Enum.map(included_apps, &{:included, &1}) + end + + defp type(:normal), do: nil + defp type(:included), do: "(included)" +end diff --git a/lib/mix/lib/mix/tasks/archive.build.ex b/lib/mix/lib/mix/tasks/archive.build.ex new file mode 100644 index 00000000000..ebe915e301e --- /dev/null +++ b/lib/mix/lib/mix/tasks/archive.build.ex @@ -0,0 +1,118 @@ +defmodule Mix.Tasks.Archive.Build do + use Mix.Task + + @shortdoc "Archives this project into a .ez file" + + @moduledoc """ + Builds an archive according to the specification of the + [Erlang Archive Format](http://www.erlang.org/doc/man/code.html). + + Archives are meant to contain small projects, usually installed + locally. Archives may be installed into a Mix environment by + running `mix archive.install`. Once installed, the archive is + available to all Mix projects. For this reason, the functionality + behind archives is limited. For instance, archives do not include + dependencies, as those would conflict with any dependency in a + Mix project after the archive is installed. In general, we recommend + the usage of archives to be limited for extensions of Mix, such + as custom SCMs, package managers, etc. For general scripts to be + installed into machines, please see `mix escript.build`. + + The archive will be created in the current directory (which is + expected to be the project root), unless an argument `-o` is + provided with the file name. + + By default, this command archives the current project but the `-i` + option can be used to archive any directory. For example, + `mix archive.build` with no options translates to: + + mix archive.build -i _build/ENV/lib/APP -o APP-VERSION.ez + + ## Command line options + + * `-o` - specifies output file name. + If there is a `mix.exs`, defaults to "APP-VERSION.ez". + + * `-i` - specifies the input directory to archive. + If there is a `mix.exs`, defaults to the current application build. + + * `--no-compile` - skips compilation. + Only applies when `mix.exs` is available. 
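For example, to archive the production build under an explicit file name (the names are illustrative):

    MIX_ENV=prod mix archive.build -o my_tool.ez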
+ + """ + @switches [force: :boolean, compile: :boolean, output: :string, input: :string, + deps_check: :boolean, archives_check: :boolean, elixir_version_check: :boolean] + + @spec run(OptionParser.argv) :: :ok + def run(args) do + {opts, _} = OptionParser.parse!(args, aliases: [o: :output, i: :input], strict: @switches) + + project = Mix.Project.get + + if project && Keyword.get(opts, :compile, true) do + Mix.Task.run :compile, args + end + + source = cond do + input = opts[:input] -> + input + project -> + path = Mix.Project.app_path + if elixir = Mix.Project.config[:elixir] do + File.write Path.join(path, ".elixir"), elixir + else + File.rm Path.join(path, ".elixir") + end + path + true -> + Mix.raise "Cannot create archive without input directory, " <> + "please pass -i as an option" + end + + project_config = Mix.Project.config + target = cond do + output = opts[:output] -> + output + project_config[:app] -> + Mix.Local.name_for(:archive, project_config) + true -> + Mix.raise "Cannot create archive without output file, " <> + "please pass -o as an option" + end + + unless File.dir?(source) do + Mix.raise "Expected archive source #{inspect source} to be a directory" + end + + create(source, target) + + Mix.shell.info "Generated archive #{inspect target} with MIX_ENV=#{Mix.env}" + :ok + end + + defp create(source, target) do + source_path = Path.expand(source) + target_path = Path.expand(target) + dir = Mix.Local.archive_name(target_path) |> String.to_charlist + {:ok, _} = :zip.create(String.to_charlist(target_path), + files_to_add(source_path, dir)) + :ok + end + + defp files_to_add(path, dir) do + File.cd! path, fn -> + evsn = Path.wildcard(".elixir") + ebin = Path.wildcard("ebin/*.{beam,app}") + priv = Path.wildcard("priv/**/*") + + Enum.reduce evsn ++ ebin ++ priv, [], fn(f, acc) -> + case File.read(f) do + {:ok, bin} -> + [{Path.join(dir, f) |> String.to_charlist, bin} | acc] + {:error, _} -> + acc + end + end + end + end +end diff --git a/lib/mix/lib/mix/tasks/archive.check.ex b/lib/mix/lib/mix/tasks/archive.check.ex new file mode 100644 index 00000000000..172ce2bba56 --- /dev/null +++ b/lib/mix/lib/mix/tasks/archive.check.ex @@ -0,0 +1,55 @@ +defmodule Mix.Tasks.Archive.Check do + use Mix.Task + + @moduledoc """ + Checks all archives are available. + + Mix projects can specify required archives using + the `:archives` option: + + archives: [{:foo, "~> 1.0.0"}] + + This task guarantees this option is respected. + """ + def run(_) do + archives = Mix.Project.config[:archives] || [] + + Enum.each archives, fn tuple -> + {archive, req} = parse_archive(tuple) + _ = Application.load(archive) + vsn = Application.spec(archive, :vsn) + cond do + is_nil(vsn) -> + Mix.raise "Archive \"#{archive}\" could not be found. " <> + "Please make sure the archive is installed locally." + not Version.match?(List.to_string(vsn), req) -> + Mix.raise "Archive \"#{archive}-#{vsn}\" does not match requirement #{req}. " <> + "Please update your archive version accordingly." 
+ true -> + :ok + end + end + end + + defp parse_archive({archive, req}) when is_atom(archive) and is_binary(req) do + case Version.parse_requirement(req) do + {:ok, req} -> + {archive, req} + :error -> + Mix.raise "Invalid requirement #{req} for archive \"#{archive}\"" + end + end + + defp parse_archive(other) do + Mix.raise """ + Expected archive to be in the format: + + {app :: atom, requirement :: binary} + + got: + + #{inspect other} + + """ + end +end diff --git a/lib/mix/lib/mix/tasks/archive.ex b/lib/mix/lib/mix/tasks/archive.ex index a1f5c6e320d..51f4544998b 100644 --- a/lib/mix/lib/mix/tasks/archive.ex +++ b/lib/mix/lib/mix/tasks/archive.ex @@ -1,66 +1,35 @@ defmodule Mix.Tasks.Archive do use Mix.Task - @shortdoc "Archive this project into a .ez file" + @shortdoc "Lists installed archives" @moduledoc """ - Packages the current project (though not its dependencies) into a - zip file according to the specification of the - [Erlang Archive Format](http://www.erlang.org/doc/man/code.html). + Lists all installed archives. - Archives are meant to bundle small projects, usually installed - locally. - - The file will be created in the current directory (which is expected - to be the project root), unless an argument -o is provided with the file name. - - ## Command line options - - * `-o` - specify output file name. - If there is a mix.exs, defaults to app-vsn.ez. - - * `-i` - specify the input directory to archive. - If there is a mix.exs, defaults to the current application build. - - * `--no-compile` - - skip compilation. Only applies to projects. + Archives are typically installed at `~/.mix/archives` + although the installation path can be customized by + setting the `MIX_ARCHIVES` environment variable. + Since archives are specific to Elixir versions, it is + expected from build tools to swap the `MIX_ARCHIVES` + variable to different locations based on a particular + Elixir installation. """ + @spec run(OptionParser.argv) :: :ok + def run(_) do + Mix.Local.path_for(:archive) + |> Path.join("*") + |> Path.wildcard() + |> Enum.map(&Path.basename/1) + |> print() + end - def run(args) do - {opts, _, _} = OptionParser.parse(args, aliases: [o: :output, i: :input], - switches: [force: :boolean, no_compile: :boolean]) - - project = Mix.Project.get - - if project && !opts[:no_compile] do - Mix.Task.run :compile, args - end - - source = cond do - input = opts[:input] -> - input - project -> - Mix.Project.app_path - true -> - Mix.raise "Cannot create archive without input directory, " <> - "please pass -i as an option" - end - - target = cond do - output = opts[:output] -> - output - app = Mix.Project.config[:app] -> - Mix.Archive.name(app, Mix.Project.config[:version]) - true -> - Mix.raise "Cannot create archive without a name, " <> - "please pass -o as an option" - end - - unless File.dir?(source) do - Mix.raise "Expected archive source #{inspect source} to be a directory" - end + defp print([]) do + Mix.shell.info "No archives currently installed." 
+ end - Mix.Archive.create(source, target) + defp print(items) do + Enum.each items, fn item -> Mix.shell.info ["* ", item] end + Mix.shell.info "Archives installed at: #{Mix.Local.path_for(:archive)}" end end diff --git a/lib/mix/lib/mix/tasks/archive.install.ex b/lib/mix/lib/mix/tasks/archive.install.ex new file mode 100644 index 00000000000..9882a8ff73a --- /dev/null +++ b/lib/mix/lib/mix/tasks/archive.install.ex @@ -0,0 +1,137 @@ +defmodule Mix.Tasks.Archive.Install do + use Mix.Task + + @shortdoc "Installs an archive locally" + + @moduledoc """ + Installs an archive locally. + + If no argument is supplied but there is an archive in the project's + root directory (created with `mix archive.build`), then the archive + will be installed locally. For example: + + mix do archive.build, archive.install + + If an argument is provided, it should be a local path or a URL to a + prebuilt archive, a Git repository, a GitHub repository, or a Hex + package. + + mix archive.install archive.ez + mix archive.install path/to/archive.ez + mix archive.install https://example.com/my_archive.ez + mix archive.install git https://path/to/git/repo + mix archive.install git https://path/to/git/repo branch git_branch + mix archive.install git https://path/to/git/repo tag git_tag + mix archive.install git https://path/to/git/repo ref git_ref + mix archive.install github user/project + mix archive.install github user/project branch git_branch + mix archive.install github user/project tag git_tag + mix archive.install github user/project ref git_ref + mix archive.install hex hex_package + mix archive.install hex hex_package 1.2.3 + + After installation, the tasks in the archive are available locally: + + mix some_task + + Note that installing via Git/GitHub/Hex fetches the source of the archive + and builds it, while using URL/local path fetches a pre-built archive. + + ## Command line options + + * `--sha512` - checks the archive matches the given SHA-512 checksum. 
Only + applies to installations via URL or local path + + * `--force` - forces installation without a shell prompt; primarily + intended for automation in build systems like Make + + * `--submodules` - fetches repository submodules before building archive from + Git or GitHub + + * `--app` - specifies a custom app name to be used for building the archive + from Git, GitHub, or Hex + + """ + + @behaviour Mix.Local.Installer + + @switches [force: :boolean, sha512: :string, submodules: :boolean, app: :string] + @spec run(OptionParser.argv) :: boolean + def run(argv) do + Mix.Local.Installer.install(__MODULE__, argv, @switches) + end + + # Callbacks + + def check_install_spec({local_or_url, path_or_url}, _opts) when + local_or_url in [:local, :url] do + if Path.extname(path_or_url) == ".ez" do + :ok + else + {:error, "Expected a local file path or a file URL ending in \".ez\"."} + end + end + + def check_install_spec(_, _), do: :ok + + def find_previous_versions(src) do + app = + src + |> Mix.Local.archive_name + |> String.split("-") + |> List.first + + if app do + archives(app) ++ archives(app <> "-*") + else + [] + end + end + + def install(basename, contents, previous) do + ez_path = Path.join(Mix.Local.path_for(:archive), basename) + dir_dest = resolve_destination(ez_path, contents) + + remove_previous_versions(previous) + + File.mkdir_p!(dir_dest) + {:ok, _} = :zip.extract(contents, [cwd: dir_dest]) + Mix.shell.info [:green, "* creating ", :reset, Path.relative_to_cwd(dir_dest)] + + ebin = Mix.Local.archive_ebin(dir_dest) + Mix.Local.check_elixir_version_in_ebin(ebin) + true = Code.append_path(ebin) + :ok + end + + def build(_install_spec, _opts) do + Mix.Task.run("archive.build", []) + Mix.Local.name_for(:archive, Mix.Project.config) + end + + ### Private helpers + + defp resolve_destination(ez_path, contents) do + with {:ok, [_comment, zip_first_file | _]} <- :zip.list_dir(contents), + {:zip_file, zip_first_path, _, _, _, _} = zip_first_file, + [zip_root_dir | _] = Path.split(zip_first_path) do + + Path.join(Path.dirname(ez_path), zip_root_dir) + else + _ -> + Mix.raise "Installation failed: invalid archive file" + end + end + + defp archives(name) do + # TODO: We can remove the .ez extension on Elixir 2.0 since we always unzip since 1.3 + Mix.Local.path_for(:archive) + |> Path.join(name <> "{,*.ez}") + |> Path.wildcard + end + + defp remove_previous_versions([]), + do: :ok + defp remove_previous_versions(previous), + do: Enum.each(previous, &File.rm_rf!/1) +end diff --git a/lib/mix/lib/mix/tasks/archive.uninstall.ex b/lib/mix/lib/mix/tasks/archive.uninstall.ex new file mode 100644 index 00000000000..3f885ad81a3 --- /dev/null +++ b/lib/mix/lib/mix/tasks/archive.uninstall.ex @@ -0,0 +1,16 @@ +defmodule Mix.Tasks.Archive.Uninstall do + use Mix.Task + + @shortdoc "Uninstalls archives" + + @moduledoc """ + Uninstalls local archives. + + mix archive.uninstall archive.ez + + """ + @spec run(OptionParser.argv) :: :ok + def run(argv) do + Mix.Local.Installer.uninstall(Mix.Local.path_for(:archive), "archive", argv) + end +end diff --git a/lib/mix/lib/mix/tasks/clean.ex b/lib/mix/lib/mix/tasks/clean.ex index c3ea1313202..5cad0d1de35 100644 --- a/lib/mix/lib/mix/tasks/clean.ex +++ b/lib/mix/lib/mix/tasks/clean.ex @@ -1,44 +1,54 @@ defmodule Mix.Tasks.Clean do use Mix.Task - @shortdoc "Clean generated application files" + @shortdoc "Deletes generated application files" @recursive true @moduledoc """ - Clean generated application files. + Deletes generated application files. 
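For quick reference, the options documented below can be combined as in this usage sketch (the `prod` environment name is only an example):

    mix clean
    mix clean --deps
    mix clean --only prod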
- This command delete all build artifacts for the current application - accross all environments. Dependencies are only cleaned up if the - `--all` option is given. - - ## Command line options - - * `--all` - clean everything, including builds and dependencies + This command deletes all build artifacts for the current project. + Dependencies' sources and build files are cleaned only if the + `--deps` option is given. + By default this task works across all environments, unless `--only` + is given. """ + @switches [deps: :boolean, only: :string] + + @spec run(OptionParser.argv) :: :ok def run(args) do - {opts, _, _} = OptionParser.parse(args) + Mix.Project.get! + loadpaths!() - for compiler <- Mix.Tasks.Compile.compilers() do - module = Mix.Task.get!("compile.#{compiler}") - if function_exported?(module, :clean, 0) do - module.clean - end - end + {opts, _, _} = OptionParser.parse(args, switches: @switches) + + _ = for compiler <- [:protocols] ++ Mix.Tasks.Compile.compilers(), + module = Mix.Task.get("compile.#{compiler}"), + function_exported?(module, :clean, 0), + do: module.clean - if opts[:all] do - Mix.Task.run("deps.clean", args) - Mix.Project.build_path - |> Path.dirname - |> File.rm_rf + build = Mix.Project.build_path + |> Path.dirname + |> Path.join("#{opts[:only] || :*}") + + if opts[:deps] do + build + |> Path.wildcard + |> Enum.each(&File.rm_rf/1) else - config = Mix.Project.config - Mix.Project.build_path(config) - |> Path.dirname - |> Path.join("*/lib/#{config[:app]}") + build + |> Path.join("lib/#{Mix.Project.config[:app]}") |> Path.wildcard |> Enum.each(&File.rm_rf/1) end end + + # Loadpaths without checks because compilers may be defined in deps. + defp loadpaths! do + Mix.Task.run "loadpaths", ["--no-elixir-version-check", "--no-deps-check", "--no-archives-check"] + Mix.Task.reenable "loadpaths" + Mix.Task.reenable "deps.loadpaths" + end end diff --git a/lib/mix/lib/mix/tasks/cmd.ex b/lib/mix/lib/mix/tasks/cmd.ex index d332a7c619b..7e755188e74 100644 --- a/lib/mix/lib/mix/tasks/cmd.ex +++ b/lib/mix/lib/mix/tasks/cmd.ex @@ -12,14 +12,30 @@ defmodule Mix.Tasks.Cmd do mix cmd echo pwd - Aborts when the first command exits with status different - than zero. + You can limit which apps the cmd runs in by passing the app names + before the cmd using --app: + + mix cmd --app app1 --app app2 echo pwd + + Aborts when a command exits with a non-zero status. """ + + @spec run(OptionParser.argv) :: :ok def run(args) do - Mix.shell.print_app - case Mix.shell.cmd(Enum.join(args, " ")) do - 0 -> :ok - s -> exit(s) + {args, apps} = parse_apps(args, []) + if apps == [] or Mix.Project.config[:app] in apps do + case Mix.shell.cmd(Enum.join(args, " ")) do + 0 -> :ok + status -> exit(status) + end + end + end + + defp parse_apps(args, apps) do + case args do + ["--app", app | tail] -> + parse_apps(tail, [String.to_atom(app) | apps]) + args -> {args, apps} end end end diff --git a/lib/mix/lib/mix/tasks/compile.all.ex b/lib/mix/lib/mix/tasks/compile.all.ex new file mode 100644 index 00000000000..7a283c0c4c6 --- /dev/null +++ b/lib/mix/lib/mix/tasks/compile.all.ex @@ -0,0 +1,44 @@ +defmodule Mix.Tasks.Compile.All do + use Mix.Task + + @moduledoc false + @recursive true + + # This is an internal task used by "mix compile" which + # is meant to be recursive and be invoked for each child + # project. + + def run(args) do + Mix.Project.get! + + # Build the project structure so we can write down compiled files. 
+ Mix.Project.build_structure + + with_logger_app fn -> + res = + Enum.map(Mix.Tasks.Compile.compilers(), fn(compiler) -> + Mix.Task.run("compile.#{compiler}", args) + end) + + true = Code.prepend_path(Mix.Project.compile_path) + if :ok in res, do: :ok, else: :noop + end + end + + defp with_logger_app(fun) do + app = Keyword.fetch!(Mix.Project.config, :app) + logger? = Process.whereis(Logger) + logger_config_app = Application.get_env(:logger, :compile_time_application) + + try do + if logger? do + Logger.configure([compile_time_application: app]) + end + fun.() + after + if logger? do + Logger.configure([compile_time_application: logger_config_app]) + end + end + end +end diff --git a/lib/mix/lib/mix/tasks/compile.app.ex b/lib/mix/lib/mix/tasks/compile.app.ex index 97656c7da5b..b9ea7ceda58 100644 --- a/lib/mix/lib/mix/tasks/compile.app.ex +++ b/lib/mix/lib/mix/tasks/compile.app.ex @@ -8,99 +8,145 @@ defmodule Mix.Tasks.Compile.App do An `.app` file is a file containing Erlang terms that defines your application. Mix automatically generates this file based on - your `mix.exs` configuration. You can learn more about OTP - applications by seeing the documentation for the `Application` - module. + your `mix.exs` configuration. - In order to generate the `.app` file, Mix expects your application + In order to generate the `.app` file, Mix expects your project to have both `:app` and `:version` keys. Furthermore, you can - configure the generated application by defining an `application` - function in your `mix.exs` with the following options: + configure the generated application by defining an `application/0` + function in your `mix.exs` with the following options. - * `:applications` - all applications your application depends - on at runtime. For example, if your application depends on - Erlang's `:crypto`, it needs to be added to this list. Most - of your dependencies must be added as well (unless they're - a development or test dependency). Mix and other tools use this - list in order to properly boot your application dependencies - before starting the application itself. + The most commonly used options are: + + * `:extra_applications` - a list of Erlang/Elixir applications + that you want started before your application. For example, + Elixir's `:logger` or Erlang's `:crypto`. Mix guarantees + that any application given here and all of your runtime + dependencies are started before your application starts. * `:registered` - the name of all registered processes in the application. If your application defines a local GenServer with name `MyServer`, it is recommended to add `MyServer` - to this list. It is mostly useful to detect conflicts in + to this list. It is most useful in detecting conflicts between applications that register the same names. - * `:mod` - specify a module to invoke when the application - is started, it must be in the format `{Mod, args}` where - args is often an empty list. The module specified here must - implement the callbacks defined by the `Application` - module. - * `:env` - default values for the application environment. The application environment is one of the most common ways - to configure applications. + to configure applications. See the `Application` module for + mechanisms to read and write to the application environment. 
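Values stored under `:env` become the application's default environment and can be read back or overridden at runtime. A minimal sketch, assuming an application named `:my_app` that defines `env: [key: :value]`:

    # Returns the default from the .app file unless overridden in config
    Application.get_env(:my_app, :key)
    #=> :value

    # Overrides the default for the running system
    Application.put_env(:my_app, :key, :other_value)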
- Let's see an example `application` function: + For example: def application do - [mod: {MyApp, []}, - env: [default: :value], - applications: [:crypto]] + [extra_applications: [:logger, :crypto], + env: [key: :value], + registered: [MyServer]] end - Besides the options above, `.app` files also expects other - options like `:modules` and `:vsn`, but those are automatically - filled by Mix. + Other options include: + + * `:applications` - all applications your application depends + on at runtime. By default, this list is automatically inferred + from your dependencies. Mix and other tools use the application + list in order to start your dependencies before starting the + application itself. + + * `:mod` - specifies a module to invoke when the application + is started. It must be in the format `{Mod, args}` where + args is often an empty list. The module specified must + implement the callbacks defined by the `Application` + module. + + * `:start_phases` - specifies a list of phases and their arguments + to be called after the application is started. See the "Phases" + section below. + + * `:included_applications` - specifies a list of applications + that will be included in the application. It is the responsibility of + the primary application to start the supervision tree of all included + applications, as only the primary application will be started. A process + in an included application considers itself belonging to the + primary application. + + Besides the options above, `.app` files also expect other options like + `:modules` and `:vsn`, but these are automatically added by Mix. ## Command line options * `--force` - forces compilation regardless of modification times + ## Phases + + Applications provide a start phases mechanism which will be called, + in order, for the application and all included applications. If a phase + is not defined for an included application, that application is skipped. + + Let's see an example `MyApp.application/0` function: + + def application do + [start_phases: [init: [], go: [], finish: []], + included_applications: [:my_included_app]] + end + + And an example `:my_included_app` defines on its `mix.exs` the function: + + def application do + [mod: {MyIncludedApp, []}, + start_phases: [go: []]] + end + + In this example, the order that the application callbacks are called in is: + + Application.start(MyApp) + MyApp.start(:normal, []) + MyApp.start_phase(:init, :normal, []) + MyApp.start_phase(:go, :normal, []) + MyIncludedApp.start_phase(:go, :normal, []) + MyApp.start_phase(:finish, :normal, []) + """ + @spec run(OptionParser.argv) :: :ok | :noop def run(args) do {opts, _, _} = OptionParser.parse(args, switches: [force: :boolean]) project = Mix.Project.get! 
config = Mix.Project.config - app = Keyword.fetch!(config, :app) - version = Keyword.fetch!(config, :version) + app = Keyword.get(config, :app) + version = Keyword.get(config, :version) validate_app(app) validate_version(version) path = Mix.Project.compile_path - mods = modules_from(Path.wildcard('#{path}/*.beam')) |> Enum.sort + mods = modules_from(Path.wildcard("#{path}/*.beam")) |> Enum.sort target = Path.join(path, "#{app}.app") sources = Mix.Project.config_files if opts[:force] || Mix.Utils.stale?(sources, [target]) || modules_changed?(mods, target) do best_guess = [ - vsn: to_char_list(version), + description: to_charlist(config[:description] || app), modules: mods, - applications: [] + registered: [], + vsn: to_charlist(version), ] properties = if function_exported?(project, :application, 0) do - Keyword.merge(best_guess, project.application) + project_application = project.application() + unless Keyword.keyword?(project_application) do + Mix.raise "Application configuration returned from application/0 should be a keyword list" + end + Keyword.merge(best_guess, project_application) else best_guess end - # Ensure we always prepend the standard application dependencies - properties = Keyword.update!(properties, :applications, fn apps -> - [:kernel, :stdlib, :elixir] ++ apps - end) - - properties = ensure_correct_properties(app, config, properties) - contents = {:application, app, properties} + properties = ensure_correct_properties(properties, config) + contents = :io_lib.format("~p.~n", [{:application, app, properties}]) - Mix.Project.build_structure(config) - File.write!(target, :io_lib.format("~p.", [contents])) - - Mix.shell.info "Generated #{app}.app" + Mix.Project.ensure_structure() + File.write!(target, IO.chardata_to_string(contents)) + Mix.shell.info "Generated #{app} app" :ok else :noop @@ -109,71 +155,102 @@ defmodule Mix.Tasks.Compile.App do defp modules_changed?(mods, target) do case :file.consult(target) do - {:ok, [ {:application, _app, properties} ]} -> - properties[:registered] == mods + {:ok, [{:application, _app, properties}]} -> + properties[:modules] != mods _ -> false end end defp validate_app(app) when is_atom(app), do: :ok - defp validate_app(_), do: raise(Mix.Error, message: "Expected :app to be an atom") + defp validate_app(app) do + ensure_present(:app, app) + Mix.raise("Expected :app to be an atom, got: #{inspect(app)}") + end defp validate_version(version) do + ensure_present(:version, version) unless is_binary(version) and match?({:ok, _}, Version.parse(version)) do - raise(Mix.Error, message: "Expected :version to be a SemVer version") + Mix.raise("Expected :version to be a SemVer version, got: #{inspect(version)}") end end + defp ensure_present(name, nil) do + Mix.raise("Please ensure mix.exs file has the #{inspect(name)} in the project definition") + end + defp ensure_present(_name, _val), do: :ok + defp modules_from(beams) do Enum.map beams, &(&1 |> Path.basename |> Path.rootname(".beam") |> String.to_atom) end - defp ensure_correct_properties(app, config, properties) do + defp language_app(config) do + case Keyword.fetch(config, :language) do + {:ok, :elixir} -> [:elixir] + {:ok, :erlang} -> [] + :error -> [:elixir] + end + end + + defp ensure_correct_properties(properties, config) do properties - |> Keyword.put_new(:description, to_char_list(config[:description] || app)) - |> Keyword.put_new(:registered, []) - |> validate_properties + |> validate_properties! 
+ |> Keyword.put_new_lazy(:applications, fn -> apps_from_prod_non_optional_deps(properties) end) + |> Keyword.update!(:applications, fn apps -> normalize_apps(apps, properties, config) end) end - defp validate_properties(properties) do + defp validate_properties!(properties) do Enum.each properties, fn {:description, value} -> - unless is_list(value), do: - invalid "Application description (:description) is not a character list (got #{inspect value})" + unless is_list(value) do + Mix.raise "Application description (:description) is not a character list, got: #{inspect value}" + end {:id, value} -> - unless is_list(value), do: - invalid "Application id (:id) is not a character list (got #{inspect value} instead)" + unless is_list(value) do + Mix.raise "Application id (:id) is not a character list, got: #{inspect value}" + end {:vsn, value} -> - unless is_list(value), do: - invalid "Application vsn (:vsn) is not a character list (got #{inspect value} instead)" + unless is_list(value) do + Mix.raise "Application vsn (:vsn) is not a character list, got: #{inspect value}" + end {:maxT, value} -> - unless value == :infinity or is_integer(value), do: - invalid "Application maximum time (:maxT) is not an integer or :infinity (got #{inspect value} instead)" + unless value == :infinity or is_integer(value) do + Mix.raise "Application maximum time (:maxT) is not an integer or :infinity, got: #{inspect value}" + end {:modules, value} -> - unless is_list(value) and Enum.all?(value, &is_atom(&1)), do: - invalid "Application modules (:modules) should be a list of atoms (got #{inspect value} instead)" + unless is_list(value) and Enum.all?(value, &is_atom(&1)) do + Mix.raise "Application modules (:modules) should be a list of atoms, got: #{inspect value}" + end {:registered, value} -> - unless is_list(value) and Enum.all?(value, &is_atom(&1)), do: - invalid "Application registered processes (:registered) should be a list of atoms (got #{inspect value} instead)" + unless is_list(value) and Enum.all?(value, &is_atom(&1)) do + Mix.raise "Application registered processes (:registered) should be a list of atoms, got: #{inspect value}" + end {:included_applications, value} -> - unless is_list(value) and Enum.all?(value, &is_atom(&1)), do: - invalid "Application included applications (:included_applications) should be a list of atoms (got #{inspect value} instead)" + unless is_list(value) and Enum.all?(value, &is_atom(&1)) do + Mix.raise "Application included applications (:included_applications) should be a list of atoms, got: #{inspect value}" + end + {:extra_applications, value} -> + unless is_list(value) and Enum.all?(value, &is_atom(&1)) do + Mix.raise "Application extra applications (:extra_applications) should be a list of atoms, got: #{inspect value}" + end {:applications, value} -> - unless is_list(value) and Enum.all?(value, &is_atom(&1)), do: - invalid "Application dependencies (:applications) should be a list of atoms (got #{inspect value} instead)" + unless is_list(value) and Enum.all?(value, &is_atom(&1)) do + Mix.raise "Application applications (:applications) should be a list of atoms, got: #{inspect value}" + end {:env, value} -> - unless Keyword.keyword?(value), do: - invalid "Application dependencies (:env) should be a keyword list (got #{inspect value} instead)" + unless Keyword.keyword?(value) do + Mix.raise "Application environment (:env) should be a keyword list, got: #{inspect value}" + end {:start_phases, value} -> - unless Keyword.keyword?(value), do: - invalid "Application start phases 
(:start_phases) should be a keyword list (got #{inspect value} instead)" + unless Keyword.keyword?(value) do + Mix.raise "Application start phases (:start_phases) should be a keyword list, got: #{inspect value}" + end {:mod, []} -> :ok {:mod, {module, _args}} when is_atom(module) -> :ok {:mod, value} -> - invalid "Application callback module (:mod) should be either [] or {module, start_args} (got #{inspect value} instead)" + Mix.raise "Application callback module (:mod) should be either [] or {module, start_args}, got: #{inspect value}" _ -> :ok end @@ -181,7 +258,19 @@ defmodule Mix.Tasks.Compile.App do properties end - defp invalid(message) do - Mix.raise message + defp apps_from_prod_non_optional_deps(properties) do + included_applications = Keyword.get(properties, :included_applications, []) + + for %{app: app, opts: opts, top_level: true} <- Mix.Dep.cached, + Keyword.get(opts, :app, true), + Keyword.get(opts, :runtime, true), + not Keyword.get(opts, :optional, false), + app not in included_applications, + do: app + end + + defp normalize_apps(apps, properties, config) do + extra = Keyword.get(properties, :extra_applications, []) + Enum.uniq([:kernel, :stdlib] ++ language_app(config) ++ extra ++ apps) end end diff --git a/lib/mix/lib/mix/tasks/compile.elixir.ex b/lib/mix/lib/mix/tasks/compile.elixir.ex index f36d0c4448c..d812360a349 100644 --- a/lib/mix/lib/mix/tasks/compile.elixir.ex +++ b/lib/mix/lib/mix/tasks/compile.elixir.ex @@ -7,23 +7,24 @@ defmodule Mix.Tasks.Compile.Elixir do @moduledoc """ Compiles Elixir source files. - Elixir is smart enough to recompile only files that changed + Elixir is smart enough to recompile only files that have changed and their dependencies. This means if `lib/a.ex` is invoking a function defined over `lib/b.ex`, whenever `lib/b.ex` changes, `lib/a.ex` is also recompiled. - Note it is important to recompile a file dependencies because - often there are compilation time dependencies in between them. + Note it is important to recompile a file's dependencies as + there are often compile time dependencies between them. ## Command line options - * `--force` - forces compilation regardless of modification times - * `--no-docs` - do not attach documentation to compiled modules - * `--no-debug-info` - do not attach debug info to compiled modules - * `--ignore-module-conflict` - - do not emit warnings if a module was previously defined - * `--warnings-as-errors` - - treat warnings as errors and return a non-zero exit code + * `--force` - forces compilation regardless of modification times + * `--docs` (`--no-docs`) - attaches (or not) documentation to compiled modules + * `--debug-info` (`--no-debug-info`) - attaches (or not) debug info to compiled modules + * `--ignore-module-conflict` - does not emit warnings if a module was previously defined + * `--warnings-as-errors` - treats warnings in the current project as errors and + return a non-zero exit code + * `--long-compilation-threshold N` - sets the "long compilation" threshold + (in seconds) to `N` (see the docs for `Kernel.ParallelCompiler.files/2`) ## Configuration @@ -33,68 +34,52 @@ defmodule Mix.Tasks.Compile.Elixir do * `:elixirc_options` - compilation options that apply to Elixir's compiler, they are: `:ignore_module_conflict`, `:docs` and `:debug_info`. By default, uses the same - behaviour as Elixir. + defaults as `elixirc` and they can always be overridden from + the command line according to the options above. 
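As a rough illustration, these settings normally live under `project/0` in `mix.exs`; the app name and option values below are placeholders:

    def project do
      [app: :my_app,
       version: "0.1.0",
       elixirc_paths: ["lib"],
       elixirc_options: [warnings_as_errors: true, debug_info: true]]
    end

Options given on the command line (for example `--no-debug-info`) take precedence over the `:elixirc_options` entry, as described above.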
""" @switches [force: :boolean, docs: :boolean, warnings_as_errors: :boolean, - ignore_module_conflict: :boolean, debug_info: :boolean] + ignore_module_conflict: :boolean, debug_info: :boolean, + verbose: :boolean, long_compilation_threshold: :integer] @doc """ Runs this task. """ + @spec run(OptionParser.argv) :: :ok | :noop def run(args) do {opts, _, _} = OptionParser.parse(args, switches: @switches) project = Mix.Project.config - srcs = project[:elixirc_paths] dest = Mix.Project.compile_path(project) + srcs = project[:elixirc_paths] + + unless is_list(srcs) do + Mix.raise ":elixirc_paths should be a list of paths, got: #{inspect(srcs)}" + end manifest = manifest() configs = Mix.Project.config_files ++ Mix.Tasks.Compile.Erlang.manifests + force = opts[:force] || Mix.Utils.stale?(configs, [manifest]) - force = opts[:force] || local_deps_changed?(manifest) - || Mix.Utils.stale?(configs, [manifest]) - - result = Mix.Compilers.Elixir.compile(manifest, srcs, [:ex], dest, force, fn -> - Code.prepend_path(dest) - set_compiler_opts(project, opts, []) - end) - - # The Mix.Dep.Lock keeps all the project dependencies. Since Elixir - # is a dependency itself, we need to touch the lock so the current - # Elixir version, used to compile the files above, is properly stored. - unless result == :noop, do: Mix.Dep.Lock.touch - result + opts = Keyword.merge(project[:elixirc_options] || [], opts) + case Mix.Compilers.Elixir.compile(manifest, srcs, dest, [:ex], force, opts) do + {[], []} -> :noop + {_, _} -> :ok + end end @doc """ Returns Elixir manifests. """ - def manifests, do: [manifest] + def manifests, do: [manifest()] defp manifest, do: Path.join(Mix.Project.manifest_path, @manifest) @doc """ Cleans up compilation artifacts. """ def clean do - Mix.Compilers.Elixir.clean(manifest()) - end - - defp set_compiler_opts(project, opts, extra) do - opts = Dict.take(opts, Code.available_compiler_options) - opts = Keyword.merge(project[:elixirc_options] || [], opts) - Code.compiler_options Keyword.merge(opts, extra) - end - - defp local_deps_changed?(manifest) do - manifest = Path.absname(manifest) - - Enum.any?(Mix.Dep.children([]), fn(dep) -> - not dep.scm.fetchable? and Mix.Dep.in_dependency(dep, fn(_) -> - files = Mix.Project.config_files ++ Mix.Tasks.Compile.manifests - Mix.Utils.stale?(files, [manifest]) - end) - end) + dest = Mix.Project.compile_path + Mix.Compilers.Elixir.clean(manifest(), dest) end end diff --git a/lib/mix/lib/mix/tasks/compile.erlang.ex b/lib/mix/lib/mix/tasks/compile.erlang.ex index 0e2da67b4cd..804f3c61ba9 100644 --- a/lib/mix/lib/mix/tasks/compile.erlang.ex +++ b/lib/mix/lib/mix/tasks/compile.erlang.ex @@ -6,7 +6,7 @@ defmodule Mix.Tasks.Compile.Erlang do @manifest ".compile.erlang" @moduledoc """ - Compile Erlang source files. + Compiles Erlang source files. When this task runs, it will first check the modification times of all files to be compiled and if they haven't been @@ -36,41 +36,64 @@ defmodule Mix.Tasks.Compile.Erlang do * `:erlc_include_path` - directory for adding include files. Defaults to `"include"`. - * `:erlc_options` - compilation options that apply to Erlang's compiler. - `:debug_info` is enabled by default. + * `:erlc_options` - compilation options that apply to Erlang's + compiler. Defaults to `[:debug_info]`. + + For a complete list of options, + see [`:compile.file/2`](http://www.erlang.org/doc/man/compile.html#file-2). 
+ + For example, to configure the `erlc_options` for your Erlang project you + may run: + + erlc_options: [:debug_info, {:i, 'path/to/include'}] - There are many available options here: - http://www.erlang.org/doc/man/compile.html#file-2 """ @doc """ Runs this task. """ + @spec run(OptionParser.argv) :: :ok | :noop def run(args) do {opts, _, _} = OptionParser.parse(args, switches: [force: :boolean]) - project = Mix.Project.config source_paths = project[:erlc_paths] + Mix.Compilers.Erlang.assert_valid_erlc_paths(source_paths) + files = Mix.Utils.extract_files(source_paths, [:erl]) + do_run(files, opts, project, source_paths) + end + + defp do_run([], _, _, _), do: :noop + defp do_run(files, opts, project, source_paths) do include_path = to_erl_file project[:erlc_include_path] compile_path = to_erl_file Mix.Project.compile_path(project) - files = Mix.Utils.extract_files(source_paths, [:erl]) erlc_options = project[:erlc_options] || [] + unless is_list(erlc_options) do + Mix.raise ":erlc_options should be a list of options, got: #{inspect(erlc_options)}" + end erlc_options = erlc_options ++ [{:outdir, compile_path}, {:i, include_path}, :report] erlc_options = Enum.map erlc_options, fn - {kind, dir} when kind in [:i, :outdit] -> + {kind, dir} when kind in [:i, :outdir] -> {kind, to_erl_file(dir)} opt -> opt end + compile_path = Path.relative_to(compile_path, File.cwd!) + tuples = files |> scan_sources(include_path, source_paths) |> sort_dependencies |> Enum.map(&annotate_target(&1, compile_path, opts[:force])) - Mix.Compilers.Erlang.compile(manifest(), tuples, fn + Mix.Compilers.Erlang.compile(manifest(), tuples, opts, fn input, _output -> + # We're purging the module because a previous compiler (e.g. Phoenix) + # might have already loaded the previous version of it. + module = Path.basename(input, ".erl") |> String.to_atom + :code.purge(module) + :code.delete(module) + file = to_erl_file(Path.rootname(input, ".erl")) :compile.file(file, erlc_options) end) @@ -79,7 +102,7 @@ defmodule Mix.Tasks.Compile.Erlang do @doc """ Returns Erlang manifests. 
""" - def manifests, do: [manifest] + def manifests, do: [manifest()] defp manifest, do: Path.join(Mix.Project.manifest_path, @manifest) @doc """ @@ -111,14 +134,14 @@ defmodule Mix.Tasks.Compile.Erlang do case form do {:attribute, _, :file, {include_file, _}} when file != include_file -> if File.regular?(include_file) do - %{erl | includes: [include_file|erl.includes]} + %{erl | includes: [include_file | erl.includes]} else erl end {:attribute, _, :behaviour, behaviour} -> - %{erl | behaviours: [behaviour|erl.behaviours]} + %{erl | behaviours: [behaviour | erl.behaviours]} {:attribute, _, :compile, value} -> - %{erl | compile: [value|erl.compile]} + %{erl | compile: [value | erl.compile]} _ -> erl end @@ -127,18 +150,19 @@ defmodule Mix.Tasks.Compile.Erlang do defp sort_dependencies(erls) do graph = :digraph.new - for erl <- erls do + _ = for erl <- erls do :digraph.add_vertex(graph, erl.module, erl) end - for erl <- erls do - for b <- erl.behaviours, do: :digraph.add_edge(graph, b, erl.module) - for c <- erl.compile do + _ = for erl <- erls do + _ = for b <- erl.behaviours, do: :digraph.add_edge(graph, b, erl.module) + _ = for c <- erl.compile do case c do {:parse_transform, transform} -> :digraph.add_edge(graph, transform, erl.module) _ -> :ok end end + :ok end result = @@ -153,9 +177,9 @@ defmodule Mix.Tasks.Compile.Erlang do end defp annotate_target(erl, compile_path, force) do - beam = Path.join(compile_path, "#{erl.module}#{:code.objfile_extension}") + beam = Path.join(compile_path, "#{erl.module}.beam") - if force || Mix.Utils.stale?([erl.file|erl.includes], [beam]) do + if force || Mix.Utils.stale?([erl.file | erl.includes], [beam]) do {:stale, erl.file, beam} else {:ok, erl.file, beam} @@ -163,6 +187,6 @@ defmodule Mix.Tasks.Compile.Erlang do end defp module_from_artifact(artifact) do - artifact |> Path.basename |> Path.rootname + artifact |> Path.basename |> Path.rootname |> String.to_atom end end diff --git a/lib/mix/lib/mix/tasks/compile.ex b/lib/mix/lib/mix/tasks/compile.ex index d3faeaac587..838add24949 100644 --- a/lib/mix/lib/mix/tasks/compile.ex +++ b/lib/mix/lib/mix/tasks/compile.ex @@ -1,29 +1,57 @@ defmodule Mix.Tasks.Compile do use Mix.Task - @shortdoc "Compile source files" - @recursive true + @shortdoc "Compiles source files" @moduledoc """ A meta task that compiles source files. - It simply runs the compilers registered in your project. At - the end of compilation it ensures load paths are set. + It simply runs the compilers registered in your project. ## Configuration - * `:compilers` - compilers to run, defaults to: - `[:leex, :yeec, :erlang, :elixir, :app]` + * `:compilers` - compilers to run, defaults to `Mix.compilers/0`, + which are `[:yecc, :leex, :erlang, :elixir, :xref, :app]`. + + * `:consolidate_protocols` - when `true`, runs protocol + consolidation via the `compile.protocols` task. The default + value is `true`. + + * `:build_embedded` - when `true`, embeds all code and priv + content in the `_build` directory instead of using symlinks. + + * `:build_path` - the directory where build artifacts + should be written to. This option is intended only for + child apps within a larger umbrella application so that + each child app can use the common `_build` directory of + the parent umbrella. In a non-umbrella context, configuring + this has undesirable side-effects (such as skipping some + compiler checks) and should be avoided. 
+ + ## Compilers + + To see documentation for each specific compiler, you must + invoke `help` directly for the compiler command: + + mix help compile.elixir + mix help compile.erlang + + You can get a list of all compilers by running: + + mix compile --list ## Command line options - * `--list` - list all enabled compilers - * `--no-deps-check` - skip checking of dependencies - * `--force` - force compilation + * `--list` - lists all enabled compilers + * `--no-archives-check` - skips checking of archives + * `--no-deps-check` - skips checking of dependencies + * `--force` - forces compilation """ + @spec run(OptionParser.argv) :: :ok | :noop def run(["--list"]) do - Mix.Task.load_all + loadpaths!() + _ = Mix.Task.load_all shell = Mix.shell modules = Mix.Task.all_modules @@ -45,34 +73,41 @@ defmodule Mix.Tasks.Compile do shell.info format('mix ~-#{max}s # ~ts', [task, doc]) end - shell.info "\nEnabled compilers: #{Enum.join compilers(), ", "}" + compilers = compilers() ++ if(consolidate_protocols?(), do: [:protocols], else: []) + shell.info "\nEnabled compilers: #{Enum.join compilers, ", "}" + :ok end def run(args) do - # --no-deps is used only internally. It has not purpose - # from Mix.CLI because the CLI itself already loads - # dependencies. - unless "--no-deps" in args do - Mix.Task.run "deps.loadpaths", args + Mix.Project.get! + Mix.Task.run "loadpaths", args + + res = Mix.Task.run "compile.all", args + res = if :ok in List.wrap(res), do: :ok, else: :noop + + if res == :ok && consolidate_protocols?() do + Mix.Task.run "compile.protocols", args end - Mix.Task.run "loadpaths", args + res + end - res = - Enum.map(compilers(), fn(compiler) -> - List.wrap Mix.Task.run("compile.#{compiler}", args) - end) + # Loadpaths without checks because compilers may be defined in deps. + defp loadpaths! do + Mix.Task.run "loadpaths", ["--no-elixir-version-check", "--no-deps-check", "--no-archives-check"] + Mix.Task.reenable "loadpaths" + Mix.Task.reenable "deps.loadpaths" + end - Code.prepend_path Mix.Project.compile_path - if Enum.any?(res, &(:ok in &1)), do: :ok, else: :noop + defp consolidate_protocols? do + Mix.Project.config[:consolidate_protocols] end @doc """ Returns all compilers. """ def compilers do - Mix.Project.config[:compilers] || - [:yecc, :leex, :erlang, :elixir, :app] + Mix.Project.config[:compilers] || Mix.compilers end @doc """ @@ -80,8 +115,8 @@ defmodule Mix.Tasks.Compile do """ def manifests do Enum.flat_map(compilers(), fn(compiler) -> - module = Mix.Task.get!("compile.#{compiler}") - if function_exported?(module, :manifests, 0) do + module = Mix.Task.get("compile.#{compiler}") + if module && function_exported?(module, :manifests, 0) do module.manifests else [] @@ -94,6 +129,6 @@ defmodule Mix.Tasks.Compile do end defp first_line(doc) do - String.split(doc, "\n", parts: 2) |> hd |> String.strip |> String.rstrip(?.) + String.split(doc, "\n", parts: 2) |> hd |> String.trim |> String.trim_trailing(".") end end diff --git a/lib/mix/lib/mix/tasks/compile.leex.ex b/lib/mix/lib/mix/tasks/compile.leex.ex index 8813ea1680c..7598043df0a 100644 --- a/lib/mix/lib/mix/tasks/compile.leex.ex +++ b/lib/mix/lib/mix/tasks/compile.leex.ex @@ -5,8 +5,13 @@ defmodule Mix.Tasks.Compile.Leex do @recursive true @manifest ".compile.leex" + # These options can't be controlled with :leex_options. + @forced_opts [report: true, + return_errors: false, + return_warnings: false] + @moduledoc """ - Compile Leex source files. + Compiles Leex source files. 
When this task runs, it will check the modification time of every file, and if it has changed, the file will be compiled. Files will be @@ -23,25 +28,37 @@ defmodule Mix.Tasks.Compile.Leex do * `:erlc_paths` - directories to find source files. Defaults to `["src"]`. * `:leex_options` - compilation options that apply - to Leex's compiler. There are many available options - here: http://www.erlang.org/doc/man/leex.html#file-2. + to Leex's compiler. + + For a complete list of options, + see [`:leex.file/2`](http://www.erlang.org/doc/man/leex.html#file-2). + Note that the `:report`, `:return_errors`, and `:return_warnings` options + are overridden by this compiler, thus setting them has no effect. """ @doc """ Runs this task. """ + @spec run(OptionParser.argv) :: :ok | :noop def run(args) do - {opts, _, _} = OptionParser.parse(args, switches: [force: :boolean]) + {opts, _, _} = OptionParser.parse(args, switches: [force: :boolean, verbose: :boolean]) + + project = Mix.Project.config - project = Mix.Project.config source_paths = project[:erlc_paths] - mappings = Enum.zip(source_paths, source_paths) - options = project[:leex_options] || [] + Mix.Compilers.Erlang.assert_valid_erlc_paths(source_paths) + mappings = Enum.zip(source_paths, source_paths) + + options = project[:leex_options] || [] + unless is_list(options) do + Mix.raise ":leex_options should be a list of options, got: #{inspect(options)}" + end - Erlang.compile(manifest(), mappings, :xrl, :erl, opts[:force], fn + Erlang.compile(manifest(), mappings, :xrl, :erl, opts, fn input, output -> - options = options ++ [scannerfile: Erlang.to_erl_file(output), report: true] + Erlang.ensure_application!(:parsetools, input) + options = options ++ @forced_opts ++ [scannerfile: Erlang.to_erl_file(output)] :leex.file(Erlang.to_erl_file(input), options) end) end @@ -49,7 +66,7 @@ defmodule Mix.Tasks.Compile.Leex do @doc """ Returns Leex manifests. """ - def manifests, do: [manifest] + def manifests, do: [manifest()] defp manifest, do: Path.join(Mix.Project.manifest_path, @manifest) @doc """ diff --git a/lib/mix/lib/mix/tasks/compile.protocols.ex b/lib/mix/lib/mix/tasks/compile.protocols.ex index a86b2041c24..2e9b8d0531a 100644 --- a/lib/mix/lib/mix/tasks/compile.protocols.ex +++ b/lib/mix/lib/mix/tasks/compile.protocols.ex @@ -1,73 +1,206 @@ defmodule Mix.Tasks.Compile.Protocols do use Mix.Task - @recursive true - - @shortdoc "Consolidates all protocols in all paths" + @manifest ".compile.protocols" + @manifest_vsn :v2 @moduledoc ~S""" Consolidates all protocols in all paths. - This module consolidates all protocols in the code path - and output the new binary files to the given directory - (defaults to "consolidated"). + This task is automatically invoked unless the project + disables the `:consolidate_protocols` option in their + configuration. + + ## Consolidation + + Protocol consolidation is useful in production when no + dynamic code loading will happen, effectively optimizing + protocol dispatches by not accounting for code loading. - A new directory will be created with the consolidated - protocol versions in the build directory for the given - environment. Simply add it to your loadpath to make use - of it: + This task consolidates all protocols in the code path + and outputs the new binary files to the given directory + (defaults to "_build/MIX_ENV/consolidated"). - $ elixir -pa _build/dev/consolidated -S mix run + In case you are manually compiling protocols or building + releases, you need to take the generated protocols into + account. 
This can be done with: + + $ elixir -pa _build/MIX_ENV/consolidated -S mix run You can verify a protocol is consolidated by checking its attributes: - $ iex -pa _build/dev/consolidated -S mix run + $ iex -pa _build/MIX_ENV/consolidated -S mix run iex> Protocol.consolidated?(Enumerable) true """ - + @spec run(OptionParser.argv) :: :ok def run(args) do + config = Mix.Project.config Mix.Task.run "compile", args - {opts, _, _} = OptionParser.parse(args, switches: [output: :string], aliases: [o: :output]) + {opts, _, _} = OptionParser.parse(args, switches: [force: :boolean, verbose: :boolean]) - paths = filter_otp(:code.get_path, :code.lib_dir) - paths - |> Protocol.extract_protocols - |> consolidate(paths, opts[:output] || Path.join(Mix.Project.build_path, "consolidated")) + output = Mix.Project.consolidation_path(config) + manifest = Path.join(output, @manifest) - :ok + protocols_and_impls = protocols_and_impls(config) + + cond do + opts[:force] || Mix.Utils.stale?(Mix.Project.config_files(), [manifest]) -> + clean() + paths = consolidation_paths() + paths + |> Protocol.extract_protocols + |> consolidate(paths, output, manifest, protocols_and_impls, opts) + + protocols_and_impls -> + manifest + |> diff_manifest(protocols_and_impls, output) + |> consolidate(consolidation_paths(), output, manifest, protocols_and_impls, opts) + + true -> + :noop + end + end + + @doc """ + Cleans up consolidated protocols. + """ + def clean do + File.rm_rf(Mix.Project.consolidation_path) + end + + defp protocols_and_impls(config) do + deps = for(%{scm: scm, opts: opts} <- Mix.Dep.cached(), + not scm.fetchable?, + do: opts[:build]) + + app = + if Mix.Project.umbrella?(config) do + [] + else + [Mix.Project.app_path(config)] + end + + protocols_and_impls = + for path <- app ++ deps do + manifest_path = Path.join(path, ".compile.elixir") + compile_path = Path.join(path, "ebin") + Mix.Compilers.Elixir.protocols_and_impls(manifest_path, compile_path) + end + + Enum.concat(protocols_and_impls) + end + + defp consolidation_paths do + filter_otp(:code.get_path, :code.lib_dir) end defp filter_otp(paths, otp) do Enum.filter(paths, &(not :lists.prefix(&1, otp))) end - defp consolidate(protocols, paths, output) do + defp consolidate([], _paths, output, manifest, metadata, _opts) do + File.mkdir_p!(output) + write_manifest(manifest, metadata) + :noop + end + + defp consolidate(protocols, paths, output, manifest, metadata, opts) do File.mkdir_p!(output) - for protocol <- protocols do - impls = Protocol.extract_impls(protocol, paths) - maybe_reload(protocol) - {:ok, binary} = Protocol.consolidate(protocol, impls) - File.write!(Path.join(output, "#{protocol}.beam"), binary) - Mix.shell.info "Consolidated #{inspect protocol}" - end + protocols + |> Enum.uniq() + |> Enum.map(&Task.async(fn -> consolidate(&1, paths, output, opts) end)) + |> Enum.map(&Task.await(&1, 30_000)) - relative = Path.relative_to_cwd(output) - Mix.shell.info "Consolidated protocols written to #{relative}" + write_manifest(manifest, metadata) + :ok end - defp maybe_reload(module) do - case :code.which(module) do - :non_existing -> - module - file -> - unless Path.extname(file) == ".beam" do - :code.purge(module) - :code.delete(module) + defp consolidate(protocol, paths, output, opts) do + impls = Protocol.extract_impls(protocol, paths) + reload(protocol) + case Protocol.consolidate(protocol, impls) do + {:ok, binary} -> + File.write!(Path.join(output, "#{protocol}.beam"), binary) + if opts[:verbose] do + Mix.shell.info "Consolidated #{inspect protocol}" end + 
+ # If we remove a dependency and we have implemented one of its + # protocols locally, we will mark the protocol as needing to be + # reconsolidated when the implementation is removed even though + # the protocol no longer exists. Although most times removing a + # dependency will trigger a full recompilation, such won't happen + # in umbrella apps with shared build. + {:error, :no_beam_info} -> + remove_consolidated(protocol, output) + if opts[:verbose] do + Mix.shell.info "Unavailable #{inspect protocol}" + end + end + end + + defp reload(module) do + :code.purge(module) + :code.delete(module) + end + + defp read_manifest(manifest, output) do + try do + [@manifest_vsn | metadata] = + manifest |> File.read! |> :erlang.binary_to_term() + metadata + rescue + _ -> + # If there is no manifest or it is out of date, remove old files + File.rm_rf(output) + [] end end + + defp write_manifest(manifest, metadata) do + manifest_data = + [@manifest_vsn | metadata] + |> :erlang.term_to_binary([:compressed]) + + File.write!(manifest, manifest_data) + end + + defp diff_manifest(manifest, new_metadata, output) do + modified = Mix.Utils.last_modified(manifest) + old_metadata = read_manifest(manifest, output) + + protocols = + for {protocol, :protocol, beam} <- new_metadata, + Mix.Utils.last_modified(beam) > modified, + remove_consolidated(protocol, output), + do: {protocol, true}, + into: %{} + + protocols = + Enum.reduce(new_metadata -- old_metadata, protocols, fn + {_, {:impl, protocol}, _beam}, protocols -> + Map.put(protocols, protocol, true) + {protocol, :protocol, _beam}, protocols -> + Map.put(protocols, protocol, true) + end) + + protocols = + Enum.reduce(old_metadata -- new_metadata, protocols, fn + {_, {:impl, protocol}, _beam}, protocols -> + Map.put(protocols, protocol, true) + {protocol, :protocol, _beam}, protocols -> + remove_consolidated(protocol, output) + protocols + end) + + Map.keys(protocols) + end + + defp remove_consolidated(protocol, output) do + File.rm Path.join(output, "#{protocol}.beam") + end end diff --git a/lib/mix/lib/mix/tasks/compile.xref.ex b/lib/mix/lib/mix/tasks/compile.xref.ex new file mode 100644 index 00000000000..6e7a77e2f0c --- /dev/null +++ b/lib/mix/lib/mix/tasks/compile.xref.ex @@ -0,0 +1,76 @@ +defmodule Mix.Tasks.Compile.Xref do + use Mix.Task + alias Mix.Tasks.Compile.Elixir, as: E + + @recursive true + @manifest ".compile.xref" + + @moduledoc """ + Performs remote dispatch checking. + + When this task runs, it will check the modification time of the `:elixir` + compiler manifest. If it has changed, `mix xref` will be run to check remote + dispatches. You can force checking regardless of modification time by passing + the `--force` option. + + ## Command line options + + * `--force` - forces checking regardless of modification time + * `--warnings-as-errors` - treats warnings as errors and returns a non-zero exit code + + """ + + @doc """ + Runs this task. 
+ """ + @spec run(OptionParser.argv) :: :ok | :noop + def run(args) do + {opts, _, _} = + OptionParser.parse(args, switches: [force: :boolean, warnings_as_errors: :boolean]) + + if needs_xref?(opts) do + if should_exit?(run_xref(), opts) do + exit({:shutdown, 1}) + end + write_manifest() + end + + :noop + end + + defp run_xref do + Mix.Task.run("xref", ["warnings"]) + end + + defp needs_xref?(opts) do + !!opts[:force] or Mix.Utils.stale?(E.manifests(), manifests()) + end + + defp should_exit?(:error, opts), + do: warnings_as_errors(opts) == true + defp should_exit?(_, _opts), + do: false + + defp warnings_as_errors(opts) do + Keyword.get_lazy(opts, :warnings_as_errors, fn -> + Mix.Project.config()[:elixirc_options][:warnings_as_errors] + end) + end + + @doc """ + Returns xref manifests. + """ + def manifests, do: [manifest()] + defp manifest, do: Path.join(Mix.Project.manifest_path, @manifest) + + defp write_manifest do + File.touch(manifest()) + end + + @doc """ + Cleans up xref manifest. + """ + def clean do + File.rm manifest() + end +end diff --git a/lib/mix/lib/mix/tasks/compile.yecc.ex b/lib/mix/lib/mix/tasks/compile.yecc.ex index 99714a6975f..1f2a1250392 100644 --- a/lib/mix/lib/mix/tasks/compile.yecc.ex +++ b/lib/mix/lib/mix/tasks/compile.yecc.ex @@ -5,8 +5,13 @@ defmodule Mix.Tasks.Compile.Yecc do @recursive true @manifest ".compile.yecc" + # These options can't be controlled with :yecc_options. + @forced_opts [report: true, + return_errors: false, + return_warnings: false] + @moduledoc """ - Compile Yecc source files. + Compiles Yecc source files. When this task runs, it will check the modification time of every file, and if it has changed, the file will be compiled. Files will be @@ -23,25 +28,37 @@ defmodule Mix.Tasks.Compile.Yecc do * `:erlc_paths` - directories to find source files. Defaults to `["src"]`. * `:yecc_options` - compilation options that apply - to Yecc's compiler. There are many other available - options here: http://www.erlang.org/doc/man/yecc.html#file-1. + to Yecc's compiler. + + For a complete list of options, + see [`:yecc.file/1`](http://www.erlang.org/doc/man/yecc.html#file-1). + Note that the `:report`, `:return_errors`, and `:return_warnings` options + are overridden by this compiler, thus setting them has no effect. """ @doc """ Runs this task. """ + @spec run(OptionParser.argv) :: :ok | :noop def run(args) do {opts, _, _} = OptionParser.parse(args, switches: [force: :boolean]) - project = Mix.Project.config + project = Mix.Project.config + source_paths = project[:erlc_paths] - mappings = Enum.zip(source_paths, source_paths) - options = project[:yecc_options] || [] + Mix.Compilers.Erlang.assert_valid_erlc_paths(source_paths) + mappings = Enum.zip(source_paths, source_paths) + + options = project[:yecc_options] || [] + unless is_list(options) do + Mix.raise ":yecc_options should be a list of options, got: #{inspect(options)}" + end - Erlang.compile(manifest(), mappings, :yrl, :erl, opts[:force], fn + Erlang.compile(manifest(), mappings, :yrl, :erl, opts, fn input, output -> - options = options ++ [parserfile: Erlang.to_erl_file(output), report: true] + Erlang.ensure_application!(:parsetools, input) + options = options ++ @forced_opts ++ [parserfile: Erlang.to_erl_file(output)] :yecc.file(Erlang.to_erl_file(input), options) end) end @@ -49,7 +66,7 @@ defmodule Mix.Tasks.Compile.Yecc do @doc """ Returns Yecc manifests. 
""" - def manifests, do: [manifest] + def manifests, do: [manifest()] defp manifest, do: Path.join(Mix.Project.manifest_path, @manifest) @doc """ diff --git a/lib/mix/lib/mix/tasks/deps.check.ex b/lib/mix/lib/mix/tasks/deps.check.ex deleted file mode 100644 index f60c5d50736..00000000000 --- a/lib/mix/lib/mix/tasks/deps.check.ex +++ /dev/null @@ -1,103 +0,0 @@ -defmodule Mix.Tasks.Deps.Check do - use Mix.Task - - import Mix.Dep, only: [loaded: 1, loaded_by_name: 2, format_dep: 1, - format_status: 1, check_lock: 2, ok?: 1] - - @moduledoc """ - Checks if all dependencies are valid and if not, abort. - Prints the invalid dependencies' status before aborting. - - This task is not shown in `mix help` but it is part - of the `mix` public API and can be depended on. - - ## Command line options - - * `--no-compile` - do not compile dependencies - - """ - def run(args) do - {opts, _, _} = OptionParser.parse(args) - lock = Mix.Dep.Lock.read - all = Enum.map(loaded(env: Mix.env), &check_lock(&1, lock)) - - prune_deps(all) - {not_ok, compile} = partition_deps(all, [], []) - - cond do - not_ok != [] -> - show_not_ok(not_ok) - compile == [] or opts[:no_compile] -> - :ok - true -> - Mix.Tasks.Deps.Compile.compile(compile, opts) - show_not_ok compile - |> Enum.map(& &1.app) - |> loaded_by_name(env: Mix.env) - |> Enum.filter(&(not ok?(&1))) - end - end - - defp partition_deps([dep|deps], not_ok, compile) do - cond do - compile?(dep) -> partition_deps(deps, not_ok, [dep|compile]) - ok?(dep) and local?(dep) -> partition_deps(deps, not_ok, [dep|compile]) - ok?(dep) -> partition_deps(deps, not_ok, compile) - true -> partition_deps(deps, [dep|not_ok], compile) - end - end - - defp partition_deps([], not_ok, compile) do - {Enum.reverse(not_ok), Enum.reverse(compile)} - end - - defp local?(dep) do - not dep.scm.fetchable? and dep.opts[:from_umbrella] != true - end - - defp compile?(%Mix.Dep{status: {:elixirlock, _}}), do: true - defp compile?(%Mix.Dep{status: {:noappfile, _}}), do: true - defp compile?(%Mix.Dep{status: :compile}), do: true - defp compile?(%Mix.Dep{}), do: false - - # If the build is per environment, we should be able to look - # at all dependencies and remove the builds that no longer - # have a dependency defined for them. - # - # Notice we require the build_path to be nil. If the build_path - # is not nil, it means it was set by a parent application and - # the parent application should be the one to do the pruning. - defp prune_deps(all) do - config = Mix.Project.config - - if nil?(config[:build_path]) && config[:build_per_environment] do - paths = Mix.Project.build_path(config) - |> Path.join("lib/*/ebin") - |> Path.wildcard - |> List.delete(not Mix.Project.umbrella? 
&& Mix.Project.compile_path(config)) - - to_prune = Enum.reduce(all, paths, &(&2 -- Mix.Dep.load_paths(&1))) - - Enum.map(to_prune, fn path -> - Code.delete_path(path) - File.rm_rf!(path |> Path.dirname) - end) - end - end - - defp show_not_ok([]) do - :ok - end - - defp show_not_ok(deps) do - shell = Mix.shell - shell.error "Unchecked dependencies for environment #{Mix.env}:" - - Enum.each deps, fn(dep) -> - shell.error "* #{format_dep(dep)}" - shell.error " #{format_status dep}" - end - - Mix.raise "Can't continue due to errors on dependencies" - end -end diff --git a/lib/mix/lib/mix/tasks/deps.clean.ex b/lib/mix/lib/mix/tasks/deps.clean.ex index 27e147dbbcc..18182d0329b 100644 --- a/lib/mix/lib/mix/tasks/deps.clean.ex +++ b/lib/mix/lib/mix/tasks/deps.clean.ex @@ -1,59 +1,114 @@ defmodule Mix.Tasks.Deps.Clean do use Mix.Task - @shortdoc "Remove the given dependencies' files" + @shortdoc "Deletes the given dependencies' files" @moduledoc """ - Remove the given dependencies' files. + Deletes the given dependencies' files, including build artifacts and fetched + sources. - Since this is a destructive action, cleaning of all dependencies - can only happen by passing the `--all` command line option. It - also works accross all environments, unless `--only` is given. + Since this is a destructive action, cleaning of dependencies + only occurs when passing arguments/options: - Clean does not unlock the dependencies, unless `--unlock` is given. + * `dep1 dep2` - the names of dependencies to be deleted separated by a space + * `--unlock` - also unlocks the deleted dependencies + * `--build` - deletes only compiled files (keeps source files) + * `--all` - deletes all dependencies + * `--unused` - deletes only unused dependencies + (i.e. dependencies no longer mentioned in `mix.exs`) + + By default this task works across all environments, + unless `--only` is given which will clean all dependencies + leaving only the ones for chosen environment. """ - @switches [unlock: :boolean, all: :boolean, only: :string] + @switches [unlock: :boolean, all: :boolean, only: :string, unused: :boolean, + build: :boolean] + + @spec run(OptionParser.argv) :: :ok def run(args) do - Mix.Project.get! # Require the project to be available - {opts, args, _} = OptionParser.parse(args, switches: @switches) - - cond do - opts[:all] -> - # Clean all deps by default unless --only is given - clean_opts = if only = opts[:only], do: [env: :"#{only}"], else: [] - apps = Mix.Dep.loaded(clean_opts) |> Enum.map(&(&1.app)) - do_clean apps, opts - args != [] -> - do_clean args, opts + Mix.Project.get! + {opts, apps, _} = OptionParser.parse(args, switches: @switches) + + build_path = + Mix.Project.build_path + |> Path.dirname + |> Path.join("#{opts[:only] || :*}/lib") + deps_path = Mix.Project.deps_path + + loaded_opts = if only = opts[:only], do: [env: :"#{only}"], else: [] + loaded_deps = Mix.Dep.loaded(loaded_opts) + + apps_to_clean = cond do + opts[:all] -> checked_deps(build_path, deps_path) + opts[:unused] -> checked_deps(build_path, deps_path) |> filter_loaded(loaded_deps) + apps != [] -> apps true -> - Mix.raise "mix deps.clean expects dependencies as arguments or " <> - "the --all option to clean all dependencies" + Mix.raise "\"mix deps.clean\" expects dependencies as arguments or " <> + "a flag indicating which dependencies to clean. 
" <> + "The --all option will clean all dependencies while " <> + "the --unused option cleans unused dependencies" + end + + do_clean(apps_to_clean, loaded_deps, build_path, deps_path, opts[:build]) + + if opts[:unlock] do + Mix.Task.run "deps.unlock", args + else + :ok + end + end + + defp checked_deps(build_path, deps_path) do + for root <- [deps_path, build_path], + path <- Path.wildcard(Path.join(root, "*")), + File.dir?(path) do + Path.basename(path) end + |> Enum.uniq() + |> List.delete(to_string(Mix.Project.config[:app])) + end + + defp filter_loaded(apps, deps) do + apps -- Enum.map(deps, &Atom.to_string(&1.app)) + end + + defp maybe_warn_for_invalid_path([], dependency) do + Mix.shell.error "warning: the dependency #{dependency} is not present in the build directory" + [] + end + defp maybe_warn_for_invalid_path(paths, _dependency) do + paths end - defp do_clean(apps, opts) do + defp do_clean(apps, deps, build_path, deps_path, build_only?) do shell = Mix.shell - build = Mix.Project.build_path - |> Path.dirname - |> Path.join("#{opts[:only] || :*}/lib") - deps = Mix.Project.deps_path - Enum.each apps, fn(app) -> + local = + for %{scm: scm, app: app} <- deps, + not scm.fetchable?, + do: Atom.to_string(app) + + Enum.each apps, fn app -> shell.info "* Cleaning #{app}" - build + # Remove everything from the build directory of dependencies + build_path |> Path.join(to_string(app)) |> Path.wildcard + |> maybe_warn_for_invalid_path(app) |> Enum.each(&File.rm_rf!/1) - deps - |> Path.join(to_string(app)) - |> File.rm_rf! - end - - if opts[:unlock] do - Mix.Task.run "deps.unlock", apps + # Remove everything from the source directory of dependencies. + # Skip this step if --build option is specified or if + # the dependency is local, i.e., referenced using :path. + if build_only? || app in local do + :do_not_delete_source + else + deps_path + |> Path.join(to_string(app)) + |> File.rm_rf! + end end end end diff --git a/lib/mix/lib/mix/tasks/deps.compile.ex b/lib/mix/lib/mix/tasks/deps.compile.ex index 7a2c965be00..df337d15b8d 100644 --- a/lib/mix/lib/mix/tasks/deps.compile.ex +++ b/lib/mix/lib/mix/tasks/deps.compile.ex @@ -1,13 +1,13 @@ defmodule Mix.Tasks.Deps.Compile do use Mix.Task - @shortdoc "Compile dependencies" + @shortdoc "Compiles dependencies" @moduledoc """ - Compile dependencies. + Compiles dependencies. - By default, compile all dependencies. A list of dependencies can - be given to force the compilation of specific dependencies. + By default, compile all dependencies. A list of dependencies + can be given to compile multiple dependencies in order. This task attempts to detect if the project contains one of the following files and act accordingly: @@ -15,145 +15,231 @@ defmodule Mix.Tasks.Deps.Compile do * `mix.exs` - invokes `mix compile` * `rebar.config` - invokes `rebar compile` * `Makefile.win` - invokes `nmake /F Makefile.win` (only on Windows) - * `Makefile` - invokes `make` (except on Windows) + * `Makefile` - invokes `gmake` on FreeBSD and OpenBSD, invokes `make` on any other OS (except on Windows) The compilation can be customized by passing a `compile` option in the dependency: {:some_dependency, "0.1.0", compile: "command to compile"} + If a list of dependencies is given, Mix will attempt to compile + them as is. For example, if project `a` depends on `b`, calling + `mix deps.compile a` will compile `a` even if `b` is out of + date. This is to allow parts of the dependency tree to be + recompiled without propagating those changes upstream. 
To ensure + `b` is included in the compilation step, pass `--include-children`. """ import Mix.Dep, only: [loaded: 1, available?: 1, loaded_by_name: 2, - format_dep: 1, make?: 1, mix?: 1, rebar?: 1] + make?: 1, mix?: 1] + @switches [include_children: :boolean, force: :boolean] + + @spec run(OptionParser.argv) :: :ok def run(args) do - Mix.Project.get! # Require the project to be available + unless "--no-archives-check" in args do + Mix.Task.run "archive.check", args + end + + Mix.Project.get! - case OptionParser.parse(args) do + case OptionParser.parse(args, switches: @switches) do {opts, [], _} -> - compile(Enum.filter(loaded(env: Mix.env), &compilable?/1), opts) + # Because this command may be invoked explicitly with + # deps.compile, we simply try to compile any available + # dependency. + compile(Enum.filter(loaded(env: Mix.env), &available?/1), opts) {opts, tail, _} -> - compile(loaded_by_name(tail, env: Mix.env), opts) + compile(loaded_by_name(tail, [env: Mix.env] ++ opts), opts) end end @doc false - def compile(deps, _opts) do + def compile(deps, options \\ []) do shell = Mix.shell config = Mix.Project.deps_config + Mix.Task.run "deps.precompile" + compiled = Enum.map(deps, fn %Mix.Dep{app: app, status: status, opts: opts, scm: scm} = dep -> check_unavailable!(app, status) - compiled = cond do - not nil?(opts[:compile]) -> - do_compile dep + maybe_clean(app, options) + + compiled? = cond do + not is_nil(opts[:compile]) -> + do_compile dep, config mix?(dep) -> - do_mix dep - rebar?(dep) -> - do_rebar dep, config + do_mix dep, config make?(dep) -> - do_make dep + do_make dep, config + dep.manager == :rebar -> + do_rebar dep, config + dep.manager == :rebar3 -> + do_rebar3 dep, config true -> - shell.error "Could not compile #{app}, no mix.exs, rebar.config or Makefile " <> - "(pass :compile as an option to customize compilation, set it to false to do nothing)" + shell.error "Could not compile #{inspect app}, no \"mix.exs\", \"rebar.config\" or \"Makefile\" " <> + "(pass :compile as an option to customize compilation, set it to \"false\" to do nothing)" + false end unless mix?(dep), do: build_structure(dep, config) - if scm.fetchable?, do: Mix.Dep.Lock.touch(opts[:build]) - compiled + # We should touch fetchable dependencies even if they + # did not compile otherwise they will always be marked + # as stale, even when there is nothing to do. + fetchable? = touch_fetchable(scm, opts[:build]) + compiled? and fetchable? end) - if Enum.any?(compiled), do: Mix.Dep.Lock.touch + if true in compiled, do: Mix.Dep.Lock.touch_manifest, else: :ok + end + + defp maybe_clean(app, opts) do + if Keyword.get(opts, :force, false) do + File.rm_rf! Path.join [Mix.Project.build_path, "lib", Atom.to_string(app)] + end end - # All available dependencies can be compiled - # except for umbrella applications. - defp compilable?(%Mix.Dep{extra: extra} = dep) do - available?(dep) and !extra[:umbrella?] + defp touch_fetchable(scm, path) do + if scm.fetchable? 
do + File.mkdir_p!(path) + File.touch!(Path.join(path, ".compile.fetch")) + true + else + false + end end defp check_unavailable!(app, {:unavailable, _}) do - Mix.raise "Cannot compile dependency #{app} because " <> - "it isn't available, run `mix deps.get` first" + Mix.raise "Cannot compile dependency #{inspect app} because " <> + "it isn't available, run \"mix deps.get\" first" end defp check_unavailable!(_, _) do :ok end - defp do_mix(dep) do + defp do_mix(dep, _config) do Mix.Dep.in_dependency dep, fn _ -> + if req = old_elixir_req(Mix.Project.config) do + Mix.shell.error "warning: the dependency #{inspect dep.app} requires Elixir #{inspect req} " <> + "but you are running on v#{System.version}" + end + + # Force recompilation on compile status + if dep.status == :compile do + Mix.Dep.Lock.touch_manifest + end + try do - res = Mix.Task.run("compile", ["--no-deps", "--no-elixir-version-check"]) + res = Mix.Task.run("compile", ["--no-deps", "--no-archives-check", + "--no-elixir-version-check", "--no-warnings-as-errors"]) :ok in List.wrap(res) catch kind, reason -> stacktrace = System.stacktrace app = dep.app - Mix.shell.error "could not compile dependency #{app}, mix compile failed. " <> - "You can recompile this dependency with `mix deps.compile #{app}` or " <> - "update it with `mix deps.update #{app}`" + Mix.shell.error "could not compile dependency #{inspect app}, \"mix compile\" failed. " <> + "You can recompile this dependency with \"mix deps.compile #{app}\", update it " <> + "with \"mix deps.update #{app}\" or clean it with \"mix deps.clean #{app}\"" :erlang.raise(kind, reason, stacktrace) end end end - defp do_rebar(%Mix.Dep{app: app} = dep, config) do - do_command dep, rebar_cmd(app), "compile skip_deps=true deps_dir=#{inspect config[:deps_path]}" + defp do_rebar(dep, config) do + lib_path = Path.join(config[:env_path], "lib") + cmd = "#{rebar_cmd(dep)} compile skip_deps=true deps_dir=#{inspect lib_path}" + do_command dep, config, cmd, false + end + + defp do_rebar3(%Mix.Dep{opts: opts} = dep, config) do + dep_path = opts[:build] + config_path = Path.join(dep_path, "mix.rebar.config") + lib_path = Path.join(config[:env_path], "lib/*/ebin") + + env = [{"REBAR_CONFIG", config_path}, {"TERM", "dumb"}] + cmd = "#{rebar_cmd(dep)} bare compile --paths #{inspect lib_path}" + + File.mkdir_p!(dep_path) + File.write!(config_path, rebar_config(dep)) + do_command dep, config, cmd, false, env + end + + defp rebar_config(dep) do + dep.extra + |> Mix.Rebar.dependency_config + |> Mix.Rebar.serialize_config end - defp rebar_cmd(app) do - Mix.Rebar.rebar_cmd || handle_rebar_not_found(app) + defp rebar_cmd(%Mix.Dep{manager: manager} = dep) do + Mix.Rebar.rebar_cmd(manager) || handle_rebar_not_found(dep) end - defp handle_rebar_not_found(app) do + defp handle_rebar_not_found(%Mix.Dep{app: app, manager: manager}) do shell = Mix.shell - shell.info "Could not find rebar, which is needed to build dependency #{inspect app}" - shell.info "I can install a local copy which is just used by mix" + shell.info "Could not find \"#{manager}\", which is needed to build dependency #{inspect app}" + shell.info "I can install a local copy which is just used by Mix" - unless shell.yes?("Shall I install rebar?") do - Mix.raise "Could not find rebar to compile " <> - "dependency #{app}, please ensure rebar is available" + unless shell.yes?("Shall I install #{manager}? 
(if running non-interactively, use \"mix local.rebar --force\")") do + Mix.raise "Could not find \"#{manager}\" to compile " <> + "dependency #{inspect app}, please ensure \"#{manager}\" is available" end - Mix.Tasks.Local.Rebar.run [] - Mix.Rebar.local_rebar_cmd || Mix.raise "rebar installation failed" + (Mix.Tasks.Local.Rebar.run([]) && Mix.Rebar.local_rebar_cmd(manager)) || + Mix.raise "\"#{manager}\" installation failed" end - defp do_make(dep) do - if match?({:win32, _}, :os.type) and File.regular?("Makefile.win") do - do_command(dep, "nmake /F Makefile.win") + defp do_make(dep, config) do + command = make_command(dep) + do_command(dep, config, command, true, [{"IS_DEP", "1"}]) + end + + defp make_command(dep) do + makefile_win? = makefile_win?(dep) + + command = + case :os.type do + {:win32, _} when makefile_win? -> + "nmake /F Makefile.win" + {:unix, type} when type in [:freebsd, :openbsd] -> + "gmake" + _ -> + "make" + end + + if erlang_mk?(dep) do + "#{command} clean && #{command}" else - do_command(dep, "make") + command end end - defp do_compile(%Mix.Dep{app: app, opts: opts} = dep) do + defp do_compile(%Mix.Dep{opts: opts} = dep, config) do if command = opts[:compile] do - Mix.shell.info("#{app}: #{command}") - do_command(dep, command) + do_command(dep, config, command, true) else false end end - defp do_command(%Mix.Dep{app: app, opts: opts}, command, extra \\ "") do - File.cd! opts[:dest], fn -> - if Mix.shell.cmd("#{command} #{extra}") != 0 do - Mix.raise "Could not compile dependency #{app}, #{command} command failed. " <> - "If you want to recompile this dependency, please run: mix deps.compile #{app}" + defp do_command(%Mix.Dep{app: app, opts: opts}, config, command, print_app?, env \\ []) do + File.cd!(opts[:dest], fn -> + env = [{"ERL_LIBS", Path.join(config[:env_path], "lib")}] ++ env + if Mix.shell.cmd(command, print_app: print_app?, env: env) != 0 do + Mix.raise "Could not compile dependency #{inspect app}, \"#{command}\" command failed. 
" <> + "You can recompile this dependency with \"mix deps.compile #{app}\", update it " <> + "with \"mix deps.update #{app}\" or clean it with \"mix deps.clean #{app}\"" end - end + end) true end defp build_structure(%Mix.Dep{opts: opts} = dep, config) do build_path = Path.dirname(opts[:build]) - Enum.each Mix.Dep.source_paths(dep), fn source -> - app = Path.join(build_path, Path.basename(source)) + Enum.each Mix.Dep.source_paths(dep), fn {source, base} -> + app = Path.join(build_path, base) build_structure(source, app, config) Code.prepend_path(Path.join(app, "ebin")) end @@ -165,4 +251,19 @@ defmodule Mix.Tasks.Deps.Compile do Mix.Project.build_structure(config, symlink_ebin: true) end end + + defp old_elixir_req(config) do + req = config[:elixir] + if req && not Version.match?(System.version, req) do + req + end + end + + defp erlang_mk?(%Mix.Dep{opts: opts}) do + File.regular?(Path.join(opts[:dest], "erlang.mk")) + end + + defp makefile_win?(%Mix.Dep{opts: opts}) do + File.regular?(Path.join(opts[:dest], "Makefile.win")) + end end diff --git a/lib/mix/lib/mix/tasks/deps.ex b/lib/mix/lib/mix/tasks/deps.ex index aebed477bab..ca3ec6fa940 100644 --- a/lib/mix/lib/mix/tasks/deps.ex +++ b/lib/mix/lib/mix/tasks/deps.ex @@ -1,12 +1,12 @@ defmodule Mix.Tasks.Deps do use Mix.Task - import Mix.Dep, only: [loaded: 1, format_dep: 1, format_status: 1, check_lock: 2] + import Mix.Dep, only: [loaded: 1, format_dep: 1, format_status: 1, check_lock: 1] - @shortdoc "List dependencies and their status" + @shortdoc "Lists dependencies and their status" @moduledoc ~S""" - List all dependencies and their status. + Lists all dependencies and their status. Dependencies must be specified in the `mix.exs` file in one of the following formats: @@ -18,102 +18,131 @@ defmodule Mix.Tasks.Deps do Where: * app is an atom - * requirement is a version requirement or a regular expression + * requirement is a `Version` requirement or a regular expression * opts is a keyword list of options + For example: + + {:plug, ">= 0.4.0"} + {:gettext, git: "https://github.com/elixir-lang/gettext.git", tag: "0.1"} + {:local_dependency, path: "path/to/local_dependency"} + By default, dependencies are fetched using the [Hex package manager](https://hex.pm/): {:plug, ">= 0.4.0"} By specifying such dependencies, Mix will automatically install - Hex (if it wasn't previously installed and download a package - suitable to your project). + Hex (if it wasn't previously installed) and download a package + suitable to your project. - Mix also supports git and path dependencies: + Mix also supports Git and path dependencies: {:foobar, git: "https://github.com/elixir-lang/foobar.git", tag: "0.1"} {:foobar, path: "path/to/foobar"} And also in umbrella dependencies: - {:myapp, in_umbrella: true} + {:my_app, in_umbrella: true} Path and in umbrella dependencies are automatically recompiled by - the parent project whenever they change. While fetchable dependencies - like git are recompiled only when fetched/updated. + the parent project whenever they change. While fetchable dependencies, + like the ones using `:git`, are recompiled only when fetched/updated. - The dependencies versions are expected to follow Semantic Versioning - and the requirements must be specified as defined in the `Version` - module. + The dependencies' versions are expected to be formatted according to + Semantic Versioning and the requirements must be specified as defined + in the `Version` module. 
+ + ## Options Below we provide a more detailed look into the available options. - ## Mix options + ### Dependency definition options - * `:app` - when set to false, does not read the app file for this - dependency + * `:app` - when set to `false`, does not read the app file for this + dependency. By default, the app file is read - * `:env` - the environment to run the dependency on, defaults to :prod + * `:env` - the environment (as an atom) to run the dependency on; defaults to `:prod` - * `:compile` - a command to compile the dependency, defaults to a mix, - rebar or make command + * `:compile` - a command (string) to compile the dependency; defaults to a `mix`, + `rebar` or `make` command - * `:optional` - the dependency is optional and used only to specify - requirements + * `:optional` - marks the dependency as optional. In such cases, the + current project will always include the optional dependency but any + other project that depends on the current project won't be forced to + use the optional dependency. However, if the other project includes + the optional dependency on its own, the requirements and options + specified here will also be applied. - * `:only` - the dependency will belong only to the given environments, - useful when declaring dev- or test-only dependencies + * `:only` - the dependency is made available only in the given environments, + useful when declaring dev- or test-only dependencies; by default the + dependency will be available in all environments. The value of this option + can either be a single environment (like `:dev`) or a list of environments + (like `[:dev, :test]`) - * `:override` - if set to true the dependency will override any other + * `:override` - if set to `true` the dependency will override any other definitions of itself by other dependencies - ## Git options (`:git`) - - * `:git` - the git repository URI - * `:github` - a shortcut for specifying git repos from github, uses `git:` - * `:ref` - the reference to checkout (may be a branch, a commit sha or a tag) - * `:branch` - the git branch to checkout - * `:tag` - the git tag to checkout - * `:submodules` - when true, initialize submodules for the repo - - ## Path options (`:path`) + * `:manager` - Mix can also compile Rebar, Rebar3 and makefile projects + and can fetch sub dependencies of Rebar and Rebar3 projects. Mix will + try to infer the type of project but it can be overridden with this + option by setting it to `:mix`, `:rebar3`, `:rebar` or `:make`. In case + there are conflicting definitions, the first manager in the list above + will be picked up. For example, if a dependency is found with `:rebar3` + and `:rebar` managers in different part of the trees, `:rebar3` will + be automatically picked. You can find the manager by running `mix deps` + and override it by setting the `:override` option in a top-level project. + + * `:runtime` - whether the dependency is part of runtime applications. 
+ Defaults to `true` which automatically adds the application to the list + of apps that are started automatically and included in releases + + ### Git options (`:git`) + + * `:git` - the Git repository URI + * `:github` - a shortcut for specifying Git repos from GitHub, uses `git:` + * `:ref` - the reference to checkout (may be a branch, a commit SHA or a tag) + * `:branch` - the Git branch to checkout + * `:tag` - the Git tag to checkout + * `:submodules` - when `true`, initialize submodules for the repo + * `:sparse` - checkout a single directory inside the Git repository and use it + as your Mix dependency. Search "sparse git checkouts" for more information. + + ### Path options (`:path`) * `:path` - the path for the dependency - * `:in_umbrella` - when true, sets a path dependency pointing to + * `:in_umbrella` - when `true`, sets a path dependency pointing to "../#{app}", sharing the same environment as the current application - ## mix deps task + ## Deps task - This task lists all dependencies in the following format: + `mix deps` task lists all dependencies in the following format: - APP VERSION (SCM) + APP VERSION (SCM) (MANAGER) [locked at REF] STATUS It supports the following options: - * `--all` - check all dependencies, regardless of specified environment + * `--all` - checks all dependencies, regardless of specified environment """ + @spec run(OptionParser.argv) :: :ok def run(args) do - Mix.Project.get! # Require the project to be available + Mix.Project.get! {opts, _, _} = OptionParser.parse(args) - - if opts[:all] do - loaded_opts = [] - else - loaded_opts = [env: Mix.env] - end + loaded_opts = if opts[:all], do: [], else: [env: Mix.env] shell = Mix.shell - lock = Mix.Dep.Lock.read - Enum.each loaded(loaded_opts), fn %Mix.Dep{scm: scm} = dep -> - dep = check_lock(dep, lock) - shell.info "* #{format_dep(dep)}" + Enum.each loaded(loaded_opts), fn %Mix.Dep{scm: scm, manager: manager} = dep -> + dep = check_lock(dep) + extra = if manager, do: " (#{manager})", else: "" + + shell.info "* #{format_dep(dep)}#{extra}" if formatted = scm.format_lock(dep.opts) do shell.info " locked at #{formatted}" end + shell.info " #{format_status dep}" end end diff --git a/lib/mix/lib/mix/tasks/deps.get.ex b/lib/mix/lib/mix/tasks/deps.get.ex index 9f4fd9f8a70..2304fbb457b 100644 --- a/lib/mix/lib/mix/tasks/deps.get.ex +++ b/lib/mix/lib/mix/tasks/deps.get.ex @@ -1,33 +1,35 @@ defmodule Mix.Tasks.Deps.Get do use Mix.Task - @shortdoc "Get all out of date dependencies" + @shortdoc "Gets all out of date dependencies" @moduledoc """ - Get all out of date dependencies, i.e. dependencies + Gets all out of date dependencies, i.e. dependencies that are not available or have an invalid lock. ## Command line options - * `--quiet` - do not output verbose messages - * `--only` - only fetch dependencies for given environment + * `--only` - only fetches dependencies for given environment + * `--no-archives-check` - does not check archives before fetching deps """ + @spec run(OptionParser.argv) :: :ok def run(args) do - Mix.Project.get! # Require the project to be available - {opts, rest, _} = OptionParser.parse(args, switches: [quiet: :boolean, only: :string]) + unless "--no-archives-check" in args do + Mix.Task.run "archive.check", args + end + + Mix.Project.get! 
+ {opts, _, _} = OptionParser.parse(args, switches: [only: :string]) # Fetch all deps by default unless --only is given fetch_opts = if only = opts[:only], do: [env: :"#{only}"], else: [] - apps = - if rest != [] do - Mix.Dep.Fetcher.by_name(rest, %{}, Mix.Dep.Lock.read, fetch_opts) - else - Mix.Dep.Fetcher.all(%{}, Mix.Dep.Lock.read, fetch_opts) - end + apps = Mix.Dep.Fetcher.all(%{}, Mix.Dep.Lock.read, fetch_opts) - if apps == [] && !opts[:quiet] do + if apps == [] do Mix.shell.info "All dependencies up to date" + else + :ok end end end diff --git a/lib/mix/lib/mix/tasks/deps.loadpaths.ex b/lib/mix/lib/mix/tasks/deps.loadpaths.ex index 46cdf72b186..a7a28c63666 100644 --- a/lib/mix/lib/mix/tasks/deps.loadpaths.ex +++ b/lib/mix/lib/mix/tasks/deps.loadpaths.ex @@ -1,26 +1,147 @@ defmodule Mix.Tasks.Deps.Loadpaths do use Mix.Task + import Mix.Dep, only: [loaded_by_name: 2, format_dep: 1, ok?: 1, + format_status: 1, check_lock: 1] + @moduledoc """ - Loads all dependencies for the current build. - This is invoked directly by `loadpaths` when - the CLI boots. + Checks and loads all dependencies along the way. + + If there is an invalid dependency, its status is printed + before aborting. + + Although this task does not show up in `mix help`, it is + part of Mix public API and can be depended on. ## Command line options - * `--no-deps-check` - skip dependency check + * `--no-deps-check` - does not check or compile deps, only load available ones + * `--no-compile` - does not compile dependencies + """ + @spec run(OptionParser.argv) :: :ok def run(args) do + all = Mix.Dep.cached() unless "--no-deps-check" in args do - Mix.Task.run "deps.check", args + deps_check(all, "--no-compile" in args) end + load_paths = + for dep <- all, path <- Mix.Dep.load_paths(dep) do + _ = Code.prepend_path(path) + path + end + + # Since MIX_NO_DEPS returns no dependencies, it would + # cause all paths to be pruned, so we never enter here. + unless System.get_env("MIX_NO_DEPS") in ~w(1 true) do + prune_deps(load_paths, "--no-deps-check" in args) + end + end + + # If the build is per environment, we should be able to look + # at all dependencies and remove the builds that no longer + # have a dependency defined for them. + # + # Notice we require the build_path to be nil. If it is not nil, + # it means the build_path is shared so we don't delete entries. + # + # We also expect env_path to be nil. If it is not nil, it means + # it was set by a parent application and the parent application + # should be the one doing the pruning. + defp prune_deps(load_paths, no_check?) do config = Mix.Project.config - Mix.Project.build_path(config) + shared_build? = + no_check? or config[:build_path] != nil or config[:build_per_environment] == false + + config + |> Mix.Project.build_path |> Path.join("lib/*/ebin") |> Path.wildcard - |> List.delete(not Mix.Project.umbrella? && Mix.Project.compile_path(config)) - |> Enum.each(&Code.prepend_path/1) + |> List.delete(config[:app] && Mix.Project.compile_path(config)) + |> Kernel.--(load_paths) + |> Enum.each(&prune_path(&1, shared_build?)) + end + + defp prune_path(path, shared_build?) do + _ = Code.delete_path(path) + + unless shared_build? do + path |> Path.dirname |> File.rm_rf! + end + end + + defp deps_check(all, no_compile?) do + all = Enum.map(all, &check_lock/1) + + {not_ok, compile} = partition(all, [], []) + + cond do + not_ok != [] -> + show_not_ok!(not_ok) + compile == [] or no_compile? 
-> + :ok + true -> + Mix.Tasks.Deps.Compile.compile(compile) + compile + |> Enum.map(& &1.app) + |> loaded_by_name(env: Mix.env) + |> Enum.filter(&(not ok?(&1))) + |> show_not_ok! + end + end + + defp partition([dep | deps], not_ok, compile) do + cond do + compilable?(dep) -> + if from_umbrella?(dep) do + partition(deps, not_ok, compile) + else + partition(deps, not_ok, [dep | compile]) + end + ok?(dep) -> + partition(deps, not_ok, compile) + true -> + partition(deps, [dep | not_ok], compile) + end + end + + defp partition([], not_ok, compile) do + {Enum.reverse(not_ok), Enum.reverse(compile)} + end + + # Those are compiled by umbrella. + defp from_umbrella?(dep) do + dep.opts[:from_umbrella] + end + + # Every local dependency (i.e. that are not fetchable) + # are automatically recompiled if they are ok. + defp local?(dep) do + not dep.scm.fetchable? + end + + # Can the dependency be compiled automatically without user intervention? + defp compilable?(%Mix.Dep{status: {:elixirlock, _}}), do: true + defp compilable?(%Mix.Dep{status: {:noappfile, _}}), do: true + defp compilable?(%Mix.Dep{status: {:scmlock, _}}), do: true + defp compilable?(%Mix.Dep{status: :compile}), do: true + defp compilable?(%Mix.Dep{} = dep), do: ok?(dep) and local?(dep) + + defp show_not_ok!([]) do + :ok + end + + defp show_not_ok!(deps) do + shell = Mix.shell + shell.error "Unchecked dependencies for environment #{Mix.env}:" + + Enum.each deps, fn(dep) -> + shell.error "* #{format_dep dep}" + shell.error " #{format_status dep}" + end + + Mix.raise "Can't continue due to errors on dependencies" end end diff --git a/lib/mix/lib/mix/tasks/deps.precompile.ex b/lib/mix/lib/mix/tasks/deps.precompile.ex new file mode 100644 index 00000000000..3c96b910583 --- /dev/null +++ b/lib/mix/lib/mix/tasks/deps.precompile.ex @@ -0,0 +1,26 @@ +defmodule Mix.Tasks.Deps.Precompile do + use Mix.Task + + @moduledoc """ + Extension point for precompiling dependencies. + + This is a task that can be aliased by projects + that need to execute certain tasks before + compiling dependencies: + + aliases: ["deps.precompile": ["nerves.precompile", "deps.precompile"]] + + By default, this task's single responsibility + is to load all dependency paths. Dependency + loading is deliberately ad-hoc, loading as much as + possible without validating the files. + """ + def run(_) do + config = Mix.Project.config + Mix.Project.build_path(config) + |> Path.join("lib/*/ebin") + |> Path.wildcard + |> List.delete(config[:app] && Mix.Project.compile_path(config)) + |> Enum.each(&Code.prepend_path/1) + end +end diff --git a/lib/mix/lib/mix/tasks/deps.tree.ex b/lib/mix/lib/mix/tasks/deps.tree.ex new file mode 100644 index 00000000000..ab77fb8d58a --- /dev/null +++ b/lib/mix/lib/mix/tasks/deps.tree.ex @@ -0,0 +1,124 @@ +defmodule Mix.Tasks.Deps.Tree do + use Mix.Task + + @shortdoc "Prints the dependency tree" + @recursive true + + @moduledoc """ + Prints the dependency tree. + + mix deps.tree + + If no dependency is given, it uses the tree defined in the `mix.exs` file. + + ## Command line options + + * `--only` - the environment to show dependencies for + + * `--exclude` - exclude dependencies which you do not want to see printed. + + * `--format` - Can be set to one of either: + + * `pretty` - uses Unicode codepoints for formatting the tree. + This is the default except on Windows. + + * `plain` - does not use Unicode codepoints for formatting the tree. + This is the default on Windows. 
+ + * `dot` - produces a DOT graph description of the dependency tree + in `deps_tree.dot` in the current directory. + Warning: this will override any previously generated file. + + """ + @switches [only: :string, exclude: :keep, format: :string] + + @spec run(OptionParser.argv) :: :ok + def run(args) do + Mix.Project.get! + {opts, args, _} = OptionParser.parse(args, switches: @switches) + + deps_opts = if only = opts[:only], do: [env: :"#{only}"], else: [] + deps = Mix.Dep.loaded(deps_opts) + + root = + case args do + [] -> + Mix.Project.config[:app] || Mix.raise("no application given and none found in mix.exs file") + [app] -> + app = String.to_atom(app) + find_dep(deps, app) || Mix.raise("could not find dependency #{app}") + end + + if opts[:format] == "dot" do + callback = callback(&format_dot/1, deps, opts) + Mix.Utils.write_dot_graph!("deps_tree.dot", "dependency tree", [root], callback, opts) + """ + Generated "deps_tree.dot" in the current directory. To generate a PNG: + + dot -Tpng deps_tree.dot -o deps_tree.png + + For more options see http://www.graphviz.org/. + """ + |> String.trim_trailing + |> Mix.shell.info + else + callback = callback(&format_tree/1, deps, opts) + Mix.Utils.print_tree([root], callback, opts) + end + end + + defp callback(formatter, deps, opts) do + excluded = Keyword.get_values(opts, :exclude) |> Enum.map(&String.to_atom/1) + top_level = Enum.filter(deps, & &1.top_level) + + fn + %Mix.Dep{app: app} = dep -> + deps = + # Do not show dependencies if they were + # already shown at the top level + if not dep.top_level && find_dep(top_level, app) do + [] + else + find_dep(deps, app).deps + end + {formatter.(dep), exclude(deps, excluded)} + app -> + {{Atom.to_string(app), nil}, exclude(top_level, excluded)} + end + end + + defp exclude(deps, excluded) do + Enum.reject deps, & &1.app in excluded + end + + defp format_dot(%{app: app, requirement: requirement, opts: opts}) do + override = + if opts[:override] do + " *override*" + else + "" + end + + requirement = requirement && requirement(requirement) + {app, "#{requirement}#{override}"} + end + + defp format_tree(%{app: app, scm: scm, requirement: requirement, opts: opts}) do + override = + if opts[:override] do + IO.ANSI.format([:bright, " *override*"]) + else + "" + end + + requirement = requirement && "#{requirement(requirement)} " + {app, "#{requirement}(#{scm.format(opts)})#{override}"} + end + + defp requirement(%Regex{} = regex), do: "#{inspect regex}" + defp requirement(binary) when is_binary(binary), do: binary + + defp find_dep(deps, app) do + Enum.find(deps, & &1.app == app) + end +end diff --git a/lib/mix/lib/mix/tasks/deps.unlock.ex b/lib/mix/lib/mix/tasks/deps.unlock.ex index 34d2f5587e5..adf0af78f26 100644 --- a/lib/mix/lib/mix/tasks/deps.unlock.ex +++ b/lib/mix/lib/mix/tasks/deps.unlock.ex @@ -1,33 +1,72 @@ defmodule Mix.Tasks.Deps.Unlock do use Mix.Task - @shortdoc "Unlock the given dependencies" + @shortdoc "Unlocks the given dependencies" @moduledoc """ - Unlock the given dependencies. + Unlocks the given dependencies. + + Since this is a destructive action, unlocking dependencies + only occurs when passing arguments/options: + + * `dep1 dep2` - the name of dependencies to be unlocked + * `--all` - unlocks all dependencies + * `--unused` - unlocks only unused dependencies (no longer mentioned + in the `mix.exs` file) - Since this is a destructive action, unlocking of all dependencies - can only happen by passing the `--all` command line option. 
""" + @switches [all: :boolean, unused: :boolean, filter: :string] + + @spec run(OptionParser.argv) :: :ok def run(args) do - Mix.Project.get! # Require the project to be available - {opts, args, _} = OptionParser.parse(args, switches: [unlock: :boolean, all: :boolean]) + Mix.Project.get! + {opts, apps, _} = OptionParser.parse(args, switches: @switches) cond do opts[:all] -> - Mix.Dep.Lock.write([]) - args != [] -> + Mix.Dep.Lock.write(%{}) + opts[:unused] -> + apps = Mix.Dep.loaded([]) |> Enum.map(& &1.app) + Mix.Dep.Lock.read() |> Map.take(apps) |> Mix.Dep.Lock.write() + filter = opts[:filter] -> + lock = Mix.Dep.Lock.read + apps = Map.keys(lock) + + unlock = Enum.filter(apps, &(Atom.to_string(&1) =~ filter)) + + if unlock == [] do + Mix.shell.error "warning: no dependencies were matched" + else + lock = + Enum.reject(lock, fn({app, _}) -> + app in unlock + end) + Mix.Dep.Lock.write(lock) + Mix.shell.info """ + Unlocked deps: + * #{Enum.join(unlock, "\n* ")} + """ + end + + apps != [] -> lock = - Enum.reduce args, Mix.Dep.Lock.read, fn(arg, lock) -> - if is_binary(arg), do: arg = String.to_atom(arg) - Map.delete(lock, arg) + Enum.reduce apps, Mix.Dep.Lock.read, fn(app_str, lock) -> + app = String.to_atom(app_str) + if Map.has_key?(lock, app) do + Map.delete(lock, app) + else + Mix.shell.error "warning: #{app} dependency is not locked" + lock + end end - Mix.Dep.Lock.write(lock) + true -> - Mix.raise "mix deps.unlock expects dependencies as arguments or " <> - "the --all option to unlock all dependencies" + Mix.raise "\"mix deps.unlock\" expects dependencies as arguments or " <> + "a flag indicating which dependencies to unlock. " <> + "The --all option will unlock all dependencies while " <> + "the --unused option unlocks unused dependencies" end end end diff --git a/lib/mix/lib/mix/tasks/deps.update.ex b/lib/mix/lib/mix/tasks/deps.update.ex index d5cfaa41f77..38846ec7778 100644 --- a/lib/mix/lib/mix/tasks/deps.update.ex +++ b/lib/mix/lib/mix/tasks/deps.update.ex @@ -1,23 +1,28 @@ defmodule Mix.Tasks.Deps.Update do use Mix.Task - @shortdoc "Update the given dependencies" + @shortdoc "Updates the given dependencies" @moduledoc """ - Update the given dependencies. + Updates the given dependencies. - Since this is a destructive action, update of all dependencies - can only happen by passing the `--all` command line option. + Since this is a destructive action, updating all dependencies + only occurs when the `--all` command line option is passed. All dependencies are automatically recompiled after update. ## Command line options - * `--all` - update all dependencies - * `--only` - only fetch dependencies for given environment + * `--all` - updates all dependencies + * `--only` - only fetches dependencies for given environment + * `--no-archives-check` - does not check archives before fetching deps """ + @spec run(OptionParser.argv) :: [atom] def run(args) do - Mix.Project.get! # Require the project to be available + unless "--no-archives-check" in args do + Mix.Task.run "archive.check", args + end + Mix.Project.get! 
{opts, rest, _} = OptionParser.parse(args, switches: [all: :boolean, only: :string]) # Fetch all deps by default unless --only is given @@ -27,10 +32,10 @@ defmodule Mix.Tasks.Deps.Update do opts[:all] -> Mix.Dep.Fetcher.all(Mix.Dep.Lock.read, %{}, fetch_opts) rest != [] -> - {old, new} = Dict.split(Mix.Dep.Lock.read, to_app_names(rest)) + {old, new} = Map.split(Mix.Dep.Lock.read, to_app_names(rest)) Mix.Dep.Fetcher.by_name(rest, old, new, fetch_opts) true -> - Mix.raise "mix deps.update expects dependencies as arguments or " <> + Mix.raise "\"mix deps.update\" expects dependencies as arguments or " <> "the --all option to update all dependencies" end end diff --git a/lib/mix/lib/mix/tasks/do.ex b/lib/mix/lib/mix/tasks/do.ex index 9aa6984827d..f97519a13d8 100644 --- a/lib/mix/lib/mix/tasks/do.ex +++ b/lib/mix/lib/mix/tasks/do.ex @@ -6,6 +6,8 @@ defmodule Mix.Tasks.Do do @moduledoc """ Executes the tasks separated by comma. + The comma should be followed by a space. + ## Examples The example below prints the available compilers and @@ -14,28 +16,34 @@ defmodule Mix.Tasks.Do do mix do compile --list, deps """ + + @spec run(OptionParser.argv) :: :ok def run(args) do Enum.each gather_commands(args), fn - [task|args] -> Mix.Task.run task, args - [] -> Mix.raise "No expression between commas" + [task | args] -> Mix.Task.run task, args end end - defp gather_commands(args) do - gather_commands args, [], [] + @doc false + def gather_commands(args) do + gather_commands(args, [], []) end - defp gather_commands([h|t], current, acc) when binary_part(h, byte_size(h), -1) == "," do - part = binary_part(h, 0, byte_size(h) - 1) - current = Enum.reverse([part|current]) - gather_commands t, [], [current|acc] + defp gather_commands([head | rest], current, acc) + when binary_part(head, byte_size(head), -1) == "," do + current = + case binary_part(head, 0, byte_size(head) - 1) do + "" -> Enum.reverse(current) + part -> Enum.reverse([part | current]) + end + gather_commands(rest, [], [current | acc]) end - defp gather_commands([h|t], current, acc) do - gather_commands t, [h|current], acc + defp gather_commands([head | rest], current, acc) do + gather_commands(rest, [head | current], acc) end defp gather_commands([], current, acc) do - Enum.reverse [Enum.reverse(current)|acc] + Enum.reverse [Enum.reverse(current) | acc] end -end \ No newline at end of file +end diff --git a/lib/mix/lib/mix/tasks/escript.build.ex b/lib/mix/lib/mix/tasks/escript.build.ex new file mode 100644 index 00000000000..8fb65565a1a --- /dev/null +++ b/lib/mix/lib/mix/tasks/escript.build.ex @@ -0,0 +1,383 @@ +defmodule Mix.Tasks.Escript.Build do + use Mix.Task + use Bitwise, only_operators: true + + @shortdoc "Builds an escript for the project" + + @moduledoc ~S""" + Builds an escript for the project. + + An escript is an executable that can be invoked from the + command line. An escript can run on any machine that has + Erlang installed and by default does not require Elixir to + be installed, as Elixir is embedded as part of the escript. + + This task guarantees the project and its dependencies are + compiled and packages them inside an escript. + + > Note: escripts do not support projects and dependencies + > that need to store or read artifacts from the priv directory. 
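For orientation, a minimal sketch of a `:main_module` (the `MyTool.CLI` name and the `--upcase` switch are invented for illustration and are not part of this changeset): the escript invokes `main/1` with the command line arguments as a list of binaries, which can be handed straight to `OptionParser`:

    defmodule MyTool.CLI do
      def main(args) do
        # args arrive as a list of binaries, e.g. ["--upcase", "hello", "world"]
        {opts, words} = OptionParser.parse!(args, strict: [upcase: :boolean])

        output = Enum.join(words, " ")
        output = if opts[:upcase], do: String.upcase(output), else: output

        IO.puts(output)
      end
    end
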
+ + ## Command line options + + * `--force` - forces compilation regardless of modification times + * `--no-compile` - skips compilation to .beam files + + ## Configuration + + The following option must be specified in your `mix.exs` under `:escript` + key: + + * `:main_module` - the module to be invoked once the escript starts. + The module must contain a function named `main/1` that will receive the + command line arguments as binaries. + + The remaining options can be specified to further customize the escript: + + * `:name` - the name of the generated escript. + Defaults to app name. + + * `:path` - the path to write the escript to. + Defaults to app name. + + * `:app` - the app to start with the escript. + Defaults to app name. Set it to `nil` if no application should + be started. + + * `:strip_beam` - if `true` strips BEAM code in the escript to remove chunks + unnecessary at runtime, such as debug information and documentation. + Defaults to `true`. + + * `:embed_elixir` - if `true` embeds Elixir and its children apps + (`ex_unit`, `mix`, etc.) mentioned in the `:applications` list inside the + `application/0` function in `mix.exs`. + + Defaults to `true` for Elixir projects, `false` for Erlang projects. + + Note: if you set this to `false` for an Elixir project, you will have to add paths to Elixir's + `ebin` directories to `ERL_LIBS` environment variable when running the resulting escript, in + order for the code loader to be able to find `:elixir` application and its children + applications (if they are used). + + * `:shebang` - shebang interpreter directive used to execute the escript. + Defaults to `"#! /usr/bin/env escript\n"`. + + * `:comment` - comment line to follow shebang directive in the escript. + Defaults to `""`. + + * `:emu_args` - emulator arguments to embed in the escript file. + Defaults to `""`. + + There is one project-level option that affects how the escript is generated: + + * `language: :elixir | :erlang` - set it to `:erlang` for Erlang projects + managed by Mix. Doing so will ensure Elixir is not embedded by default. + Your app will still be started as part of escript loading, with the + config used during build. + + ## Example + + defmodule MyApp.Mixfile do + use Mix.Project + + def project do + [app: :my_app, + version: "0.0.1", + escript: escript()] + end + + def escript do + [main_module: MyApp.CLI] + end + end + + defmodule MyApp.CLI do + def main(_args) do + IO.puts("Hello from MyApp!") + end + end + + """ + @switches [force: :boolean, compile: :boolean, + deps_check: :boolean, archives_check: :boolean, elixir_version_check: :boolean] + + @spec run(OptionParser.argv) :: :ok | :noop + def run(args) do + Mix.Project.get! + {opts, _} = OptionParser.parse!(args, strict: @switches) + + if Keyword.get(opts, :compile, true) do + Mix.Task.run :compile, args + end + + project = Mix.Project.config + language = Keyword.get(project, :language, :elixir) + + escriptize(project, language, Keyword.get(opts, :force, false)) + end + + defp escriptize(project, language, force?) 
do + escript_opts = project[:escript] || [] + + if Mix.Project.umbrella?() do + Mix.raise "Building escripts for umbrella projects is unsupported" + end + + script_name = Mix.Local.name_for(:escript, project) + filename = escript_opts[:path] || script_name + main = escript_opts[:main_module] + files = project_files() + + cond do + !script_name -> + Mix.raise "Could not generate escript, no name given, " <> + "set :name escript option or :app in the project settings" + + !main -> + Mix.raise "Could not generate escript, please set :main_module " <> + "in your project configuration (under :escript option) to a module that implements main/1" + + not Code.ensure_loaded?(main) -> + Mix.raise "Could not generate escript, module #{main} defined as " <> + ":main_module could not be loaded" + + force? or Mix.Utils.stale?(files, [filename]) -> + app = Keyword.get(escript_opts, :app, project[:app]) + strip_beam? = Keyword.get(escript_opts, :strip_beam, true) + escript_mod = String.to_atom(Atom.to_string(app) <> "_escript") + + beam_paths = + [files, deps_files(), core_files(escript_opts, language)] + |> Stream.concat + |> prepare_beam_paths() + |> Map.merge(consolidated_paths(project)) + + tuples = gen_main(project, escript_mod, main, app, language) ++ + read_beams(beam_paths) + tuples = if strip_beam?, do: strip_beams(tuples), else: tuples + + case :zip.create('mem', tuples, [:memory]) do + {:ok, {'mem', zip}} -> + shebang = escript_opts[:shebang] || "#! /usr/bin/env escript\n" + comment = build_comment(escript_opts[:comment]) + emu_args = build_emu_args(escript_opts[:emu_args], escript_mod) + + script = IO.iodata_to_binary([shebang, comment, emu_args, zip]) + File.mkdir_p!(Path.dirname(filename)) + File.write!(filename, script) + set_perms(filename) + {:error, error} -> + Mix.raise "Error creating escript: #{error}" + end + + Mix.shell.info "Generated escript #{filename} with MIX_ENV=#{Mix.env}" + :ok + true -> + :noop + end + end + + defp project_files() do + get_files(Mix.Project.app_path) + end + + defp get_files(app) do + Path.wildcard("#{app}/ebin/*.{app,beam}") ++ + (Path.wildcard("#{app}/priv/**/*") |> Enum.filter(&File.regular?/1)) + end + + defp set_perms(filename) do + stat = File.stat!(filename) + :ok = File.chmod(filename, stat.mode ||| 0o111) + end + + defp deps_files() do + deps = Mix.Dep.cached() + Enum.flat_map(deps, fn dep -> get_files(dep.opts[:build]) end) + end + + defp core_files(escript_opts, language) do + if Keyword.get(escript_opts, :embed_elixir, language == :elixir) do + Enum.flat_map [:elixir | extra_apps()], &app_files/1 + else + [] + end + end + + defp extra_apps() do + Mix.Project.config()[:app] + |> extra_apps_in_app_tree() + |> Enum.uniq() + end + + defp extra_apps_in_app_tree(app) when app in [:kernel, :stdlib, :elixir] do + [] + end + + defp extra_apps_in_app_tree(app) when app in [:eex, :ex_unit, :iex, :logger, :mix] do + [app] + end + + defp extra_apps_in_app_tree(app) do + _ = Application.load(app) + case Application.spec(app) do + nil -> + [] + spec -> + applications = Keyword.get(spec, :applications, []) ++ + Keyword.get(spec, :included_applications, []) + Enum.flat_map(applications, &extra_apps_in_app_tree/1) + end + end + + defp app_files(app) do + case :code.where_is_file('#{app}.app') do + :non_existing -> Mix.raise "Could not find application #{app}" + file -> get_files(Path.dirname(Path.dirname(file))) + end + end + + defp prepare_beam_paths(paths) do + for path <- paths, into: %{}, do: {Path.basename(path), path} + end + + defp read_beams(items) do + 
items + |> Enum.map(fn {basename, beam_path} -> + {String.to_charlist(basename), File.read!(beam_path)} + end) + end + + defp strip_beams(tuples) do + for {basename, maybe_beam} <- tuples do + case Path.extname(basename) do + ".beam" -> {basename, strip_beam(maybe_beam)} + _ -> {basename, maybe_beam} + end + end + end + + defp strip_beam(beam) when is_binary(beam) do + {:ok, _, all_chunks} = :beam_lib.all_chunks(beam) + filtered_chunks = ['CInf', 'Abst', 'Dbgi'] + significant_chunks = + for {name, _} = pair <- all_chunks, name not in filtered_chunks, do: pair + {:ok, built_module} = :beam_lib.build_module(significant_chunks) + compress(built_module) + end + + defp compress(binary0) do + {:ok, fd} = :ram_file.open(binary0, [:write, :binary]) + {:ok, _} = :ram_file.compress(fd) + {:ok, binary} = :ram_file.get_file(fd) + :ok = :ram_file.close(fd) + binary + end + + defp consolidated_paths(config) do + if config[:consolidate_protocols] do + Mix.Project.consolidation_path(config) + |> Path.join("*") + |> Path.wildcard() + |> prepare_beam_paths() + else + %{} + end + end + + defp build_comment(user_comment) do + "%% #{user_comment}\n" + end + + defp build_emu_args(user_args, escript_mod) do + "%%! -escript main #{escript_mod} #{user_args}\n" + end + + defp gen_main(project, name, module, app, language) do + config = + if File.regular?(project[:config_path]) do + Macro.escape Mix.Config.read!(project[:config_path]) + else + [] + end + + module_body = quote do + @module unquote(module) + @config unquote(config) + @app unquote(app) + + @spec main(OptionParser.argv) :: any + def main(args) do + unquote(main_body_for(language)) + end + + defp load_config(config) do + :lists.foreach(fn {app, kw} -> + :lists.foreach(fn {k, v} -> + :application.set_env(app, k, v, persistent: true) + end, kw) + end, config) + :ok + end + + defp start_app(nil) do + :ok + end + + defp start_app(app) do + case :application.ensure_all_started(app) do + {:ok, _} -> :ok + {:error, {app, reason}} -> + formatted_error = case :code.ensure_loaded(Application) do + {:module, Application} -> Application.format_error(reason) + {:error, _} -> :io_lib.format('~p', [reason]) + end + io_error ["Could not start application ", + :erlang.atom_to_binary(app, :utf8), + ": ", formatted_error, ?\n] + :erlang.halt(1) + end + end + + defp io_error(message) do + :io.put_chars(:standard_error, message) + end + end + + {:module, ^name, binary, _} = Module.create(name, module_body, Macro.Env.location(__ENV__)) + [{'#{name}.beam', binary}] + end + + defp main_body_for(:elixir) do + quote do + erl_version = :erlang.system_info(:otp_release) + + case :string.to_integer(erl_version) do + {num, _} when num >= 18 -> nil + _ -> + io_error ["Incompatible Erlang/OTP release: ", erl_version, + ".\nThis escript requires at least Erlang/OTP 18.0\n"] + :erlang.halt(1) + end + + case :application.ensure_all_started(:elixir) do + {:ok, _} -> + load_config(@config) + start_app(@app) + args = Enum.map(args, &List.to_string(&1)) + Kernel.CLI.run fn _ -> @module.main(args) end, true + error -> + io_error ["Failed to start Elixir.\n", :io_lib.format('error: ~p~n', [error])] + :erlang.halt(1) + end + end + end + + defp main_body_for(:erlang) do + quote do + load_config(@config) + start_app(@app) + @module.main(args) + end + end +end diff --git a/lib/mix/lib/mix/tasks/escript.ex b/lib/mix/lib/mix/tasks/escript.ex new file mode 100644 index 00000000000..741cb3aee21 --- /dev/null +++ b/lib/mix/lib/mix/tasks/escript.ex @@ -0,0 +1,56 @@ +defmodule Mix.Tasks.Escript do + use 
Mix.Task + + @shortdoc "Lists installed escripts" + + @moduledoc ~S""" + Lists all installed escripts. + + Escripts are installed at `~/.mix/escripts`. Add that path to your `PATH` environment variable + to be able to run installed escripts from any directory. + """ + + use Bitwise + + @spec run(OptionParser.argv) :: :ok + def run(_) do + escripts_path = Mix.Local.path_for(:escript) + escripts_path + |> list_dir() + |> Enum.filter(fn filename -> executable?(Path.join(escripts_path, filename)) end) + |> print() + end + + defp list_dir(path) do + case File.ls(path) do + {:ok, list} -> list + _ -> [] + end + end + + defp executable?(path) do + owner_exec_bit = 0o00100 + group_exec_bit = 0o00010 + other_exec_bit = 0o00001 + stat = File.stat!(path) + + case :os.type() do + {:win32, _} -> + # on win32, the script itself is not executable, but the bat is + File.exists?(path <> ".bat") and stat.type == :regular + _ -> + executable_bit = + stat.mode &&& (owner_exec_bit ||| group_exec_bit ||| other_exec_bit) + executable_bit != 0 and stat.type == :regular and Path.extname(path) != ".bat" + end + end + + defp print([]) do + Mix.shell.info "No escripts currently installed." + end + + defp print(items) do + Enum.each items, fn item -> Mix.shell.info ["* ", item] end + Mix.shell.info "Escripts installed at: #{Mix.Local.path_for(:escript)}" + end +end diff --git a/lib/mix/lib/mix/tasks/escript.install.ex b/lib/mix/lib/mix/tasks/escript.install.ex new file mode 100644 index 00000000000..e66004f32b8 --- /dev/null +++ b/lib/mix/lib/mix/tasks/escript.install.ex @@ -0,0 +1,151 @@ +defmodule Mix.Tasks.Escript.Install do + use Mix.Task + + @shortdoc "Installs an escript locally" + + @moduledoc """ + Installs an escript locally. + + If no argument is supplied but there is an escript in the project's root directory + (created with `mix escript.build`), then the escript will be installed + locally. For example: + + mix do escript.build, escript.install + + If an argument is provided, it should be a local path or a URL to a prebuilt escript, + a Git repository, a GitHub repository, or a Hex package. + + mix escript.install escript + mix escript.install path/to/escript + mix escript.install https://example.com/my_escript + mix escript.install git https://path/to/git/repo + mix escript.install git https://path/to/git/repo branch git_branch + mix escript.install git https://path/to/git/repo tag git_tag + mix escript.install git https://path/to/git/repo ref git_ref + mix escript.install github user/project + mix escript.install github user/project branch git_branch + mix escript.install github user/project tag git_tag + mix escript.install github user/project ref git_ref + mix escript.install hex hex_package + mix escript.install hex hex_package 1.2.3 + + After installation, the escript can be invoked as + + ~/.mix/escripts/foo + + For convenience, consider adding `~/.mix/escripts` directory to your + `PATH` environment variable. For more information, check the wikipedia + article on PATH: https://en.wikipedia.org/wiki/PATH_(variable) + + ## Command line options + + * `--sha512` - checks the escript matches the given SHA-512 checksum. 
Only + applies to installations via URL or local path + + * `--force` - forces installation without a shell prompt; primarily + intended for automation in build systems like Make + + * `--submodules` - fetches repository submodules before building escript from + Git or GitHub + + * `--app` - specifies a custom app name to be used for building the escript + from Git, GitHub, or Hex + + """ + + @behaviour Mix.Local.Installer + + @escript_file_mode 0o555 # only read and execute permissions + + @switches [force: :boolean, sha512: :string, submodules: :boolean, app: :string] + @spec run(OptionParser.argv) :: boolean + def run(argv) do + Mix.Local.Installer.install(__MODULE__, argv, @switches) + end + + # Callbacks + + def check_install_spec(_, _), do: :ok + + def find_previous_versions(basename) do + dst = destination(basename) + if File.exists?(dst), do: [dst], else: [] + end + + def install(basename, binary, _previous) do + dst = destination(basename) + + if escript?(binary) do + _ = File.rm(dst) + _ = File.rm(dst <> ".bat") + + executable = Path.basename(dst) + previous_executable = System.find_executable(executable) + + File.mkdir_p!(Path.dirname(dst)) + File.write!(dst, binary) + File.chmod!(dst, @escript_file_mode) + write_bat!(dst <> ".bat", :os.type) + + Mix.shell.info [:green, "* creating ", :reset, Path.relative_to_cwd(dst)] + check_discoverability(dst, executable, previous_executable) + :ok + else + Mix.raise "The given path does not point to an escript, installation aborted" + end + end + + def build(_spec, _opts) do + Mix.Task.run("escript.build", []) + Mix.Local.name_for(:escript, Mix.Project.config) + end + + # Helpers + + defp destination(basename) do + Path.join(Mix.Local.path_for(:escript), basename) + end + + defp write_bat!(path, {:win32, _}) do + File.write!(path, """ + @echo off + @escript "%~dpn0" %* + """) + File.chmod!(path, @escript_file_mode) + end + defp write_bat!(_path, _type) do + :ok + end + + defp check_discoverability(dst, executable, previous_executable) do + current_executable = System.find_executable(executable) + + cond do + # If existing executable was changed, + # it was overridden + previous_executable && previous_executable != current_executable -> + Mix.shell.error "\nwarning: escript #{inspect executable} overrides executable " <> + "#{inspect previous_executable} already in your PATH\n" + + # If existing executable didn't change but it is not the one we installed, + # it is a conflict + previous_executable && previous_executable != dst -> + Mix.shell.error "\nwarning: escript #{inspect executable} conflicts with executable " <> + "#{inspect previous_executable} already in your PATH\n" + + # If current executable is nil or does not match the one we just installed, + # PATH is misconfigured + current_executable != dst -> + Mix.shell.error "\nwarning: you must append #{inspect Mix.Local.path_for(:escript)} " <> + "to your PATH if you want to invoke escripts by name\n" + + true -> + :ok + end + end + + defp escript?(binary) do + parts = String.split(binary, "\n", parts: 4) + match?(["#!" 
<> _, _, _, <<80, 75, 3, 4, _::binary>>], parts) + end +end diff --git a/lib/mix/lib/mix/tasks/escript.uninstall.ex b/lib/mix/lib/mix/tasks/escript.uninstall.ex new file mode 100644 index 00000000000..7d5373257ee --- /dev/null +++ b/lib/mix/lib/mix/tasks/escript.uninstall.ex @@ -0,0 +1,18 @@ +defmodule Mix.Tasks.Escript.Uninstall do + use Mix.Task + + @shortdoc "Uninstalls escripts" + + @moduledoc """ + Uninstalls local escripts: + + mix escript.uninstall escript_name + + """ + @spec run(OptionParser.argv) :: :ok + def run(argv) do + if path = Mix.Local.Installer.uninstall(Mix.Local.path_for(:escript), "escript", argv) do + File.rm(path <> ".bat") + end + end +end diff --git a/lib/mix/lib/mix/tasks/escriptize.ex b/lib/mix/lib/mix/tasks/escriptize.ex deleted file mode 100644 index ebd3f051cf2..00000000000 --- a/lib/mix/lib/mix/tasks/escriptize.ex +++ /dev/null @@ -1,225 +0,0 @@ -defmodule Mix.Tasks.Escriptize do - use Mix.Task - use Bitwise, only_operators: true - - @shortdoc "Generates an escript for the project" - @recursive true - - @moduledoc ~S""" - Generates an escript for the project. - - ## Command line options - - * `--force` - forces compilation regardless of modification times - * `--no-compile` - skips compilation to .beam files - - ## Configuration - - The following option must be specified in your `mix.exs` under `:escript` - key: - - * `:main_module` - the module to be invoked once the escript starts. - The module must contain a function named `main/1` that will receive the - command line arguments as binaries. - - The remaining options can be specified to further customize the escript: - - * `:name` - the name of the generated escript. - Defaults to app name. - - * `:path` - the path to write the escript to. - Defaults to app name. - - * `:app` - the app to start with the escript. - Defaults to app name. Set it to `nil` if no application should - be started. - - * `:embed_elixir` - if `true` embed elixir in the escript file. - Defaults to `true`. - - * `:embed_extra_apps` - embed additional Elixir applications. - if `:embed_elixir` is `true`. - Defaults to `[]`. - - * `:shebang` - shebang interpreter directive used to execute the escript. - Defaults to `"#! /usr/bin/env escript\n"`. - - * `:comment` - comment line to follow shebang directive in the escript. - Defaults to `""`. - - * `:emu_args` - emulator arguments to embed in the escript file. - Defaults to `""`. - - ## Example - - defmodule MyApp.Mixfile do - def project do - [ app: :myapp, - version: "0.0.1", - escript: escript ] - end - - def escript do - [ main_module: MyApp.CLI, - embed_extra_apps: [:mix] ] - end - end - - """ - def run(args) do - {opts, _, _} = OptionParser.parse(args, switches: [force: :boolean, no_compile: :boolean]) - - # Require the project to be available - Mix.Project.get! 
- - unless opts[:no_compile] do - Mix.Task.run :compile, args - end - - escriptize(Mix.Project.config, opts[:force]) - end - - defp escriptize(project, force) do - escript_opts = project[:escript] || [] - - script_name = to_string(escript_opts[:name] || project[:app]) - filename = escript_opts[:path] || script_name - main = escript_opts[:main_module] - embed = Keyword.get(escript_opts, :embed_elixir, true) - app = Keyword.get(escript_opts, :app, project[:app]) - files = project_files() - - escript_mod = String.to_atom(Atom.to_string(app) <> "-escript-main") - - cond do - !script_name -> - Mix.raise "Could not generate escript, no name given, " <> - "set :name escript option or :app in the project settings" - - !main or !Code.ensure_loaded?(main)-> - Mix.raise "Could not generate escript, please set :main_module " <> - "in your project configuration (under `:escript` option) to a module that implements main/1" - - force || Mix.Utils.stale?(files, [filename]) -> - tuples = gen_main(escript_mod, main, app) ++ to_tuples(files) - tuples = tuples ++ deps_tuples() - - if embed do - extra_apps = escript_opts[:embed_extra_apps] || [] - tuples = Enum.reduce [:elixir|extra_apps], tuples, fn(app, acc) -> - app_tuples(app) ++ acc - end - end - - # We might get duplicate tuples in umbrella projects from applications - # sharing the same dependencies - tuples = Enum.uniq(tuples, fn {name, _} -> name end) - - case :zip.create 'mem', tuples, [:memory] do - {:ok, {'mem', zip}} -> - shebang = escript_opts[:shebang] || "#! /usr/bin/env escript\n" - comment = build_comment(escript_opts[:comment]) - emu_args = build_emu_args(escript_opts[:emu_args], escript_mod) - - script = IO.iodata_to_binary([shebang, comment, emu_args, zip]) - - File.mkdir_p!(Path.dirname(filename)) - File.write!(filename, script) - {:error, error} -> - Mix.shell.error "Error creating escript: #{error}" - end - - set_perms(filename) - Mix.shell.info "Generated escript #{filename}" - :ok - true -> - :noop - end - end - - defp project_files do - get_files(Mix.Project.app_path) - end - - defp deps_tuples do - deps = Mix.Dep.loaded(env: Mix.env) || [] - Enum.reduce(deps, [], fn dep, acc -> - get_tuples(dep.opts[:build]) ++ acc - end) - end - - defp set_perms(filename) do - stat = File.stat!(filename) - :ok = :file.change_mode(filename, stat.mode ||| 73) - end - - defp app_tuples(app) do - case :code.where_is_file('#{app}.app') do - :non_existing -> Mix.raise "Could not find application #{app}" - file -> get_tuples(Path.dirname(Path.dirname(file))) - end - end - - defp get_files(app) do - Path.wildcard("#{app}/ebin/*.{app,beam}") ++ - (Path.wildcard("#{app}/priv/**/*") |> Enum.filter(&File.regular?/1)) - end - - defp get_tuples(app) do - get_files(app) |> to_tuples - end - - defp to_tuples(files) do - for f <- files do - {String.to_char_list(Path.basename(f)), File.read!(f)} - end - end - - defp build_comment(user_comment) do - "%% #{user_comment}\n" - end - - defp build_emu_args(user_args, escript_mod) do - "%%! -escript main #{escript_mod} #{user_args}\n" - end - - defp gen_main(name, module, app) do - {:module, ^name, binary, _} = - defmodule name do - @module module - @app app - - def main(args) do - case :application.start(:elixir) do - :ok -> - start_app(@app) - args = Enum.map(args, &List.to_string(&1)) - Kernel.CLI.run fn -> @module.main(args) end, true - _ -> - io_error "Elixir is not in the code path, aborting." 
- System.halt(1) - end - end - - defp start_app(nil) do - :ok - end - - defp start_app(app) do - case :application.ensure_all_started(app) do - {:ok, _} -> :ok - {:error, {app, reason}} -> - io_error "Could not start application #{app}: " <> - Application.format_error(reason) - System.halt(1) - end - end - - defp io_error(message) do - IO.puts :stderr, IO.ANSI.escape("%{red, bright} " <> message) - end - end - - [{'#{name}.beam', binary}] - end -end diff --git a/lib/mix/lib/mix/tasks/help.ex b/lib/mix/lib/mix/tasks/help.ex index fcc10d088bb..90279f6248d 100644 --- a/lib/mix/lib/mix/tasks/help.ex +++ b/lib/mix/lib/mix/tasks/help.ex @@ -1,16 +1,18 @@ defmodule Mix.Tasks.Help do use Mix.Task - @shortdoc "Print help information for tasks" + @shortdoc "Prints help information for tasks" @moduledoc """ Lists all tasks or prints the documentation for a given task. ## Arguments - mix help - prints all tasks and their shortdoc - mix help --names - prints all task names - mix help TASK - prints full docs for the given task + mix help - prints all tasks and their shortdoc + mix help TASK - prints full docs for the given task + mix help --search PATTERN - prints all tasks that contain PATTERN in the name + mix help --names - prints all task names and aliases + (useful for autocompleting) ## Colors @@ -19,61 +21,74 @@ defmodule Mix.Tasks.Help do application either inside your project (in `config/config.exs`) or by using the local config (in `~/.mix/config.exs`). - For example, to disable, one may: + For example, to disable color, one may use the configuration: [mix: [colors: [enabled: false]]] The available color options are: - * `:enabled` - show ANSI formatting (defaults to IO.ANSI.terminal?) - * `:doc_code` — the attributes for code blocks (cyan, bright) + * `:enabled` - shows ANSI formatting (defaults to `IO.ANSI.enabled?/0`) + * `:doc_code` - the attributes for code blocks (cyan, bright) * `:doc_inline_code` - inline code (cyan) * `:doc_headings` - h1 and h2 (yellow, bright) - * `:doc_title` — the overall heading for the output (reverse,yellow,bright) + * `:doc_title` - the overall heading for the output (reverse, yellow, bright) * `:doc_bold` - (bright) * `:doc_underline` - (underline) """ + @spec run(OptionParser.argv) :: :ok + def run(argv) + def run([]) do - Mix.Task.load_all + loadpaths!() - shell = Mix.shell - modules = Mix.Task.all_modules + modules = load_tasks() + {docs, max} = build_task_doc_list(modules) - docs = for module <- modules, - doc = Mix.Task.shortdoc(module) do - {"mix " <> Mix.Task.task_name(module), doc} - end + display_default_task_doc(max) + display_task_doc_list(docs, max) + display_iex_task_doc(max) + end - max = Enum.reduce docs, 0, fn({task, _}, acc) -> - max(byte_size(task), acc) - end + def run(["--names"]) do + loadpaths!() - display_default_task_doc(max) + tasks = Enum.map(load_tasks(), &Mix.Task.task_name/1) - Enum.each Enum.sort(docs), fn({task, doc}) -> - shell.info format_task(task, max, doc) + aliases = + Mix.Project.config[:aliases] + |> Enum.map(fn {k, _} -> Atom.to_string(k) end) + + for info <- Enum.sort(aliases ++ tasks) do + Mix.shell.info info end + end - display_iex_task_doc(max) + def run(["--search", pattern]) do + loadpaths!() + + modules = + load_tasks() + |> Enum.filter(&(String.contains?(Mix.Task.task_name(&1), pattern))) + {docs, max} = build_task_doc_list(modules) + + display_task_doc_list(docs, max) end - def run(["--names"]) do - Mix.Task.load_all - for module <- Enum.sort(Mix.Task.all_modules), - task = Mix.Task.task_name(module) do - 
Mix.shell.info "#{task}" - end + def run(["--search"]) do + Mix.raise "Unexpected arguments, expected \"mix help --search PATTERN\"" end def run([task]) do + loadpaths!() + module = Mix.Task.get!(task) doc = Mix.Task.moduledoc(module) || "There is no documentation for this task" opts = Application.get_env(:mix, :colors) if ansi_docs?(opts) do - opts = [width: width] ++ opts + opts = [width: width()] ++ opts IO.ANSI.Docs.print_heading("mix #{task}", opts) IO.ANSI.Docs.print(doc, opts) else @@ -85,15 +100,23 @@ defmodule Mix.Tasks.Help do end def run(_) do - Mix.raise "Unexpected arguments, expected `mix help` or `mix help TASK`" + Mix.raise "Unexpected arguments, expected \"mix help\" or \"mix help TASK\"" + end + + # Loadpaths without checks because tasks may be defined in deps. + defp loadpaths! do + Mix.Task.run "loadpaths", ["--no-elixir-version-check", "--no-deps-check", "--no-archives-check"] + Mix.Task.reenable "loadpaths" + Mix.Task.reenable "deps.loadpaths" + end + + defp load_tasks() do + Mix.Task.load_all() + |> Enum.filter(&(Mix.Task.moduledoc(&1) != false)) end defp ansi_docs?(opts) do - if Keyword.has_key?(opts, :enabled) do - opts[:enabled] - else - IO.ANSI.terminal? - end + Keyword.get(opts, :enabled, IO.ANSI.enabled?) end defp width() do @@ -104,11 +127,11 @@ defmodule Mix.Tasks.Help do end defp format_task(task, max, doc) do - String.ljust(task, max) <> " # " <> doc + String.pad_trailing(task, max) <> " # " <> doc end defp where_is_file(module) do - case :code.where_is_file(Atom.to_char_list(module) ++ '.beam') do + case :code.where_is_file(Atom.to_charlist(module) ++ '.beam') do :non_existing -> "not available" location -> @@ -121,11 +144,28 @@ defmodule Mix.Tasks.Help do defp display_default_task_doc(max) do Mix.shell.info format_task("mix", max, - "Run the default task (current: mix #{Mix.Project.config[:default_task]})") + "Runs the default task (current: \"mix #{Mix.Project.config[:default_task]}\")") end defp display_iex_task_doc(max) do Mix.shell.info format_task("iex -S mix", max, - "Start IEx and run the default task") + "Starts IEx and runs the default task") + end + + defp display_task_doc_list(docs, max) do + Enum.each Enum.sort(docs), fn({task, doc}) -> + Mix.shell.info format_task(task, max, doc) + end + end + + defp build_task_doc_list(modules) do + Enum.reduce modules, {[], 0}, fn module, {docs, max} -> + if doc = Mix.Task.shortdoc(module) do + task = "mix " <> Mix.Task.task_name(module) + {[{task, doc} | docs], max(byte_size(task), max)} + else + {docs, max} + end + end end end diff --git a/lib/mix/lib/mix/tasks/iex.ex b/lib/mix/lib/mix/tasks/iex.ex index 3bc13aa84c0..fbc8447331f 100644 --- a/lib/mix/lib/mix/tasks/iex.ex +++ b/lib/mix/lib/mix/tasks/iex.ex @@ -2,10 +2,11 @@ defmodule Mix.Tasks.Iex do use Mix.Task @moduledoc """ - A task that is simply meant to redirect users to `iex -S mix`. + A task that simply instructs users to run `iex -S mix`. """ + @spec run(OptionParser.argv) :: no_return def run(_) do - Mix.raise "To use IEx with Mix, please run: iex -S mix" + Mix.raise "To use IEx with Mix, please run \"iex -S mix\"" end end diff --git a/lib/mix/lib/mix/tasks/loadconfig.ex b/lib/mix/lib/mix/tasks/loadconfig.ex index 567da818831..c7762686674 100644 --- a/lib/mix/lib/mix/tasks/loadconfig.ex +++ b/lib/mix/lib/mix/tasks/loadconfig.ex @@ -6,19 +6,25 @@ defmodule Mix.Tasks.Loadconfig do @moduledoc """ Loads and persists the given configuration. - In case no configuration file is given, it - loads the project one at "config/config.exs". 
+ If no configuration file is given, it loads the project's + configuration file, "config/config.exs", if it exists. Keep in mind that + the "config/config.exs" file is always loaded by the CLI and + invoking it is only required in cases you are starting Mix + manually. - This task is automatically reenabled, so it - can be called multiple times to load different - configs. + This task is automatically reenabled, so it can be called + multiple times to load different configs. """ + + @spec run(OptionParser.argv) :: :ok def run(args) do + config = Mix.Project.config + cond do file = Enum.at(args, 0) -> load file - File.regular?("config/config.exs") -> - load "config/config.exs" + File.regular?(config[:config_path]) or (config[:config_path] != "config/config.exs") -> + load config[:config_path] true -> :ok end @@ -27,6 +33,8 @@ defmodule Mix.Tasks.Loadconfig do end defp load(file) do - Mix.Config.persist Mix.Config.read! file + apps = Mix.Config.persist Mix.Config.read!(file) + Mix.ProjectStack.configured_applications(apps) + :ok end end diff --git a/lib/mix/lib/mix/tasks/loadpaths.ex b/lib/mix/lib/mix/tasks/loadpaths.ex index d57da8af485..4a69354181e 100644 --- a/lib/mix/lib/mix/tasks/loadpaths.ex +++ b/lib/mix/lib/mix/tasks/loadpaths.ex @@ -4,36 +4,78 @@ defmodule Mix.Tasks.Loadpaths do @moduledoc """ Loads the application and its dependencies paths. + ## Configuration + + * `:elixir` - matches the current Elixir version against the + given requirement + ## Command line options - * `--no-elixir-version-check` - do not check elixir version + * `--no-archives-check` - does not check archive + * `--no-deps-check` - does not check dependencies + * `--no-elixir-version-check` - does not check Elixir version """ + + @spec run(OptionParser.argv) :: :ok def run(args) do - {opts, _, _} = OptionParser.parse(args) - - unless opts[:no_elixir_version_check] do - config = Mix.Project.config - - if req = config[:elixir] do - case Version.parse_requirement(req) do - {:ok, req} -> - unless Version.match?(System.version, req) do - Mix.raise Mix.ElixirVersionError, target: config[:app] || Mix.Project.get, - expected: req, - actual: System.version - end - :error -> - Mix.raise "Invalid Elixir version requirement #{req} in mix.exs file" - end + config = Mix.Project.config + + unless "--no-elixir-version-check" in args do + check_elixir_version(config, args) + end + + unless "--no-archives-check" in args do + Mix.Task.run "archive.check", args + end + + # --no-deps is used only internally. It has no purpose + # from Mix.CLI because running a task may load deps. + unless "--no-deps" in args do + Mix.Task.run "deps.loadpaths", args + end + + if config[:app] do + load_project(config, args) + end + + :ok + end + + defp check_elixir_version(config, _) do + if req = config[:elixir] do + case Version.parse_requirement(req) do + {:ok, req} -> + unless Version.match?(System.version, req) do + raise Mix.ElixirVersionError, target: config[:app] || Mix.Project.get, + expected: req, + actual: System.version + end + :error -> + Mix.raise "Invalid Elixir version requirement #{req} in mix.exs file" end end + end - # Force recompile if we have a version mismatch. - # Skip it for umbrella apps since they have no build. - old_vsn = Mix.Dep.Lock.elixir_vsn - if old_vsn && old_vsn != System.version, do: Mix.Dep.Lock.touch + defp load_project(config, _args) do + vsn = {System.version, :erlang.system_info(:otp_release)} + scm = config[:build_scm] + + # Erase the app build if we have lock mismatch. 
+ # We do this to force full recompilation when + # any of SCM or Elixir version changes. Applies + # to dependencies and the main project alike. + case Mix.Dep.ElixirSCM.read() do + {:ok, old_vsn, _} when old_vsn != vsn -> rm_rf_app(config) + {:ok, _, old_scm} when old_scm != scm -> rm_rf_app(config) + _ -> :ok + end + + Enum.each Mix.Project.load_paths(config), &Code.prepend_path(&1) + end - Enum.each Mix.Project.load_paths, &Code.prepend_path(&1) + defp rm_rf_app(config) do + File.rm_rf Mix.Project.app_path(config) + File.rm_rf Mix.Project.consolidation_path(config) end end diff --git a/lib/mix/lib/mix/tasks/local.ex b/lib/mix/lib/mix/tasks/local.ex index a28a7847357..174c660c85f 100644 --- a/lib/mix/lib/mix/tasks/local.ex +++ b/lib/mix/lib/mix/tasks/local.ex @@ -1,18 +1,18 @@ defmodule Mix.Tasks.Local do use Mix.Task - @shortdoc "List local tasks" + @shortdoc "Lists local tasks" @moduledoc """ - List local tasks. + Lists local tasks. """ + @spec run([]) :: :ok def run([]) do shell = Mix.shell - modules = Mix.Local.all_tasks + modules = Mix.Local.archives_tasks - docs = for module <- modules, - Mix.Task.is_task?(module) do + docs = for module <- modules do {Mix.Task.task_name(module), Mix.Task.shortdoc(module)} end diff --git a/lib/mix/lib/mix/tasks/local.hex.ex b/lib/mix/lib/mix/tasks/local.hex.ex index 5f51fcc4354..e40936cf7b8 100644 --- a/lib/mix/lib/mix/tasks/local.hex.ex +++ b/lib/mix/lib/mix/tasks/local.hex.ex @@ -1,61 +1,60 @@ defmodule Mix.Tasks.Local.Hex do use Mix.Task - @hex_url "https://hex.pm/installs/hex.ez" - @hex_requirement ">= 0.3.0" + @hex_list_path "/installs/hex-1.x.csv" + @hex_archive_path "/installs/[ELIXIR_VERSION]/hex-[HEX_VERSION].ez" - @shortdoc "Install hex locally" + @shortdoc "Installs Hex locally" @moduledoc """ - Install hex locally from #{@hex_url}. + Installs Hex locally. mix local.hex ## Command line options * `--force` - forces installation without a shell prompt; primarily - intended for automation in build systems like make - """ - def run(args) do - Mix.Tasks.Local.Install.run [@hex_url|args] - end + intended for automation in build systems like `make` - @doc false - def maybe_install(app) do - unless Code.ensure_loaded?(Hex) do - shell = Mix.shell - shell.info "Could not find hex, which is needed to build dependency #{inspect app}" + * `--if-missing` - performs installation only if Hex is not installed yet; + intended to avoid repeatedly reinstalling Hex in automation when a script + may be run multiple times - if shell.yes?("Shall I install hex?") do - run ["--force"] - end - end - end + If both options are set, `--force` takes precedence. - @doc false - def maybe_update do - if Code.ensure_loaded?(Hex) do - unless Version.match?(Hex.version, @hex_requirement) do - Mix.shell.info "Mix requires hex #{@hex_requirement} but you have #{Hex.version}" + ## Mirrors - if Mix.shell.yes?("Shall I abort the current command and update hex?") do - run ["--force"] - exit(0) - end + If you want to change the [default mirror](https://repo.hex.pm) + used for fetching Hex, set the `HEX_MIRROR` environment variable. + """ + @switches [if_missing: :boolean, force: :boolean] + + @spec run(OptionParser.argv) :: boolean + def run(argv) do + {opts, _} = OptionParser.parse!(argv, switches: @switches) + force? = Keyword.get(opts, :force, false) + if_missing? = Keyword.get(opts, :if_missing, false) + + should_install? = + case {force?, if_missing?} do + {false, true} -> Code.ensure_loaded?(Hex) + _ -> true end - end + + should_install? 
&& run_install(argv) end - @doc false - def maybe_start do - try do - Code.ensure_loaded?(Hex) && Hex.start - catch - kind, reason -> - stacktrace = System.stacktrace - Mix.shell.error "Could not start Hex. Try fetching a new version with " <> - "`mix local.hex` or uninstalling it with `mix local.uninstall hex`" - :erlang.raise(kind, reason, stacktrace) - end + defp run_install(argv) do + hex_mirror = Mix.Hex.mirror + + {elixir_version, hex_version, sha512} = + Mix.Local.find_matching_versions_from_signed_csv!("Hex", hex_mirror <> @hex_list_path) + + url = + (hex_mirror <> @hex_archive_path) + |> String.replace("[ELIXIR_VERSION]", elixir_version) + |> String.replace("[HEX_VERSION]", hex_version) + + Mix.Tasks.Archive.Install.run [url, "--sha512", sha512 | argv] end end diff --git a/lib/mix/lib/mix/tasks/local.install.ex b/lib/mix/lib/mix/tasks/local.install.ex deleted file mode 100644 index 72e317f715e..00000000000 --- a/lib/mix/lib/mix/tasks/local.install.ex +++ /dev/null @@ -1,99 +0,0 @@ -defmodule Mix.Tasks.Local.Install do - use Mix.Task - - import Mix.Generator, only: [create_file: 2] - - @shortdoc "Install an archive locally" - - @moduledoc """ - Install an archive locally. - - If no argument is supplied but there is an archive in the root - (created with mix archive), then the archive will be installed - locally. For example: - - mix do archive, local.install - - The argument can be an archive located at some URL: - - mix local.install http://example.com/foo.ez - - After installed, the tasks in the archive are available locally: - - mix some_task - - ## Command line options - - * `--force` - forces installation without a shell prompt; primarily - intended for automation in build systems like make - - """ - - def run(argv) do - {opts, argv, _} = OptionParser.parse(argv, switches: [force: :boolean]) - - if src = List.first(argv) do - %URI{path: path} = URI.parse(src) - - case Path.extname(path) do - ".ez" -> install_archive(src, opts) - _ -> Mix.raise "mix local.install doesn't know how to install #{path}" - end - else - src = Mix.Archive.name(Mix.Project.config[:app], Mix.Project.config[:version]) - - if File.exists?(src) do - install_archive(src, opts) - else - Mix.raise "Expected PATH to be given, please use `mix local.install PATH`" - end - end - end - - defp install_archive(src, opts) do - previous = previous_versions(src) - - if opts[:force] || should_install?(src, previous) do - remove_previous_versions(previous) - dest = Mix.Local.archives_path() - File.mkdir_p!(dest) - archive = Path.join(dest, basename(src)) - create_file archive, Mix.Utils.read_path!(src) - Code.append_path(Mix.Archive.ebin(archive)) - end - end - - defp basename(path) do - %URI{path: path} = URI.parse(path) - Path.basename(path) - end - - defp should_install?(src, []) do - Mix.shell.yes?("Are you sure you want to install archive #{src}?") - end - - defp should_install?(_src, previous_files) do - files = Enum.map_join(previous_files, ", ", &Path.basename/1) - - Mix.shell.yes?("Found existing archives: #{files}.\n" <> - "Are you sure you want to replace them?") - end - - defp previous_versions(src) do - app = src - |> Mix.Archive.dir - |> String.split("-") - |> List.first - - if app do - Mix.Local.archive_files(app) - else - [] - end - end - - defp remove_previous_versions([]), - do: :ok - defp remove_previous_versions(previous), - do: Enum.each(previous, &File.rm!/1) -end diff --git a/lib/mix/lib/mix/tasks/local.public_keys.ex b/lib/mix/lib/mix/tasks/local.public_keys.ex new file mode 100644 index 
00000000000..6acfdb026b2 --- /dev/null +++ b/lib/mix/lib/mix/tasks/local.public_keys.ex @@ -0,0 +1,80 @@ +defmodule Mix.Tasks.Local.PublicKeys do + use Mix.Task + + @shortdoc "Manages public keys" + + @moduledoc """ + Public keys are used by Mix to install packages like Rebar and Hex. + + Mix by default ships with a public key but new ones can be added + on demand. + + To list all available keys: + + $ mix local.public_keys + + To list all available keys showing the keys themselves: + + $ mix local.public_keys --detailed + + To add a new key: + + $ mix local.public_keys local/path/to/key + + Be careful when adding new keys. Only add keys from sources you + trust. + + Public keys are by default stored in your MIX_HOME under the + public_keys directory. + + ## Command line options + + * `--force` - forces installation without a shell prompt; primarily + intended for automation in build systems like `make` + """ + @spec run(OptionParser.argv) :: true + def run(argv) do + {opts, argv} = + OptionParser.parse!(argv, switches: [force: :boolean, detailed: :boolean]) + + case argv do + [] -> show(opts) + [path | _] -> install(path, opts) + end + end + + defp show(opts) do + for {id, key} <- Mix.PublicKey.public_keys do + Mix.shell.info "* #{id}" + if opts[:detailed] do + Mix.shell.info "\n#{key}" + end + end + + Mix.shell.info "Public keys (except in-memory ones) installed at: #{Mix.PublicKey.public_keys_path()}" + end + + defp install(source, opts) do + key = File.read!(source) + base = Path.basename(source) + dest = Path.join(Mix.PublicKey.public_keys_path, base) + + # Validate the key is good + _ = Mix.PublicKey.decode!(source, key) + + if opts[:force] || should_install?(source, dest) do + File.mkdir_p!(Mix.PublicKey.public_keys_path) + File.write!(dest, key) + Mix.shell.info [:green, "* creating ", :reset, Path.relative_to_cwd(dest)] + end + end + + defp should_install?(source, dest) do + if File.exists?(dest) do + Mix.shell.yes?("There is already a public key named #{Path.basename(dest)}.\n" <> + "Are you sure you want to replace it?") + else + Mix.shell.yes?("Are you sure you want to install public key #{source}?") + end + end +end diff --git a/lib/mix/lib/mix/tasks/local.rebar.ex b/lib/mix/lib/mix/tasks/local.rebar.ex index 37a0b5157d0..d0e85bcce79 100644 --- a/lib/mix/lib/mix/tasks/local.rebar.ex +++ b/lib/mix/lib/mix/tasks/local.rebar.ex @@ -1,33 +1,107 @@ defmodule Mix.Tasks.Local.Rebar do use Mix.Task - import Mix.Generator, only: [create_file: 2] + @rebar2_list_url "/installs/rebar-1.x.csv" + @rebar2_escript_url "/installs/[ELIXIR_VERSION]/rebar-[REBAR_VERSION]" + @rebar3_list_url "/installs/rebar3-1.x.csv" + @rebar3_escript_url "/installs/[ELIXIR_VERSION]/rebar3-[REBAR_VERSION]" - @rebar_url "http://s3.hex.pm/rebar" - @shortdoc "Install rebar locally" + @shortdoc "Installs Rebar locally" @moduledoc """ - Fetch a copy of rebar from the given path or url. It defaults to a - rebar copy that ships with Elixir source if available or fetches it - from #{@rebar_url}. + Fetches a copy of `rebar` or `rebar3` from the given path or URL. - The local copy is stored in your MIX_HOME (defaults to ~/.mix). - This version of rebar will be used as required by `mix deps.compile`. - """ + It defaults to safely download a Rebar copy from Hex's CDN. + However, a URL can be given as argument, usually for an existing + local copy of Rebar: + + mix local.rebar rebar path/to/rebar + mix local.rebar rebar3 path/to/rebar + + If neither `rebar` or `rebar3` are specified, both versions will be fetched. 
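# A minimal sketch (illustrative, not from the patch) of how the escript URL above is
# resolved. The rebar3 version "3.4.3" is hypothetical; in the task it comes from the
# signed CSV list, and the mirror defaults to https://repo.hex.pm unless HEX_MIRROR is set.
mirror = System.get_env("HEX_MIRROR") || "https://repo.hex.pm/"

url =
  (mirror <> "/installs/[ELIXIR_VERSION]/rebar3-[REBAR_VERSION]")
  |> String.replace("[ELIXIR_VERSION]", "1.5.0")
  |> String.replace("[REBAR_VERSION]", "3.4.3")

# url => "https://repo.hex.pm/installs/1.5.0/rebar3-3.4.3"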
+ + The local copy is stored in your `MIX_HOME` (defaults to `~/.mix`). + This version of Rebar will be used as required by `mix deps.compile`. + + ## Command line options + + * `rebar PATH` - specifies a path or URL for `rebar` + * `rebar3 PATH` - specifies a path or URL for `rebar3` + + * `--sha512` - checks the archive matches the given SHA-512 checksum + + * `--force` - forces installation without a shell prompt; primarily + intended for automation in build systems like `make` + + ## Mirrors + + If you want to change the [default mirror](https://repo.hex.pm) + to use for fetching `rebar` please set the `HEX_MIRROR` environment variable. + """ + @switches [force: :boolean, sha512: :string] + @spec run(OptionParser.argv) :: true def run(argv) do - {_, argv, _} = OptionParser.parse(argv) - do_install(case argv do - [] -> @rebar_url - [path|_] -> path - end) + {opts, argv, _} = OptionParser.parse(argv, switches: @switches) + + case argv do + ["rebar", path | _] -> + install_from_path(:rebar, path, opts) + ["rebar3", path | _] -> + install_from_path(:rebar3, path, opts) + [] -> + install_from_s3(:rebar, @rebar2_list_url, @rebar2_escript_url, opts) + install_from_s3(:rebar3, @rebar3_list_url, @rebar3_escript_url, opts) + _ -> + Mix.raise "Invalid arguments given to mix local.rebar. " <> + "To find out the proper call syntax run \"mix help local.rebar\"" + end end - defp do_install(path) do - rebar = Mix.Utils.read_path!(path) - local_rebar_path = Mix.Rebar.local_rebar_path - File.mkdir_p! Path.dirname(local_rebar_path) - create_file local_rebar_path, rebar - :file.change_mode local_rebar_path, 0755 + defp install_from_path(manager, path, opts) do + local = Mix.Rebar.local_rebar_path(manager) + + if opts[:force] || Mix.Utils.can_write?(local) do + case Mix.Utils.read_path(path, opts) do + {:ok, binary} -> + File.mkdir_p!(Path.dirname(local)) + File.write!(local, binary) + File.chmod!(local, 0o755) + Mix.shell.info [:green, "* creating ", :reset, Path.relative_to_cwd(local)] + :badpath -> + Mix.raise "Expected #{inspect path} to be a URL or a local file path" + {:local, message} -> + Mix.raise message + {kind, message} when kind in [:remote, :checksum] -> + Mix.raise """ + #{message} + + Could not fetch #{manager} at: + + #{path} + + Please download the file above manually to your current directory and run: + + mix local.rebar #{manager} ./#{Path.basename(local)} + """ + end + end + + true + end + + defp install_from_s3(manager, list_url, escript_url, opts) do + hex_mirror = Mix.Hex.mirror + list_url = hex_mirror <> list_url + + {elixir_version, rebar_version, sha512} = + Mix.Local.find_matching_versions_from_signed_csv!("Rebar", list_url) + + url = + (hex_mirror <> escript_url) + |> String.replace("[ELIXIR_VERSION]", elixir_version) + |> String.replace("[REBAR_VERSION]", rebar_version) + + install_from_path(manager, url, Keyword.put(opts, :sha512, sha512)) end end diff --git a/lib/mix/lib/mix/tasks/local.uninstall.ex b/lib/mix/lib/mix/tasks/local.uninstall.ex deleted file mode 100644 index 78e825708c1..00000000000 --- a/lib/mix/lib/mix/tasks/local.uninstall.ex +++ /dev/null @@ -1,32 +0,0 @@ -defmodule Mix.Tasks.Local.Uninstall do - use Mix.Task - - @shortdoc "Uninstall local tasks or archives" - - @moduledoc """ - Uninstall local tasks: - - mix local.uninstall archive - - """ - - def run(argv) do - {_, argv, _} = OptionParser.parse(argv) - if argv == [] do - Mix.raise "No archive was given to uninstall" - else - Enum.each argv, &do_uninstall(&1) - end - end - - defp do_uninstall(name) do - 
archives = Mix.Local.archive_files(name) - - if archives == [] do - Mix.raise "Could not find a local archive named #{inspect name} "<> - "at #{inspect Mix.Local.archives_path}" - end - - Enum.each(archives, &File.rm!(&1)) - end -end diff --git a/lib/mix/lib/mix/tasks/new.ex b/lib/mix/lib/mix/tasks/new.ex index b39df404736..a6beb5482b5 100644 --- a/lib/mix/lib/mix/tasks/new.ex +++ b/lib/mix/lib/mix/tasks/new.ex @@ -2,19 +2,18 @@ defmodule Mix.Tasks.New do use Mix.Task import Mix.Generator - import Mix.Utils, only: [camelize: 1, underscore: 1] - @shortdoc "Create a new Elixir project" + @shortdoc "Creates a new Elixir project" @moduledoc """ Creates a new Elixir project. It expects the path of the project as argument. - mix new PATH [--sup] [--module MODULE] [--umbrella] + mix new PATH [--sup] [--module MODULE] [--app APP] [--umbrella] - A project at the given PATH will be created. The + A project at the given PATH will be created. The application name and module name will be retrieved - from the path, unless `--module` is given. + from the path, unless `--module` or `--app` is given. A `--sup` option can be given to generate an OTP application skeleton including a supervision tree. Normally an app is @@ -23,6 +22,12 @@ defmodule Mix.Tasks.New do An `--umbrella` option can be given to generate an umbrella project. + An `--app` option can be given in order to + name the OTP application for the project. + + A `--module` option can be given in order + to name the modules in the generated code skeleton. + ## Examples mix new hello_world @@ -31,40 +36,55 @@ defmodule Mix.Tasks.New do mix new hello_world --module HelloWorld - To generate an app with supervisor and application callback: + To generate an app with a supervision tree and an application callback: mix new hello_world --sup """ + + @switches [ + app: :string, + module: :string, + sup: :boolean, + umbrella: :boolean + ] + + @spec run(OptionParser.argv) :: :ok def run(argv) do - {opts, argv, _} = OptionParser.parse(argv, switches: [sup: :boolean, umbrella: :boolean]) + {opts, argv} = OptionParser.parse!(argv, strict: @switches) case argv do [] -> - Mix.raise "Expected PATH to be given, please use `mix new PATH`" - [path|_] -> - name = Path.basename(Path.expand(path)) - check_project_name!(name) - File.mkdir_p!(path) + Mix.raise "Expected PATH to be given, please use \"mix new PATH\"" + [path | _] -> + app = opts[:app] || Path.basename(Path.expand(path)) + check_application_name!(app, !opts[:app]) + mod = opts[:module] || Macro.camelize(app) + check_mod_name_validity!(mod) + check_mod_name_availability!(mod) + unless path == "." do + check_directory_existence!(path) + File.mkdir_p!(path) + end File.cd! path, fn -> if opts[:umbrella] do - do_generate_umbrella(name, path, opts) + generate_umbrella(app, mod, path, opts) else - do_generate(name, path, opts) + generate(app, mod, path, opts) end end end end - defp do_generate(app, path, opts) do - mod = opts[:module] || camelize(app) - assigns = [app: app, mod: mod, otp_app: otp_app(mod, !!opts[:sup])] + defp generate(app, mod, path, opts) do + assigns = [app: app, mod: mod, sup_app: sup_app(mod, !!opts[:sup]), + version: get_version(System.version)] create_file "README.md", readme_template(assigns) - create_file ".gitignore", gitignore_text + create_file ".gitignore", gitignore_text() - if in_umbrella? 
do + if in_umbrella?() do create_file "mix.exs", mixfile_apps_template(assigns) else create_file "mix.exs", mixfile_template(assigns) @@ -74,73 +94,105 @@ defmodule Mix.Tasks.New do create_file "config/config.exs", config_template(assigns) create_directory "lib" + create_file "lib/#{app}.ex", lib_template(assigns) if opts[:sup] do - create_file "lib/#{app}.ex", lib_sup_template(assigns) - else - create_file "lib/#{app}.ex", lib_template(assigns) + create_file "lib/#{app}/application.ex", lib_app_template(assigns) end create_directory "test" create_file "test/test_helper.exs", test_helper_template(assigns) create_file "test/#{app}_test.exs", test_template(assigns) - Mix.shell.info """ + """ - Your mix project was created successfully. - You can use mix to compile it, test it, and more: + Your Mix project was created successfully. + You can use "mix" to compile it, test it, and more: - cd #{path} - mix test + #{cd_path(path)}mix test - Run `mix help` for more commands. + Run "mix help" for more commands. """ + |> String.trim_trailing + |> Mix.shell.info end - defp otp_app(_mod, false) do - " [applications: []]" - end + defp sup_app(_mod, false), do: "" + defp sup_app(mod, true), do: ",\n mod: {#{mod}.Application, []}" - defp otp_app(mod, true) do - " [applications: [],\n mod: {#{mod}, []}]" - end + defp cd_path("."), do: "" + defp cd_path(path), do: "cd #{path}\n " - defp do_generate_umbrella(app, path, _opts) do - mod = camelize(app) - assigns = [mod: mod] + defp generate_umbrella(_app, mod, path, _opts) do + assigns = [app: nil, mod: mod] - create_file ".gitignore", gitignore_text + create_file ".gitignore", gitignore_text() create_file "README.md", readme_template(assigns) create_file "mix.exs", mixfile_umbrella_template(assigns) create_directory "apps" create_directory "config" - create_file "config/config.exs", - config_template(assigns) <> config_umbrella_template(assigns) + create_file "config/config.exs", config_umbrella_template(assigns) - Mix.shell.info """ + """ Your umbrella project was created successfully. Inside your project, you will find an apps/ directory where you can create and host many apps: - cd #{path} - cd apps + #{cd_path(path)}cd apps mix new my_app - Commands like `mix compile` and `mix test` when executed + Commands like "mix compile" and "mix test" when executed in the umbrella project root will automatically run for each application in the apps/ directory. """ + |> String.trim_trailing + |> Mix.shell.info + end + + defp check_application_name!(name, inferred?) do + unless name =~ Regex.recompile!(~r/^[a-z][a-z0-9_]*$/) do + Mix.raise "Application name must start with a letter and have only lowercase " <> + "letters, numbers and underscore, got: #{inspect name}" <> + (if inferred? do + ". 
The application name is inferred from the path, if you'd like to " <> + "explicitly name the application then use the \"--app APP\" option" + else + "" + end) + end + end + + defp check_mod_name_validity!(name) do + unless name =~ Regex.recompile!(~r/^[A-Z]\w*(\.[A-Z]\w*)*$/) do + Mix.raise "Module name must be a valid Elixir alias (for example: Foo.Bar), got: #{inspect name}" + end + end + + defp check_mod_name_availability!(name) do + name = Module.concat(Elixir, name) + if Code.ensure_loaded?(name) do + Mix.raise "Module name #{inspect name} is already taken, please choose another name" + end end - defp check_project_name!(name) do - unless name =~ ~r/^[a-z][\w_]*$/ do - Mix.raise "Project path must start with a letter and have only lowercase letters, numbers and underscore" + defp check_directory_existence!(path) do + if File.dir?(path) and not Mix.shell.yes?("The directory #{inspect(path)} already exists. Are you sure you want to continue?") do + Mix.raise "Please select another directory for installation" end end + defp get_version(version) do + {:ok, version} = Version.parse(version) + "#{version.major}.#{version.minor}" <> + case version.pre do + [h | _] -> "-#{h}" + [] -> "" + end + end + defp in_umbrella? do apps = Path.dirname(File.cwd!) @@ -154,49 +206,78 @@ defmodule Mix.Tasks.New do end end - embed_template :readme, """ - <%= @mod %> - <%= String.duplicate("=", String.length(@mod)) %> + embed_template :readme, """ + # <%= @mod %> + + **TODO: Add description** + <%= if @app do %> + ## Installation + + If [available in Hex](https://hex.pm/docs/publish), the package can be installed + by adding `<%= @app %>` to your list of dependencies in `mix.exs`: + + ```elixir + def deps do + [{:<%= @app %>, "~> 0.1.0"}] + end + ``` + + Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) + and published on [HexDocs](https://hexdocs.pm). Once published, the docs can + be found at [https://hexdocs.pm/<%= @app %>](https://hexdocs.pm/<%= @app %>). + <% end %> + """ + + embed_text :gitignore, """ + # The directory Mix will write compiled artifacts to. + /_build/ + + # If you run "mix test --cover", coverage assets end up here. + /cover/ - ** TODO: Add description ** - """ + # The directory Mix downloads your dependencies sources to. + /deps/ - embed_text :gitignore, """ - /_build - /deps - erl_crash.dump - *.ez - """ + # Where 3rd-party dependencies like ExDoc output generated docs. + /doc/ + + # Ignore .fetch files in case you like to edit your project deps locally. + /.fetch + + # If the VM crashes, it generates a dump, let's ignore it too. + erl_crash.dump + + # Also ignore archive artifacts (built via "mix archive.build"). + *.ez + """ embed_template :mixfile, """ defmodule <%= @mod %>.Mixfile do use Mix.Project def project do - [app: :<%= @app %>, - version: "0.0.1", - elixir: "~> <%= System.version %>", - deps: deps] + [ + app: :<%= @app %>, + version: "0.1.0", + elixir: "~> <%= @version %>", + start_permanent: Mix.env == :prod, + deps: deps() + ] end - # Configuration for the OTP application - # - # Type `mix help compile.app` for more information + # Run "mix help compile.app" to learn about applications. 
def application do - <%= @otp_app %> + [ + extra_applications: [:logger]<%= @sup_app %> + ] end - # Dependencies can be hex.pm packages: - # - # {:mydep, "~> 0.3.0"} - # - # Or git/path repositories: - # - # {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1"} - # - # Type `mix help deps` for more examples and options + # Run "mix help deps" to learn about dependencies. defp deps do - [] + [ + # {:dep_from_hexpm, "~> 0.3.0"}, + # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}, + ] end end """ @@ -206,36 +287,33 @@ defmodule Mix.Tasks.New do use Mix.Project def project do - [app: :<%= @app %>, - version: "0.0.1", - deps_path: "../../deps", - lockfile: "../../mix.lock", - elixir: "~> <%= System.version %>", - deps: deps] + [ + app: :<%= @app %>, + version: "0.1.0", + build_path: "../../_build", + config_path: "../../config/config.exs", + deps_path: "../../deps", + lockfile: "../../mix.lock", + elixir: "~> <%= @version %>", + start_permanent: Mix.env == :prod, + deps: deps() + ] end - # Configuration for the OTP application - # - # Type `mix help compile.app` for more information + # Run "mix help compile.app" to learn about applications. def application do - <%= @otp_app %> + [ + extra_applications: [:logger]<%= @sup_app %> + ] end - # Dependencies can be hex.pm packages: - # - # {:mydep, "~> 0.3.0"} - # - # Or git/path repositories: - # - # {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1"} - # - # To depend on another app inside the umbrella: - # - # {:myapp, in_umbrella: true} - # - # Type `mix help deps` for more examples and options + # Run "mix help deps" to learn about dependencies. defp deps do - [] + [ + # {:dep_from_hexpm, "~> 0.3.0"}, + # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}, + # {:sibling_app_in_umbrella, in_umbrella: true}, + ] end end """ @@ -245,22 +323,18 @@ defmodule Mix.Tasks.New do use Mix.Project def project do - [apps_path: "apps", - deps: deps] + [ + apps_path: "apps", + start_permanent: Mix.env == :prod, + deps: deps() + ] end - # Dependencies can be hex.pm packages: - # - # {:mydep, "~> 0.3.0"} - # - # Or git/path repositories: - # - # {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1"} + # Dependencies listed here are available only for this + # project and cannot be accessed from applications inside + # the apps folder. # - # Type `mix help deps` for more examples and options. - # - # Dependencies listed here are available only for this project - # and cannot be accessed from applications inside the apps folder + # Run "mix help deps" for examples and options. defp deps do [] end @@ -269,19 +343,27 @@ defmodule Mix.Tasks.New do embed_template :config, ~S""" # This file is responsible for configuring your application - # and its dependencies. The Mix.Config module provides functions - # to aid in doing so. + # and its dependencies with the aid of the Mix.Config module. use Mix.Config - # Note this file is loaded before any dependency and is restricted + # This configuration is loaded before any dependency and is restricted # to this project. If another project depends on this project, this - # file won't be loaded nor affect the parent project. + # file won't be loaded nor affect the parent project. For this reason, + # if you want to provide default values for your application for + # 3rd-party users, it should be done in your "mix.exs" file. 
- # Sample configuration: + # You can configure your application as: + # + # config :<%= @app %>, key: :value + # + # and access this configuration in your application as: + # + # Application.get_env(:<%= @app %>, :key) + # + # You can also configure a 3rd-party app: + # + # config :logger, level: :info # - # config :my_dep, - # key: :value, - # limit: 42 # It is also possible to import configuration files, relative to this # directory. For example, you can emulate configuration per environment @@ -293,35 +375,64 @@ defmodule Mix.Tasks.New do """ embed_template :config_umbrella, ~S""" + # This file is responsible for configuring your application + # and its dependencies with the aid of the Mix.Config module. + use Mix.Config + + # By default, the umbrella project as well as each child + # application will require this configuration file, ensuring + # they all use the same configuration. While one could + # configure all applications here, we prefer to delegate + # back to each application for organization purposes. + import_config "../apps/*/config/config.exs" - # Finally, note that configuration defined in children projects - # inside apps/ are not automatically available to the umbrella parent. - # They can, however, be easily imported as well: + # Sample configuration (overrides the imported configuration above): # - # import_config "../apps/foo/config/config.exs" - # import_config "../apps/bar/config/config.exs" + # config :logger, :console, + # level: :info, + # format: "$date $time [$level] $metadata$message\n", + # metadata: [:user_id] """ embed_template :lib, """ defmodule <%= @mod %> do + @moduledoc \""" + Documentation for <%= @mod %>. + \""" + + @doc \""" + Hello world. + + ## Examples + + iex> <%= @mod %>.hello + :world + + \""" + def hello do + :world + end end """ - embed_template :lib_sup, """ - defmodule <%= @mod %> do + embed_template :lib_app, """ + defmodule <%= @mod %>.Application do + # See https://hexdocs.pm/elixir/Application.html + # for more information on OTP Applications + @moduledoc false + use Application - # See http://elixir-lang.org/docs/stable/elixir/Application.html - # for more information on OTP Applications def start(_type, _args) do import Supervisor.Spec, warn: false + # Define workers and child supervisors to be supervised children = [ - # Define workers and child supervisors to be supervised - # worker(<%= @mod %>.Worker, [arg1, arg2, arg3]) + # Starts a worker by calling: <%= @mod %>.Worker.start_link(arg1, arg2, arg3) + # worker(<%= @mod %>.Worker, [arg1, arg2, arg3]), ] - # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html + # See https://hexdocs.pm/elixir/Supervisor.html # for other strategies and supported options opts = [strategy: :one_for_one, name: <%= @mod %>.Supervisor] Supervisor.start_link(children, opts) @@ -332,9 +443,10 @@ defmodule Mix.Tasks.New do embed_template :test, """ defmodule <%= @mod %>Test do use ExUnit.Case + doctest <%= @mod %> - test "the truth" do - assert 1 + 1 == 2 + test "greets the world" do + assert <%= @mod %>.hello() == :world end end """ diff --git a/lib/mix/lib/mix/tasks/profile.cprof.ex b/lib/mix/lib/mix/tasks/profile.cprof.ex new file mode 100644 index 00000000000..a0636b74e73 --- /dev/null +++ b/lib/mix/lib/mix/tasks/profile.cprof.ex @@ -0,0 +1,224 @@ +defmodule Mix.Tasks.Profile.Cprof do + use Mix.Task + + @shortdoc "Profiles the given file or expression with cprof" + + @moduledoc """ + Profiles the given file or expression using Erlang's `cprof` tool. 
+ + `cprof` can be useful when you want to discover the bottlenecks related + to function calls. + + Before running the code, it invokes the `app.start` task which compiles + and loads your project. Then the target expression is profiled, together + with all matching function calls, by setting breakpoints containing + counters. These can only be set on BEAM code so BIFs cannot be call + count traced. + + To profile the code, you can use syntax similar to the `mix run` task: + + mix profile.cprof -e Hello.world + mix profile.cprof -e "[1, 2, 3] |> Enum.reverse |> Enum.map(&Integer.to_string/1)" + mix profile.cprof my_script.exs arg1 arg2 arg3 + + ## Command line options + + * `--matching` - only profile calls matching the given `Module.function/arity` pattern + * `--limit` - filters out any results with a call count less than the limit + * `--module` - filters out any results not pertaining to the given module + * `--config`, `-c` - loads the given configuration file + * `--eval`, `-e` - evaluate the given code + * `--require`, `-r` - requires pattern before running the command + * `--parallel`, `-p` - makes all requires parallel + * `--no-compile` - does not compile even if files require compilation + * `--no-deps-check` - does not check dependencies + * `--no-archives-check` - does not check archives + * `--no-halt` - does not halt the system after running the command + * `--no-start` - does not start applications after compilation + * `--no-elixir-version-check` - does not check the Elixir version from mix.exs + + ## Profile output + + Example output: + CNT + Total 15 + Enum 6 <-- + Enum."-map/2-lists^map/1-0-"/2 4 + Enum.reverse/1 1 + Enum.map/2 1 + :elixir_compiler 4 <-- + anonymous fn/1 in :elixir_compiler.__FILE__/1 3 + anonymous fn/0 in :elixir_compiler.__FILE__/1 1 + String.Chars.Integer 3 <-- + String.Chars.Integer.to_string/1 3 + :erlang 2 <-- + :erlang.trace_pattern/3 2 + Profile done over 20229 matching functions + + The default output contains data gathered from all matching functions. The left + column structures each module and its total call count trace is presented on the right. + Each module has its count discriminated by function below. The `<--` symbol is meant to + help visualize where a new module call count begins. + + The first row (Total) is the sum of all function calls. In the last row the number of + matching functions that were considered for profiling is presented. + + When `--matching` option is specified, call count tracing will be started only for + the functions matching the given pattern: + + String.Chars.Integer 3 <-- + String.Chars.Integer.to_string/1 3 + Profile done over 1 matching functions + + The pattern can be a module name, such as `String` to count all calls to that module, + a call without arity, such as `String.split`, to count all calls to that function + regardless of arity, or a call with arity, such as `String.split/2`, to count all + calls to that exact module, function and arity. + + ## Caveats + + You should be aware the profiler is stopped as soon as the code has finished running. This + may need special attention, when: running asynchronous code as function calls which were + called before the profiler stopped will not be counted; running synchronous code as long + running computations and a profiler without a proper MFA trace pattern or filter may + lead to a result set which is difficult to comprehend. 
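# A minimal sketch (illustrative, not from the patch) of the bare :cprof calls the task
# wraps, assuming the profiled expression has already been wrapped in a zero-arity fun.
fun = fn -> [1, 2, 3] |> Enum.reverse() |> Enum.map(&Integer.to_string/1) end

:cprof.start()                          # set call-count breakpoints on loaded BEAM modules
fun.()                                  # run the code being measured
:cprof.pause()                          # stop counting before analysing
{total, per_module} = :cprof.analyse()  # {total call count, per-module breakdown}
:cprof.stop()                           # remove the breakpoints again
IO.inspect({total, per_module})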
+ + Other caveats are the impossibility to call count trace BIFs, since breakpoints can + only be set on BEAM code; functions calls performed by `:cprof` are not traced; the + maximum size of a call counter is equal to the host machine's word size + (for example, 2147483647 in a 32-bit host). + """ + + @switches [parallel: :boolean, require: :keep, eval: :keep, config: :keep, matching: :string, + halt: :boolean, compile: :boolean, deps_check: :boolean, limit: :integer, + module: :string, start: :boolean, archives_check: :boolean, warmup: :boolean, + elixir_version_check: :boolean, parallel_require: :keep] + + @spec run(OptionParser.argv) :: :ok + def run(args) do + {opts, head} = OptionParser.parse_head!(args, + aliases: [r: :require, p: :parallel, e: :eval, c: :config], + strict: @switches) + Mix.Tasks.Run.run(args, opts, head, &profile_code(&1, opts), + &profile_code(File.read!(&1), opts)) + end + + defp profile_code(code_string, opts) do + content = + quote do + unquote(__MODULE__).profile(fn -> + unquote(Code.string_to_quoted!(code_string)) + end, unquote(opts)) + end + # Use compile_quoted since it leaves less noise than eval_quoted + Code.compile_quoted(content) + end + + @doc false + def profile(fun, opts) do + fun + |> profile_and_analyse(opts) + |> print_output + + :cprof.stop() + end + + defp profile_and_analyse(fun, opts) do + if Keyword.get(opts, :warmup, true) do + IO.puts "Warmup..." + fun.() + end + + num_matched_functions = case Keyword.get(opts, :matching) do + nil -> + :cprof.start() + matching -> + case Mix.Utils.parse_mfa(matching) do + {:ok, args} -> apply(:cprof, :start, args) + :error -> Mix.raise "Invalid matching pattern: #{matching}" + end + end + + apply(fun, []) + + :cprof.pause() + + limit = Keyword.get(opts, :limit) + module = Keyword.get(opts, :module) + + analysis_result = case {limit, module} do + {nil, nil} -> + :cprof.analyse() + {limit, nil} -> + :cprof.analyse(limit) + {limit, module} -> + module = string_to_existing_module(module) + if limit do + :cprof.analyse(module, limit) + else + :cprof.analyse(module) + end + end + + {num_matched_functions, analysis_result} + end + + defp string_to_existing_module(":" <> module), do: String.to_existing_atom(module) + defp string_to_existing_module(module), do: Module.concat([module]) + + defp print_output({num_matched_functions, {all_call_count, mod_analysis_list}}) do + print_total_row(all_call_count) + Enum.each(mod_analysis_list, &print_analysis_result/1) + print_number_of_matched_functions(num_matched_functions) + end + + defp print_output({num_matched_functions, {_mod, _call_count, _mod_fun_list} = mod_analysis}) do + print_analysis_result(mod_analysis) + print_number_of_matched_functions(num_matched_functions) + end + + defp print_number_of_matched_functions(num_matched_functions) do + IO.puts "Profile done over #{num_matched_functions} matching functions" + end + + defp print_total_row(all_call_count) do + IO.puts "" + print_row(["s", "s", "s"], ["", "CNT", ""]) + print_row(["s", "B", "s"], ["Total", all_call_count, ""]) + end + + defp print_analysis_result({module, total_module_count, module_fun_list}) do + module + |> Atom.to_string + |> module_name_for_printing() + |> print_module(total_module_count, "", "<--") + Enum.each(module_fun_list, &print_function(&1, " ")) + end + + defp print_module(module, count, prefix, suffix) do + print_row(["s", "B", "s"], ["#{prefix}#{module}", count, suffix]) + end + + defp module_name_for_printing("Elixir." 
<> rest = _module_name), do: rest + defp module_name_for_printing(module_name), do: ":" <> module_name + + defp print_function({fun, count}, prefix, suffix \\ "") do + print_row(["s", "B", "s"], ["#{prefix}#{function_text(fun)}", count, suffix]) + end + + defp function_text({module, function, arity}) do + Exception.format_mfa(module, function, arity) + end + + defp function_text(other), do: inspect(other) + + @columns [-60, 12, 5] + defp print_row(formats, data) do + Stream.zip(@columns, formats) + |> Stream.map(fn({width, format}) -> "~#{width}#{format}" end) + |> Enum.join + |> :io.format(data) + + IO.puts "" + end +end diff --git a/lib/mix/lib/mix/tasks/profile.fprof.ex b/lib/mix/lib/mix/tasks/profile.fprof.ex new file mode 100644 index 00000000000..22a4a4d1388 --- /dev/null +++ b/lib/mix/lib/mix/tasks/profile.fprof.ex @@ -0,0 +1,265 @@ +defmodule Mix.Tasks.Profile.Fprof do + use Mix.Task + + @shortdoc "Profiles the given file or expression with fprof" + + @moduledoc """ + Profiles the given file or expression using Erlang's `fprof` tool. + + `fprof` can be useful when you want to discover the bottlenecks of a + sequential code. + + Before running the code, it invokes the `app.start` task which compiles + and loads your project. Then the target expression is profiled, together + with all processes which are spawned by it. Other processes (e.g. those + residing in the OTP application supervision tree) are not profiled. + + To profile the code, you can use syntax similar to the `mix run` task: + + mix profile.fprof -e Hello.world + mix profile.fprof my_script.exs arg1 arg2 arg3 + + ## Command line options + + * `--callers` - prints detailed information about immediate callers and called functions + * `--details` - includes profile data for each profiled process + * `--sort key` - sorts the output by given key: `acc` (default) or `own` + * `--config`, `-c` - loads the given configuration file + * `--eval`, `-e` - evaluates the given code + * `--require`, `-r` - requires pattern before running the command + * `--parallel`, `-p` - makes all requires parallel + * `--no-compile` - does not compile even if files require compilation + * `--no-deps-check` - does not check dependencies + * `--no-archives-check` - does not check archives + * `--no-start` - does not start applications after compilation + * `--no-elixir-version-check` - does not check the Elixir version from mix.exs + * `--no-warmup` - does not execute code once before profiling + + ## Profile output + + Example output: + # CNT ACC (ms) OWN (ms) + Total 200279 1972.188 1964.579 + :fprof.apply_start_stop/4 0 1972.188 0.012 + anonymous fn/0 in :elixir_compiler_2 1 1972.167 0.001 + Test.run/0 1 1972.166 0.007 + Test.do_something/1 3 1972.131 0.040 + Test.bottleneck/0 1 1599.490 0.007 + ... + + The default output contains data gathered from all profiled processes. + All times are wall clock milliseconds. The columns have the following meaning: + + * CNT - total number of invocations of the given function + * ACC - total time spent in the function + * OWN - time spent in the function, excluding the time of called functions + + The first row (Total) is the sum of all functions executed in all profiled + processes. For the given output, we had a total of 200279 function calls and spent + about 2 seconds running the code. + + More detailed information is returned if you provide the `--callers` and + `--details` options. 
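# A minimal sketch (illustrative, not from the patch) of the bare :fprof workflow the
# task wraps; here the analysis is printed straight to the console, whereas the task
# captures it into a StringIO and reformats it.
fun = fn -> 1..1_000 |> Enum.shuffle() |> Enum.sort() end

{:ok, tracer} = :fprof.profile(:start)     # start the profiling server
:fprof.apply(fun, [], tracer: tracer)      # run the fun under trace
:fprof.analyse(totals: true, sort: :acc)   # print the CNT / ACC (ms) / OWN (ms) rows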
+ + When `--callers` option is specified, you'll see expanded function entries: + + Mod.caller1/0 3 200.000 0.017 + Mod.caller2/0 2 100.000 0.017 + Mod.some_function/0 5 300.000 0.017 <-- + Mod.called1/0 4 250.000 0.010 + Mod.called2/0 1 50.000 0.030 + + Here, the arrow (`<--`) indicates the __marked__ function - the function + described by this paragraph. You also see its immediate callers (above) and + called functions (below). + + All the values of caller functions describe the marked function. For example, + the first row means that `Mod.caller1/0` invoked `Mod.some_function/0` 3 times. + 200ms of the total time spent in `Mod.some_function/0` was spent processing + calls from this particular caller. + + In contrast, the values for the called functions describe those functions, but + in the context of the marked function. For example, the last row means that + `Mod.called2/0` was called once by `Mod.some_function/0`, and in that case + the total time spent in the function was 50ms. + + For a detailed explanation it's worth reading the analysis in + [Erlang documentation for fprof](http://www.erlang.org/doc/man/fprof.html#analysis). + + ## Caveats + + You should be aware that the code being profiled is running in an anonymous + function which is invoked by [`:fprof` module](http://wwww.erlang.org/doc/man/fprof.html). + Thus, you'll see some additional entries in your profile output, + such as `:fprof` calls, an anonymous + function with high ACC time, or an `:undefined` function which represents + the outer caller (non-profiled code which started the profiler). + + Also, keep in mind that profiling might significantly increase the running time + of the profiled processes. This might skew your results if, for example, those + processes perform some I/O operations, since running time of those operations + will remain unchanged, while CPU bound operations of the profiled processes + might take significantly longer. Thus, when profiling some intensive program, + try to reduce such dependencies, or be aware of the resulting bias. + + Finally, it's advised to profile your program with the `prod` environment, since + this should provide more realistic insights into bottlenecks. + """ + + @switches [parallel: :boolean, require: :keep, eval: :keep, config: :keep, + compile: :boolean, deps_check: :boolean, start: :boolean, archives_check: :boolean, + details: :boolean, callers: :boolean, sort: :string, elixir_version_check: :boolean, + warmup: :boolean, parallel_require: :keep] + + @spec run(OptionParser.argv) :: :ok + def run(args) do + {opts, head} = OptionParser.parse_head!(args, + aliases: [r: :require, p: :parallel, e: :eval, c: :config], + strict: @switches) + Mix.Tasks.Run.run(args, opts, head, &profile_code(&1, opts), + &profile_code(File.read!(&1), opts)) + end + + # Profiling functions + + defp profile_code(code_string, opts) do + content = + quote do + unquote(__MODULE__).profile(fn -> + unquote(Code.string_to_quoted!(code_string)) + end, unquote(opts)) + end + # Use compile_quoted since it leaves less noise than eval_quoted + Code.compile_quoted(content) + end + + @doc false + def profile(fun, opts) do + fun + |> profile_and_analyse(opts) + |> print_output + end + + defp profile_and_analyse(fun, opts) do + if Keyword.get(opts, :warmup, true) do + IO.puts "Warmup..." 
+ fun.() + end + + sorting = case Keyword.get(opts, :sort, "acc") do + "acc" -> :acc + "own" -> :own + end + + {:ok, tracer} = :fprof.profile(:start) + :fprof.apply(fun, [], tracer: tracer) + + {:ok, analyse_dest} = StringIO.open("") + try do + :fprof.analyse( + dest: analyse_dest, + totals: true, + details: Keyword.get(opts, :details, false), + callers: Keyword.get(opts, :callers, false), + sort: sorting + ) + else + :ok -> + {_in, analysis_output} = StringIO.contents(analyse_dest) + String.to_charlist(analysis_output) + after + StringIO.close(analyse_dest) + end + end + + defp print_output(analysis_output) do + {_analysis_options, analysis_output} = next_term(analysis_output) + {total_row, analysis_output} = next_term(analysis_output) + print_total_row(total_row) + + Stream.unfold(analysis_output, &next_term/1) + |> Enum.each(&print_analysis_result/1) + end + + defp next_term(charlist) do + case :erl_scan.tokens([], charlist, 1) do + {:done, result, leftover} -> + case result do + {:ok, tokens, _} -> + {:ok, term} = :erl_parse.parse_term(tokens) + {term, leftover} + + {:eof, _} -> nil + end + _ -> nil + end + end + + defp print_total_row([{:totals, count, acc, own}]) do + IO.puts "" + print_row(["s", "s", "s", "s", "s"], ["", "CNT", "ACC (ms)", "OWN (ms)", ""]) + print_row(["s", "B", ".3f", ".3f", "s"], ["Total", count, acc, own, ""]) + end + + # Represents the "PID" entry + defp print_analysis_result([{pid_atom, count, :undefined, own} | info]) do + print_process(pid_atom, count, own) + + if spawned_by = info[:spawned_by] do + IO.puts(" spawned by #{spawned_by}") + end + + if spawned_as = info[:spawned_as] do + IO.puts(" as #{function_text(spawned_as)}") + end + + if initial_calls = info[:initial_calls] do + IO.puts(" initial calls:") + Enum.each(initial_calls, &IO.puts(" #{function_text(&1)}")) + end + + IO.puts("") + end + + # The function entry, when --callers option is provided + defp print_analysis_result({callers, function, subcalls}) do + IO.puts("") + Enum.each(callers, &print_function/1) + print_function(function, " ", "<--") + Enum.each(subcalls, &print_function(&1, " ")) + end + + # The function entry in the total section, and when --callers option is not + # provided + defp print_analysis_result({_fun, _count, _acc, _own} = function) do + print_function(function, "", "") + end + + defp print_process(pid_atom, count, own) do + IO.puts([?\n, String.duplicate("-", 100)]) + print_row(["s", "B", "s", ".3f", "s"], ["#{pid_atom}", count, "", own, ""]) + end + + defp print_function({fun, count, acc, own}, prefix \\ "", suffix \\ "") do + print_row( + ["s", "B", ".3f", ".3f", "s"], + ["#{prefix}#{function_text(fun)}", count, acc, own, suffix] + ) + end + + defp function_text({module, function, arity}) do + Exception.format_mfa(module, function, arity) + end + + defp function_text(other), do: inspect(other) + + @columns [-60, 10, 12, 12, 5] + defp print_row(formats, data) do + Stream.zip(@columns, formats) + |> Stream.map(fn({width, format}) -> "~#{width}#{format}" end) + |> Enum.join + |> :io.format(data) + + IO.puts "" + end +end diff --git a/lib/mix/lib/mix/tasks/run.ex b/lib/mix/lib/mix/tasks/run.ex index c4f7a8eddcf..6e42177df7d 100644 --- a/lib/mix/lib/mix/tasks/run.ex +++ b/lib/mix/lib/mix/tasks/run.ex @@ -1,69 +1,103 @@ defmodule Mix.Tasks.Run do use Mix.Task - @shortdoc "Run the given file or expression" + @shortdoc "Runs the given file or expression" @moduledoc """ Runs the given file or expression in the context of the application. 
- Before running the code, it invokes the `app.start` task which compiles - and loads your project. - - It is the goal of this task to provide a subset of the functionality - existent in the `elixir` executable, including setting up the `System.argv`: + You can use this task to execute a particular file or command: mix run -e Hello.world + mix run my_script.exs + + This task provides a subset of the functionality available in the + `elixir` executable, including setting up the `System.argv/0` arguments: + mix run my_script.exs arg1 arg2 arg3 - Many command line options need to be passed to the `elixir` executable - directly, which can be done as follows: + You can also use this task to simply start an application and keep + it running without halting: + + mix run --no-halt + + Before running any command, the task compiles and starts the current + application. Those can be configured with the options below. - elixir --sname hello -S mix run -e "My.code" + You may also pass options specific to the `elixir` executable as follows: + + elixir --sname hello -S mix run --no-halt ## Command line options * `--config`, `-c` - loads the given configuration file - * `--eval`, `-e` - evaluate the given code - * `--require`, `-r` - require pattern before running the command - * `--parallel-require`, `-pr` - - requires pattern in parallel - * `--no-compile` - do not compile even if files require compilation - * `--no-deps-check` - do not check dependencies - * `--no-halt` - do not halt the system after running the command - * `--no-start` - do not start applications after compilation + * `--eval`, `-e` - evaluate the given code + * `--require`, `-r` - requires pattern before running the command + * `--parallel`, `-p` - makes all requires parallel + * `--no-compile` - does not compile even if files require compilation + * `--no-deps-check` - does not check dependencies + * `--no-archives-check` - does not check archives + * `--no-halt` - does not halt the system after running the command + * `--no-start` - does not start applications after compilation + * `--no-elixir-version-check` - does not check the Elixir version from mix.exs """ + + @spec run(OptionParser.argv) :: :ok def run(args) do - {opts, head, _} = OptionParser.parse_head(args, - aliases: [r: :require, pr: :parallel_require, e: :eval, c: :config], - switches: [parallel_require: :keep, require: :keep, eval: :keep, config: :keep]) + {opts, head} = OptionParser.parse_head!(args, + aliases: [r: :require, p: :parallel, e: :eval, c: :config], + strict: [parallel: :boolean, require: :keep, eval: :keep, config: :keep, + halt: :boolean, compile: :boolean, deps_check: :boolean, start: :boolean, + archives_check: :boolean, elixir_version_check: :boolean, parallel_require: :keep]) + + run(args, opts, head, &Code.eval_string/1, &Code.require_file/1) + unless Keyword.get(opts, :halt, true), do: Process.sleep(:infinity) + :ok + end - # Require the project to be available - Mix.Project.get! 
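+  # This five-argument variant is also used by the profiling tasks (for example
+  # Mix.Tasks.Profile.Fprof), which pass their own expression and file
+  # evaluators in place of Code.eval_string/1 and Code.require_file/1.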
+ @doc false + @spec run(OptionParser.argv, Keyword.t, OptionParser.argv, + (String.t -> term()), (String.t -> term())) :: :ok + def run(args, opts, head, expr_evaluator, file_evaluator) do + # TODO: Remove on v2.0 + opts = + Enum.flat_map(opts, fn + {:parallel_require, value} -> + IO.warn "the --parallel-require option is deprecated in favour of using " <> + "--parallel to make all requires parallel and --require VAL for requiring" + [require: value, parallel: true] + opt -> + [opt] + end) {file, argv} = case {Keyword.has_key?(opts, :eval), head} do - {true, _} -> {nil, head} - {_, [h|t]} -> {h, t} - {_, []} -> {nil, []} + {true, _} -> {nil, head} + {_, [head | tail]} -> {head, tail} + {_, []} -> {nil, []} end System.argv(argv) - process_config opts + process_config(opts) # Start app after rewriting System.argv, - # but before requiring and evaling - Mix.Task.run "app.start", args - process_load opts + # but before requiring and evaling. + if opts[:start] != false do + Mix.Task.run "app.start", args + end + + process_load(opts, expr_evaluator) if file do if File.regular?(file) do - Code.require_file(file) + file_evaluator.(file) else Mix.raise "No such file: #{file}" end end - if opts[:no_halt], do: :timer.sleep(:infinity) + + :ok end defp process_config(opts) do @@ -75,24 +109,24 @@ defmodule Mix.Tasks.Run do end end - defp process_load(opts) do + defp process_load(opts, expr_evaluator) do + require_runner = + if opts[:parallel] do + &Kernel.ParallelRequire.files/1 + else + fn(files) -> Enum.each(files, &Code.require_file/1) end + end + Enum.each opts, fn - {:parallel_require, value} -> - case filter_patterns(value) do - [] -> - Mix.raise "No files matched pattern #{inspect value} given to --parallel-require" - filtered -> - Kernel.ParallelRequire.files(filtered) - end {:require, value} -> case filter_patterns(value) do [] -> Mix.raise "No files matched pattern #{inspect value} given to --require" filtered -> - Enum.each(filtered, &Code.require_file(&1)) + require_runner.(filtered) end {:eval, value} -> - Code.eval_string(value) + expr_evaluator.(value) _ -> :ok end diff --git a/lib/mix/lib/mix/tasks/test.ex b/lib/mix/lib/mix/tasks/test.ex index ab3d1cf7636..dadc467e1a5 100644 --- a/lib/mix/lib/mix/tasks/test.ex +++ b/lib/mix/lib/mix/tasks/test.ex @@ -3,16 +3,23 @@ defmodule Mix.Tasks.Test do @moduledoc false def start(compile_path, opts) do - Mix.shell.info "Cover compiling modules ... " - :cover.start - :cover.compile_beam_directory(compile_path |> to_char_list) + Mix.shell.info "Cover compiling modules ..." + _ = :cover.start + + case :cover.compile_beam_directory(compile_path |> to_charlist) do + results when is_list(results) -> + :ok + {:error, _} -> + Mix.raise "Failed to cover compile directory: " <> compile_path + end + output = opts[:output] fn() -> - Mix.shell.info "\nGenerating cover results ... " + Mix.shell.info "\nGenerating cover results ..." File.mkdir_p!(output) Enum.each :cover.modules, fn(mod) -> - :cover.analyse_to_file(mod, '#{output}/#{mod}.html', [:html]) + {:ok, _} = :cover.analyse_to_file(mod, '#{output}/#{mod}.html', [:html]) end end end @@ -20,11 +27,14 @@ defmodule Mix.Tasks.Test do use Mix.Task - @shortdoc "Run a project's tests" + alias Mix.Compilers.Test, as: CT + + @shortdoc "Runs a project's tests" @recursive true + @preferred_cli_env :test @moduledoc """ - Run the tests for a project. + Runs the tests for a project. 
This task starts the current application, loads up `test/test_helper.exs` and then requires all files matching the @@ -37,17 +47,30 @@ defmodule Mix.Tasks.Test do ## Command line options - * `--trace` - run tests with detailed reporting; automatically sets `--max-cases` to 1 - * `--max-cases` - set the maximum number of cases running async - * `--cover` - the directory to include coverage results - * `--force` - forces compilation regardless of modification times - * `--no-compile` - do not compile, even if files require compilation - * `--no-start` - do not start applications after compilation - * `--no-color` - disable color in the output - * `--include` - include tests that match the filter - * `--exclude` - exclude tests that match the filter - * `--only` - run only tests that match the filter - * `--seed` - seeds the random number generator used to randomize tests order + * `--color` - enables color in the output + * `--cover` - the directory to include coverage results + * `--exclude` - excludes tests that match the filter + * `--force` - forces compilation regardless of modification times + * `--formatter` - formatter module + * `--include` - includes tests that match the filter + * `--listen-on-stdin` - runs tests, and then listens on stdin. Receiving a newline will + result in the tests being run again. Very useful when combined with `--stale` and + external commands which produce output on stdout upon file system modification. + * `--max-cases` - sets the maximum number of cases running async + * `--no-archives-check` - does not check archives + * `--no-color` - disables color in the output + * `--no-compile` - does not compile, even if files require compilation + * `--no-deps-check` - does not check dependencies + * `--no-elixir-version-check` - does not check the Elixir version from mix.exs + * `--no-start` - does not start applications after compilation + * `--only` - runs only tests that match the filter + * `--raise` - raises if the test suite failed + * `--seed` - seeds the random number generator used to randomize tests order; + `--seed 0` disables randomization + * `--stale` - runs only tests which reference modules that changed since the + last `test --stale`. You can read more about this option in the "Stale" section below. + * `--timeout` - sets the timeout for the tests + * `--trace` - runs tests with detailed reporting; automatically sets `--max-cases` to 1 ## Filters @@ -64,7 +87,7 @@ defmodule Mix.Tasks.Test do mix test --include external:true The example above will run all tests that have the external flag set to - true. It is also possible to include all examples that have a given tag, + `true`. It is also possible to include all examples that have a given tag, regardless of its value: mix test --include external @@ -73,7 +96,7 @@ defmodule Mix.Tasks.Test do first (either in the test helper or via the `--exclude` option), the `--include` flag has no effect. - For this reason, mix also provides an `--only` option that excludes all + For this reason, Mix also provides an `--only` option that excludes all tests and includes only the given ones: mix test --only external @@ -91,14 +114,22 @@ defmodule Mix.Tasks.Test do mix test --only line:12 test/some/particular/file_test.exs + Note that line filter takes the closest test on or before the given line number. + In the case a single file contains more than one test module (test case), + line filter applies to every test case before the given line number, thus more + than one test might be taken for the run. 
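+
+  As a sketch of how the `--stale` and `--listen-on-stdin` options above can be
+  combined, an external file watcher can feed newlines into this task so that
+  only affected tests are re-run on changes (`fswatch` is just one example; any
+  command that prints a line when files change will do):
+
+      fswatch lib test | mix test --stale --listen-on-stdin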
+ ## Configuration * `:test_paths` - list of paths containing test files, defaults to - `["test"]`. It is expected all test paths to contain a `test_helper.exs` - file. + `["test"]` if the `test` directory exists, otherwise it defaults to `[]`. + It is expected all test paths to contain a `test_helper.exs` file. * `:test_pattern` - a pattern to load test files, defaults to `*_test.exs`. + * `:warn_test_pattern` - a pattern to match potentially missed test files + and display a warning, defaults to `*_test.ex`. + * `:test_coverage` - a set of options to be passed down to the coverage mechanism. @@ -115,39 +146,64 @@ defmodule Mix.Tasks.Test do test_coverage: [tool: CoverModule] `CoverModule` can be any module that exports `start/2`, receiving the - compilation path and the `test_coverage` options as arguments. It must - return an anonymous function of zero arity that will be run after the - test suite is done or nil. + compilation path and the `test_coverage` options as arguments. + It must return either `nil` or an anonymous function of zero arity that will + be run after the test suite is done. + + ## "Stale" + + The `--stale` command line option attempts to run only those test files which + reference modules that have changed since the last time you ran this task with + `--stale`. + + The first time this task is run with `--stale`, all tests are run and a manifest + is generated. On subsequent runs, a test file is marked "stale" if any modules it + references (and any modules those modules reference, recursively) were modified + since the last run with `--stale`. A test file is also marked "stale" if it has + been changed since the last run with `--stale`. """ @switches [force: :boolean, color: :boolean, cover: :boolean, trace: :boolean, max_cases: :integer, include: :keep, - exclude: :keep, seed: :integer, only: :keep] + exclude: :keep, seed: :integer, only: :keep, compile: :boolean, + start: :boolean, timeout: :integer, raise: :boolean, + deps_check: :boolean, archives_check: :boolean, elixir_version_check: :boolean, + stale: :boolean, listen_on_stdin: :boolean, formatter: :keep] @cover [output: "cover", tool: Cover] + @spec run(OptionParser.argv) :: :ok def run(args) do - {opts, files, _} = OptionParser.parse(args, switches: @switches) + {opts, files} = OptionParser.parse!(args, strict: @switches) + + if opts[:listen_on_stdin] do + System.at_exit fn _ -> + IO.gets(:stdio, "") + Mix.shell.info "Restarting..." + :init.restart() + Process.sleep(:infinity) + end + end unless System.get_env("MIX_ENV") || Mix.env == :test do - Mix.raise "mix test is running on environment #{Mix.env}. If you are " <> + Mix.raise "\"mix test\" is running on environment \"#{Mix.env}\". If you are " <> "running tests along another task, please set MIX_ENV explicitly" end - Mix.Task.run "deps.loadpaths", args Mix.Task.run "loadpaths", args - unless opts[:no_compile] do - Mix.Task.run "compile", args + if Keyword.get(opts, :compile, true) do + Mix.Project.compile(args) end project = Mix.Project.config - cover = Keyword.merge(@cover, project[:test_coverage] || []) # Start cover after we load deps but before we start the app. 
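+    # When coverage is enabled, the tool's start/2 returns either nil or a
+    # zero-arity function which is invoked after the suite finishes to write
+    # the coverage report (the `cover && cover.()` call below).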
cover = if opts[:cover] do - cover[:tool].start(Mix.Project.compile_path(project), cover) + compile_path = Mix.Project.compile_path(project) + cover = Keyword.merge(@cover, project[:test_coverage] || []) + cover[:tool].start(compile_path, cover) end # Start the app and configure exunit with command line options @@ -156,42 +212,81 @@ defmodule Mix.Tasks.Test do # that command line options override test_helper.exs Mix.shell.print_app Mix.Task.run "app.start", args - Application.load(:ex_unit) - opts = ex_unit_opts(opts) - ExUnit.configure(opts) + # Ensure ExUnit is loaded. + case Application.load(:ex_unit) do + :ok -> :ok + {:error, {:already_loaded, :ex_unit}} -> :ok + end + + # Configure ExUnit with command line options before requiring + # test helpers so that the configuration is available in helpers. + # Then configure ExUnit again so command line options override + ex_unit_opts = ex_unit_opts(opts) + ExUnit.configure(ex_unit_opts) - test_paths = project[:test_paths] || ["test"] + test_paths = project[:test_paths] || default_test_paths() Enum.each(test_paths, &require_test_helper(&1)) - ExUnit.configure(opts) + ExUnit.configure(merge_helper_opts(ex_unit_opts)) # Finally parse, require and load the files - test_files = parse_files(files, test_paths) + test_files = parse_files(files, test_paths) test_pattern = project[:test_pattern] || "*_test.exs" + warn_test_pattern = project[:warn_test_pattern] || "*_test.ex" - test_files = Mix.Utils.extract_files(test_files, test_pattern) - Kernel.ParallelRequire.files(test_files) + matched_test_files = Mix.Utils.extract_files(test_files, test_pattern) + matched_warn_test_files = + Mix.Utils.extract_files(test_files, warn_test_pattern) -- matched_test_files - # Run the test suite, coverage tools and register an exit hook - %{failures: failures} = ExUnit.run - if cover, do: cover.() + display_warn_test_pattern(matched_warn_test_files, test_pattern) + + case CT.require_and_run(files, matched_test_files, test_paths, opts) do + {:ok, %{failures: failures}} -> + cover && cover.() + + cond do + failures > 0 and opts[:raise] -> + Mix.raise "mix test failed" + failures > 0 -> + System.at_exit fn _ -> exit({:shutdown, 1}) end + true -> + :ok + end - System.at_exit fn _ -> - if failures > 0, do: System.halt(1) + :noop -> + :ok end end + defp display_warn_test_pattern(files, pattern) do + for file <- files do + Mix.shell.info "warning: #{file} does not match #{inspect pattern} and won't be loaded" + end + end + + @option_keys [:trace, :max_cases, :include, :exclude, + :seed, :timeout, :formatters, :colors] + @doc false def ex_unit_opts(opts) do - opts = opts - |> filter_opts(:include) - |> filter_opts(:exclude) - |> filter_only_opts() + opts + |> filter_opts(:include) + |> filter_opts(:exclude) + |> filter_opts(:only) + |> formatter_opts() + |> color_opts() + |> Keyword.take(@option_keys) + |> default_opts() + end + defp merge_helper_opts(opts) do + merge_opts(opts, :exclude) + end + + defp default_opts(opts) do # Set autorun to false because Mix # automatically runs the test suite for us. - [autorun: false] ++ - Dict.take(opts, [:trace, :max_cases, :color, :include, :exclude, :seed]) + [autorun: false] ++ opts end defp parse_files([], test_paths) do @@ -200,7 +295,7 @@ defmodule Mix.Tasks.Test do defp parse_files([single_file], _test_paths) do # Check if the single file path matches test/path/to_test.exs:123, if it does - # apply `--only line:123` and trim the trailing :123 part. + # apply "--only line:123" and trim the trailing :123 part. 
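+    # ExUnit.Filters.parse_path/1 strips a trailing ":<line>" from the path
+    # (when present) and returns filter options restricting the run to that line.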
{single_file, opts} = ExUnit.Filters.parse_path(single_file) ExUnit.configure(opts) [single_file] @@ -216,6 +311,16 @@ defmodule Mix.Tasks.Test do end end + defp filter_opts(opts, :only) do + if filters = parse_filters(opts, :only) do + opts + |> Keyword.update(:include, filters, &(filters ++ &1)) + |> Keyword.update(:exclude, [:test], &[:test | &1]) + else + opts + end + end + defp filter_opts(opts, key) do if filters = parse_filters(opts, key) do Keyword.put(opts, key, filters) @@ -224,18 +329,33 @@ defmodule Mix.Tasks.Test do end end - defp filter_only_opts(opts) do - if filters = parse_filters(opts, :only) do - opts - |> Keyword.put_new(:include, []) - |> Keyword.put_new(:exclude, []) - |> Keyword.update!(:include, &(filters ++ &1)) - |> Keyword.update!(:exclude, &[:test|&1]) + def formatter_opts(opts) do + if Keyword.has_key?(opts, :formatter) do + formatters = + opts + |> Keyword.get_values(:formatter) + |> Enum.map(&Module.concat([&1])) + + Keyword.put(opts, :formatters, formatters) else opts end end + defp color_opts(opts) do + case Keyword.fetch(opts, :color) do + {:ok, enabled?} -> + Keyword.put(opts, :colors, [enabled: enabled?]) + :error -> + opts + end + end + + defp merge_opts(opts, key) do + value = List.wrap Application.get_env(:ex_unit, key, []) + Keyword.update(opts, key, value, &Enum.uniq(&1 ++ value)) + end + defp require_test_helper(dir) do file = Path.join(dir, "test_helper.exs") @@ -245,4 +365,12 @@ defmodule Mix.Tasks.Test do Mix.raise "Cannot run tests because test helper file #{inspect file} does not exist" end end + + defp default_test_paths do + if File.dir?("test") do + ["test"] + else + [] + end + end end diff --git a/lib/mix/lib/mix/tasks/xref.ex b/lib/mix/lib/mix/tasks/xref.ex new file mode 100644 index 00000000000..30db584b46e --- /dev/null +++ b/lib/mix/lib/mix/tasks/xref.ex @@ -0,0 +1,468 @@ +defmodule Mix.Tasks.Xref do + use Mix.Task + + alias Mix.Tasks.Compile.Elixir, as: E + import Mix.Compilers.Elixir, only: [read_manifest: 2, source: 1, source: 2, module: 1] + + @shortdoc "Performs cross reference checks" + @recursive true + + @moduledoc """ + Performs cross reference checks between modules. + + ## Xref modes + + The `xref` task expects a mode as first argument: + + mix xref MODE + + All available modes are discussed below. + + ### warnings + + Prints warnings for violated cross reference checks: + + mix xref warnings + + This is the mode used by Mix during compilation. + + ### unreachable + + Prints all unreachable "file:line: module.function/arity" entries: + + mix xref unreachable + + The "file:line" represents the file and line a call to an unknown + "module.function/arity" is made. + + ### callers CALLEE + + Prints all callers of the given `CALLEE`, which can be one of: `Module`, + `Module.function`, or `Module.function/arity`. Examples: + + mix xref callers MyMod + mix xref callers MyMod.fun + mix xref callers MyMod.fun/3 + + ### graph + + Prints a file dependency graph where an edge from `A` to `B` indicates + that `A` depends on `B`. + + mix xref graph --format dot + + The following options are accepted: + + * `--exclude` - paths to exclude + + * `--source` - displays all files that the given source file references (directly or indirectly) + + * `--sink` - displays all files that reference the given file (directly or indirectly) + + * `--format` - can be set to one of: + + * `pretty` - uses Unicode codepoints for formatting the graph. 
+ This is the default except on Windows + + * `plain` - does not use Unicode codepoints for formatting the graph. + This is the default on Windows + + * `dot` - produces a DOT graph description in `xref_graph.dot` in the + current directory. Warning: this will override any previously generated file + + The `--source` and `--sink` options are particularly useful when trying to understand how + the modules in a particular file interact with the whole system. + + ## Shared options + + Those options are shared across all modes: + + * `--no-compile` - does not compile even if files require compilation + + * `--no-deps-check` - does not check dependencies + + * `--no-archives-check` - does not check archives + + * `--no-elixir-version-check` - does not check the Elixir version from mix.exs + + ## Configuration + + All configuration for Xref should be placed under the key `:xref`. + + * `:exclude` - a list of modules and `{module, function, arity}` tuples to ignore when checking + cross references. For example: `[MissingModule, {MissingModule2, :missing_func, 2}]` + + """ + + @switches [compile: :boolean, deps_check: :boolean, archives_check: :boolean, + elixir_version_check: :boolean, exclude: :keep, format: :string, + source: :string, sink: :string] + + @doc """ + Runs this task. + """ + @spec run(OptionParser.argv) :: :ok | :error + def run(args) do + {opts, args} = + OptionParser.parse!(args, strict: @switches) + + Mix.Task.run("loadpaths") + + if Keyword.get(opts, :compile, true) do + Mix.Task.run("compile") + end + + case args do + ["warnings"] -> + warnings() + ["unreachable"] -> + unreachable() + ["callers", callee] -> + callers(callee) + ["graph"] -> + graph(opts) + _ -> + Mix.raise "xref doesn't support this command. For more information run \"mix help xref\"" + end + end + + ## Modes + + defp warnings() do + if unreachable(&print_warnings/2) == [] do + :ok + else + :error + end + end + + defp unreachable() do + if unreachable(&print_entry/2) == [] do + :ok + else + :error + end + end + + defp callers(callee) do + callee + |> filter_for_callee() + |> do_callers() + + :ok + end + + defp graph(opts) do + write_graph(file_references(), excluded(opts), opts) + + :ok + end + + ## Unreachable + + defp unreachable(pair_fun) do + excludes = excludes() + each_source_entries(&source_warnings(&1, excludes), pair_fun) + end + + defp source_warnings(source, excludes) do + source(runtime_dispatches: runtime_dispatches) = source + + for {module, func_arity_lines} <- runtime_dispatches, + exports = load_exports(module), + {{func, arity}, lines} <- func_arity_lines, + warning = unreachable_mfa(exports, module, func, arity, lines, excludes), + do: warning + end + + defp load_exports(module) do + if :code.is_loaded(module) do + # If the module is loaded, we will use the faster function_exported?/3 check + module + else + # Otherwise we get all exports from :beam_lib to avoid loading modules + with file when is_list(file) <- :code.which(module), + {:ok, {^module, [exports: exports]}} <- :beam_lib.chunks(file, [:exports]) do + exports + else + _ -> :unknown_module + end + end + end + + defp unreachable_mfa(exports, module, func, arity, lines, excludes) do + cond do + excluded?(module, func, arity, excludes) -> + nil + skip?(module, func, arity) -> + nil + exports == :unknown_module -> + {Enum.sort(lines), :unknown_module, module, func, arity, nil} + is_atom(exports) and not function_exported?(module, func, arity) -> + {Enum.sort(lines), :unknown_function, module, func, arity, nil} + is_list(exports) and 
{func, arity} not in exports -> + {Enum.sort(lines), :unknown_function, module, func, arity, exports} + true -> + nil + end + end + + ## Print entries + + defp print_entry(file, entries) do + entries + |> Enum.sort() + |> Enum.each(&IO.write(format_entry(file, &1))) + end + + defp format_entry(file, {lines, _, module, function, arity, _}) do + for line <- lines do + [Exception.format_file_line(file, line), ?\s, Exception.format_mfa(module, function, arity), ?\n] + end + end + + ## Print warnings + + defp print_warnings(file, entries) do + prefix = IO.ANSI.format([:yellow, "warning: "]) + entries + |> Enum.sort() + |> Enum.each(&IO.write(:stderr, [prefix, format_warning(file, &1), ?\n])) + end + + defp format_warning(file, {lines, :unknown_function, module, function, arity, exports}) do + message = UndefinedFunctionError.function_not_exported(module, function, arity, exports) + [message, "\n", format_file_lines(file, lines)] + end + + defp format_warning(file, {lines, :unknown_module, module, function, arity, _}) do + ["function ", Exception.format_mfa(module, function, arity), + " is undefined (module #{inspect module} is not available)\n" | format_file_lines(file, lines)] + end + + defp format_file_lines(file, [line]) do + format_file_line(file, line) + end + + defp format_file_lines(file, lines) do + ["Found at #{length(lines)} locations:\n" | + Enum.map(lines, &format_file_line(file, &1))] + end + + defp format_file_line(file, line) do + [" ", file, ?:, Integer.to_string(line), ?\n] + end + + ## "Unreachable" helpers + + @protocol_builtins for {_, type} <- Protocol.__builtin__(), do: type + + defp skip?(:erlang, func, 2) when func in [:andalso, :orelse] do + true + end + + defp skip?(module, :__impl__, 1) do + {maybe_protocol, maybe_builtin} = module |> Module.split() |> Enum.split(-1) + maybe_protocol = Module.concat(maybe_protocol) + maybe_builtin = Module.concat(maybe_builtin) + + maybe_builtin in @protocol_builtins and + Code.ensure_loaded?(maybe_protocol) and + function_exported?(maybe_protocol, :__protocol__, 1) + end + + defp skip?(_, _, _) do + false + end + + defp excludes() do + Mix.Project.config() + |> Keyword.get(:xref, []) + |> Keyword.get(:exclude, []) + |> MapSet.new() + end + + defp excluded?(module, func, arity, excludes) do + MapSet.member?(excludes, module) or MapSet.member?(excludes, {module, func, arity}) + end + + ## Callers + + defp do_callers(filter) do + each_source_entries(&source_calls_for_filter(&1, filter), &print_calls/2) + end + + defp source_calls_for_filter(source, filter) do + runtime_dispatches = source(source, :runtime_dispatches) + compile_dispatches = source(source, :compile_dispatches) + dispatches = runtime_dispatches ++ compile_dispatches + + calls = + for {module, func_arity_lines} <- dispatches, + {{func, arity}, lines} <- func_arity_lines, + filter.({module, func, arity}), + do: {module, func, arity, lines} + + Enum.reduce calls, %{}, fn {module, func, arity, lines}, merged_calls -> + lines = MapSet.new(lines) + Map.update(merged_calls, {module, func, arity}, lines, &MapSet.union(&1, lines)) + end + end + + ## Print callers + + defp print_calls(file, calls) do + calls + |> Enum.sort() + |> Enum.each(&IO.write(format_call(file, &1))) + end + + defp format_call(file, {{module, func, arity}, lines}) do + for line <- Enum.sort(lines), + do: [file, ":", to_string(line), ": ", Exception.format_mfa(module, func, arity), ?\n] + end + + ## "Callers" helpers + + defp filter_for_callee(callee) do + case Mix.Utils.parse_mfa(callee) do + {:ok, mfa_list} 
-> + mfa_list_length = length(mfa_list) + fn {module, function, arity} -> + mfa_list == Enum.take([module, function, arity], mfa_list_length) + end + :error -> + Mix.raise "xref callers CALLEE expects Module, Module.function, or Module.function/arity, " <> + "got: " <> callee + end + end + + ## Graph helpers + + defp excluded(opts) do + opts + |> Keyword.get_values(:exclude) + |> Enum.flat_map(&[{&1, nil}, {&1, "(compile)"}, {&1, "(runtime)"}]) + end + + defp file_references() do + module_sources = + for manifest <- E.manifests(), + manifest_data = read_manifest(manifest, ""), + module(module: module, sources: sources) <- manifest_data, + source <- sources, + source = Enum.find(manifest_data, &match?(source(source: ^source), &1)), + do: {module, source} + + all_modules = MapSet.new(module_sources, &elem(&1, 0)) + + Map.new module_sources, fn {module, source} -> + source(runtime_references: runtime, compile_references: compile, source: file) = source + compile_references = + compile + |> MapSet.new() + |> MapSet.delete(module) + |> MapSet.intersection(all_modules) + |> Enum.filter(&module_sources[&1] != source) + |> Enum.map(&{source(module_sources[&1], :source), "(compile)"}) + + runtime_references = + runtime + |> MapSet.new() + |> MapSet.delete(module) + |> MapSet.intersection(all_modules) + |> Enum.filter(&module_sources[&1] != source) + |> Enum.map(&{source(module_sources[&1], :source), nil}) + + {file, compile_references ++ runtime_references} + end + end + + defp write_graph(file_references, excluded, opts) do + {root, file_references} = + case {opts[:source], opts[:sink]} do + {nil, nil} -> + {Enum.map(file_references, &{elem(&1, 0), nil}) -- excluded, file_references} + + {source, nil} -> + if file_references[source] do + {[{source, nil}], file_references} + else + Mix.raise "Source could not be found: #{source}" + end + + {nil, sink} -> + if file_references[sink] do + file_references = filter_for_sink(file_references, sink) + roots = + file_references + |> Map.delete(sink) + |> Enum.map(&{elem(&1, 0), nil}) + {roots -- excluded, file_references} + else + Mix.raise "Sink could not be found: #{sink}" + end + + {_, _} -> + Mix.raise "mix xref graph expects only one of --source and --sink" + end + + callback = + fn {file, type} -> + children = Map.get(file_references, file, []) + {{file, type}, children -- excluded} + end + + if opts[:format] == "dot" do + Mix.Utils.write_dot_graph!("xref_graph.dot", "xref graph", + root, callback, opts) + """ + Generated "xref_graph.dot" in the current directory. To generate a PNG: + + dot -Tpng xref_graph.dot -o xref_graph.png + + For more options see http://www.graphviz.org/. 
+ """ + |> String.trim_trailing() + |> Mix.shell.info() + else + Mix.Utils.print_tree(root, callback, opts) + end + end + + defp filter_for_sink(file_references, sink) do + file_references + |> invert_references() + |> do_filter_for_sink([{sink, nil}], %{}) + |> invert_references() + end + + defp do_filter_for_sink(file_references, new_nodes, acc) do + Enum.reduce new_nodes, acc, fn {new_node_name, _type}, acc -> + new_nodes = file_references[new_node_name] + if acc[new_node_name] || !new_nodes do + acc + else + do_filter_for_sink(file_references, new_nodes, Map.put(acc, new_node_name, new_nodes)) + end + end + end + + defp invert_references(file_references) do + Enum.reduce file_references, %{}, fn {file, references}, acc -> + Enum.reduce references, acc, fn {reference, type}, acc -> + Map.update(acc, reference, [{file, type}], &[{file, type} | &1]) + end + end + end + + ## Helpers + + defp each_source_entries(entries_fun, pair_fun) do + for manifest <- E.manifests(), + source(source: file) = source <- read_manifest(manifest, ""), + entries = entries_fun.(source), + entries != [] and entries != %{}, + do: pair_fun.(file, entries) + end +end diff --git a/lib/mix/lib/mix/tasks_server.ex b/lib/mix/lib/mix/tasks_server.ex index 10404809590..3ca82b584c8 100644 --- a/lib/mix/lib/mix/tasks_server.ex +++ b/lib/mix/lib/mix/tasks_server.ex @@ -2,28 +2,25 @@ defmodule Mix.TasksServer do @moduledoc false def start_link() do - Agent.start_link(fn -> HashSet.new end, name: __MODULE__) + Agent.start_link(fn -> %{} end, name: __MODULE__) end - def clear_tasks() do - get_and_update fn set -> - { set, HashSet.new } - end + def clear() do + update fn _ -> %{} end end - def run_task(task, proj) do - run_item = { task, proj } + def run(tuple) do get_and_update fn set -> - { not(run_item in set), Set.put(set, run_item) } + {not Map.has_key?(set, tuple), Map.put(set, tuple, true)} end end - def put_task(task, proj) do - update &Set.put(&1, { task, proj }) + def put(tuple) do + update &Map.put(&1, tuple, true) end - def delete_task(task, proj) do - update &Set.delete(&1, { task, proj }) + def delete_many(many) do + update &Map.drop(&1, many) end defp get_and_update(fun) do diff --git a/lib/mix/lib/mix/utils.ex b/lib/mix/lib/mix/utils.ex index 2edbcc4fc5f..f2415fb67fc 100644 --- a/lib/mix/lib/mix/utils.ex +++ b/lib/mix/lib/mix/utils.ex @@ -1,17 +1,15 @@ defmodule Mix.Utils do - @moduledoc """ - Utilities used throughout Mix and tasks. - """ + @moduledoc false @doc """ - Get the mix home. + Gets the Mix home. It defaults to `~/.mix` unless the `MIX_HOME` environment variable is set. Developers should only store entries in the `MIX_HOME` directory which are guaranteed to - work accross multiple Elixir versions, as it is + work across multiple Elixir versions, as it is not recommended to swap the `MIX_HOME` directory as configuration and other important data may be stored there. @@ -21,14 +19,14 @@ defmodule Mix.Utils do end @doc """ - Get all paths defined in the MIX_PATH env variable. + Gets all paths defined in the MIX_PATH env variable. `MIX_PATH` may contain multiple paths. If on Windows, those - paths should be separated by `;`, if on unix systems, use `:`. + paths should be separated by `;`, if on Unix systems, use `:`. 
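+
+  For example, on a Unix-like system (the paths below are only illustrative):
+
+      MIX_PATH="/path/to/foo/ebin:/path/to/bar/ebin"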
""" def mix_paths do if path = System.get_env("MIX_PATH") do - String.split(path, path_separator) + String.split(path, path_separator()) else [] end @@ -42,7 +40,65 @@ defmodule Mix.Utils do end @doc """ - Take a `command` name and attempts to load a module + Parses a string into module, function and arity. + + It returns `{:ok, mfa_list}`, where a `mfa_list` is + `[module, function, arity]`, `[module, function]` or `[module]`, + or the atom `:error`. + + iex> Mix.Utils.parse_mfa("Foo.bar/1") + {:ok, [Foo, :bar, 1]} + iex> Mix.Utils.parse_mfa(":foo.bar/1") + {:ok, [:foo, :bar, 1]} + iex> Mix.Utils.parse_mfa(":foo.bar") + {:ok, [:foo, :bar]} + iex> Mix.Utils.parse_mfa(":foo") + {:ok, [:foo]} + iex> Mix.Utils.parse_mfa("Foo") + {:ok, [Foo]} + + iex> Mix.Utils.parse_mfa("Foo.") + :error + iex> Mix.Utils.parse_mfa("Foo.bar.baz") + :error + iex> Mix.Utils.parse_mfa("Foo.bar/2/2") + :error + """ + def parse_mfa(mfa) do + with {:ok, quoted} <- Code.string_to_quoted(mfa), + [_ | _] = mfa_list <- quoted_to_mfa(quoted) do + {:ok, mfa_list} + else + _ -> :error + end + end + + defp quoted_to_mfa({:/, _, [dispatch, arity]}) when is_integer(arity) do + quoted_to_mf(dispatch, [arity]) + end + defp quoted_to_mfa(dispatch) do + quoted_to_mf(dispatch, []) + end + + defp quoted_to_mf({{:., _, [module, fun]}, _, []}, acc) when is_atom(fun) do + quoted_to_m(module, [fun | acc]) + end + defp quoted_to_mf(module, acc) do + quoted_to_m(module, acc) + end + + defp quoted_to_m({:__aliases__, _, aliases}, acc) do + [Module.concat(aliases) | acc] + end + defp quoted_to_m(atom, acc) when is_atom(atom) do + [atom | acc] + end + defp quoted_to_m(_, _acc) do + [] + end + + @doc """ + Takes a `command` name and attempts to load a module with the command name converted to a module name in the given `at` scope. @@ -69,7 +125,7 @@ defmodule Mix.Utils do end @doc """ - Extract all stale `sources` compared to the given `targets`. + Extracts all stale `sources` compared to the given `targets`. """ def extract_stale(_sources, []), do: [] def extract_stale([], _targets), do: [] @@ -79,7 +135,7 @@ defmodule Mix.Utils do end defp stale_stream(sources, targets) do - modified_target = targets |> Enum.map(&last_modified(&1)) |> Enum.min + modified_target = targets |> Enum.map(&last_modified/1) |> Enum.min Stream.filter(sources, fn(source) -> last_modified(source) > modified_target @@ -89,7 +145,7 @@ defmodule Mix.Utils do @doc """ Returns the date the given path was last modified. - If the path does not exist, it returns the unix epoch + If the path does not exist, it returns the Unix epoch (1970-01-01 00:00:00). """ def last_modified(path) @@ -99,14 +155,36 @@ defmodule Mix.Utils do end def last_modified(path) do - case File.stat(path) do - {:ok, %File.Stat{mtime: mtime}} -> mtime - {:error, _} -> {{1970, 1, 1}, {0, 0, 0}} + {mtime, _size} = last_modified_and_size(path) + mtime + end + + @doc false + def last_modified_and_size(path) do + now = :calendar.universal_time + + case :elixir_utils.read_mtime_and_size(path) do + {:ok, mtime, size} when mtime > now -> + Mix.shell.error("warning: mtime (modified time) for \"#{path}\" was set to the future, resetting to now") + File.touch!(path, now) + {mtime, size} + {:ok, mtime, size} -> + {mtime, size} + {:error, _} -> + {{{1970, 1, 1}, {0, 0, 0}}, 0} end end @doc """ - Extract files from a list of paths. + Prints n files are being compiled with the given extension. 
+ """ + def compiling_n(1, ext), + do: Mix.shell.info "Compiling 1 file (.#{ext})" + def compiling_n(n, ext), + do: Mix.shell.info "Compiling #{n} files (.#{ext})" + + @doc """ + Extracts files from a list of paths. `exts_or_pattern` may be a list of extensions or a `Path.wildcard/1` pattern. @@ -124,116 +202,120 @@ defmodule Mix.Utils do def extract_files(paths, pattern) do Enum.flat_map(paths, fn path -> - if File.regular?(path), do: [path], else: Path.wildcard("#{path}/**/#{pattern}") + case :elixir_utils.read_file_type(path) do + {:ok, :directory} -> Path.wildcard("#{path}/**/#{pattern}") + {:ok, :regular} -> [path] + _ -> [] + end end) |> Enum.uniq end - @doc """ - Converts the given atom or binary to underscore format. - - If an atom is given, it is assumed to be an Elixir module, - so it is converted to a binary and then processed. - - ## Examples - - iex> Mix.Utils.underscore "FooBar" - "foo_bar" - - iex> Mix.Utils.underscore "Foo.Bar" - "foo/bar" + @type tree_node :: {name :: String.Chars.t, edge_info :: String.Chars.t} - iex> Mix.Utils.underscore Foo.Bar - "foo/bar" - - In general, `underscore` can be thought of as the reverse of - `camelize`, however, in some cases formatting may be lost: - - Mix.Utils.underscore "SAPExample" #=> "sap_example" - Mix.Utils.camelize "sap_example" #=> "SapExample" + @doc """ + Prints the given tree according to the callback. + The callback will be invoked for each node and it + must return a `{printed, children}` tuple. """ - def underscore(atom) when is_atom(atom) do - "Elixir." <> rest = Atom.to_string(atom) - underscore(rest) - end - - def underscore(""), do: "" - - def underscore(<>) do - <> <> do_underscore(t, h) - end + @spec print_tree([tree_node], (tree_node -> {tree_node, [tree_node]}), Keyword.t) :: :ok + def print_tree(nodes, callback, opts \\ []) do + pretty? 
= + case Keyword.get(opts, :format) do + "pretty" -> true + "plain" -> false + _other -> elem(:os.type, 0) != :win32 + end - defp do_underscore(<>, _) when h in ?A..?Z and not t in ?A..?Z do - <> <> do_underscore(rest, t) + print_tree(nodes, _depth = [], _parent = nil, _seen = MapSet.new(), pretty?, callback) + :ok end - defp do_underscore(<>, prev) when h in ?A..?Z and not prev in ?A..?Z do - <> <> do_underscore(t, h) + defp print_tree(_nodes = [], _depth, _parent, seen, _pretty, _callback) do + seen end - defp do_underscore(<>, _) do - <> <> do_underscore(t, ?-) - end + defp print_tree([node | nodes], depth, parent, seen, pretty?, callback) do + {{name, info}, children} = callback.(node) + key = {parent, name} - defp do_underscore(<< "..", t :: binary>>, _) do - <<"..">> <> underscore(t) + if MapSet.member?(seen, key) do + seen + else + info = if(info, do: " #{info}", else: "") + Mix.shell.info("#{depth(pretty?, depth)}#{prefix(pretty?, depth, nodes)}#{name}#{info}") + seen = print_tree(children, [(nodes != []) | depth], name, MapSet.put(seen, key), pretty?, callback) + print_tree(nodes, depth, parent, seen, pretty?, callback) + end end - defp do_underscore(<>, _), do: <> + defp depth(_pretty?, []), do: "" + defp depth(pretty?, depth), do: Enum.reverse(depth) |> tl |> Enum.map(&entry(pretty?, &1)) - defp do_underscore(<>, _) do - <> <> underscore(t) - end + defp entry(false, true), do: "| " + defp entry(false, false), do: " " + defp entry(true, true), do: "│ " + defp entry(true, false), do: " " - defp do_underscore(<>, _) do - <> <> do_underscore(t, h) - end - - defp do_underscore(<<>>, _) do - <<>> - end + defp prefix(false, [], _), do: "" + defp prefix(false, _, []), do: "`-- " + defp prefix(false, _, _), do: "|-- " + defp prefix(true, [], _), do: "" + defp prefix(true, _, []), do: "└── " + defp prefix(true, _, _), do: "├── " @doc """ - Converts the given string to CamelCase format. - - ## Examples - - iex> Mix.Utils.camelize "foo_bar" - "FooBar" + Outputs the given tree according to the callback as a DOT graph. + The callback will be invoked for each node and it + must return a `{printed, children}` tuple. """ - def camelize(""), do: "" - - def camelize(<>) do - camelize(t) + @spec write_dot_graph!(Path.t, String.t, [tree_node], (tree_node -> {tree_node, [tree_node]}), Keyword.t) :: :ok + def write_dot_graph!(path, title, nodes, callback, _opts \\ []) do + {dot, _} = build_dot_graph(make_ref(), nodes, MapSet.new(), callback) + File.write! 
path, "digraph \"#{title}\" {\n#{dot}}\n" end - def camelize(<>) do - <> <> do_camelize(t) - end + defp build_dot_graph(_parent, [], seen, _callback), do: {"", seen} + defp build_dot_graph(parent, [node | nodes], seen, callback) do + {{name, edge_info}, children} = callback.(node) + key = {parent, name} - defp do_camelize(<>) do - do_camelize(<< ?_, t :: binary >>) + if MapSet.member?(seen, key) do + {"", seen} + else + seen = MapSet.put(seen, key) + current = build_dot_current(parent, name, edge_info) + {children, seen} = build_dot_graph(name, children, seen, callback) + {siblings, seen} = build_dot_graph(parent, nodes, seen, callback) + {current <> children <> siblings, seen} + end end - defp do_camelize(<>) when h in ?a..?z do - <> <> do_camelize(t) - end + defp build_dot_current(parent, name, edge_info) do + edge_info = + if edge_info do + ~s( [label="#{edge_info}"]) + end - defp do_camelize(<>) do - <<>> - end + parent = + unless is_reference(parent) do + ~s("#{parent}" -> ) + end - defp do_camelize(<>) do - <> <> camelize(t) + ~s( #{parent}"#{name}"#{edge_info}\n) end - defp do_camelize(<>) do - <> <> do_camelize(t) + @doc false + def underscore(value) do + IO.warn "Mix.Utils.underscore/1 is deprecated, use Macro.underscore/1 instead" + Macro.underscore(value) end - defp do_camelize(<<>>) do - <<>> + @doc false + def camelize(value) do + IO.warn "Mix.Utils.camelize/1 is deprecated, use Macro.camelize/1 instead" + Macro.camelize(value) end @doc """ @@ -258,8 +340,11 @@ defmodule Mix.Utils do end def module_name_to_command(module, nesting) do - t = Regex.split(~r/\./, to_string(module)) - t |> Enum.drop(nesting) |> Enum.map(&first_to_lower(&1)) |> Enum.join(".") + module + |> to_string() + |> String.split(".") + |> Enum.drop(nesting) + |> Enum.map_join(".", &Macro.underscore/1) end @doc """ @@ -271,70 +356,65 @@ defmodule Mix.Utils do "Compile.Elixir" """ - def command_to_module_name(s) do - Regex.split(~r/\./, to_string(s)) |> - Enum.map(&first_to_upper(&1)) |> - Enum.join(".") + def command_to_module_name(command) do + command + |> to_string() + |> String.split(".") + |> Enum.map_join(".", &Macro.camelize/1) end - defp first_to_upper(<>), do: <> <> t - defp first_to_upper(<<>>), do: <<>> - - defp first_to_lower(<>), do: <> <> t - defp first_to_lower(<<>>), do: <<>> - - defp to_upper_char(char) when char in ?a..?z, do: char - 32 - defp to_upper_char(char), do: char - - defp to_lower_char(char) when char in ?A..?Z, do: char + 32 - defp to_lower_char(char), do: char - @doc """ - Symlink directory `source` to `target` or copy it recursively + Symlinks directory `source` to `target` or copies it recursively in case symlink fails. - Expect source and target to be absolute paths as it generates + Expects source and target to be absolute paths as it generates a relative symlink. 
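+
+  On Windows the symlink is created with an absolute target instead, since
+  relative symbolic links are broken there, and if creating the symlink fails
+  the directory is copied recursively as a fallback.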
""" def symlink_or_copy(source, target) do if File.exists?(source) do - source_list = String.to_char_list(source) + # Relative symbolic links on Windows are broken + link = case :os.type do + {:win32, _} -> source + _ -> make_relative_path(source, target) + end |> String.to_charlist + case :file.read_link(target) do - {:ok, ^source_list} -> + {:ok, ^link} -> :ok {:ok, _} -> File.rm!(target) - do_symlink_or_copy(source, target) + do_symlink_or_copy(source, target, link) {:error, :enoent} -> - do_symlink_or_copy(source, target) + do_symlink_or_copy(source, target, link) {:error, _} -> - File.rm_rf!(target) - do_symlink_or_copy(source, target) + unless File.dir?(target) do + File.rm_rf!(target) + end + do_symlink_or_copy(source, target, link) end else {:error, :enoent} end end - defp do_symlink_or_copy(source, target) do - if match? {:win32, _}, :os.type do - {:ok, File.cp_r!(source, target)} - else - symlink_source = make_relative_path(source, target) - case :file.make_symlink(symlink_source, target) do - :ok -> :ok - {:error, _} -> {:ok, File.cp_r!(source, target)} - end + defp do_symlink_or_copy(source, target, link) do + case :file.make_symlink(link, target) do + :ok -> + :ok + {:error, _} -> + {:ok, File.cp_r!(source, target, fn(orig, dest) -> + File.stat!(orig).mtime > File.stat!(dest).mtime + end)} end end - # Make a relative path in between two paths. + # Make a relative path between the two given paths. # Expects both paths to be fully expanded. defp make_relative_path(source, target) do do_make_relative_path(Path.split(source), Path.split(target)) end - defp do_make_relative_path([h|t1], [h|t2]) do + defp do_make_relative_path([h | t1], [h | t2]) do do_make_relative_path(t1, t2) end @@ -346,68 +426,107 @@ defmodule Mix.Utils do @doc """ Opens and reads content from either a URL or a local filesystem path. - Used by tasks like `local.install` and `local.rebar` that support - installation either from a URL or a local file. + Returns the contents as a `{:ok, binary}`, `:badpath` for invalid + paths or `{:local, message}` for local errors and `{:remote, message}` + for remote ones. + + ## Options - Raises if the given path is not a url, nor a file or if the - file or url are invalid. + * `:sha512` - checks against the given SHA-512 checksum. Returns + `{:checksum, message}` in case it fails """ - def read_path!(path) do + @spec read_path(String.t, Keyword.t) :: + {:ok, binary} | :badpath | {:remote, String.t} | + {:local, String.t} | {:checksum, String.t} + def read_path(path, opts \\ []) do cond do - url?(path) -> read_url(https://github.com/path) - file?(path) -> read_file(path) - :else -> Mix.raise "Expected #{path} to be a url or a local file path" + url?(path) -> + read_httpc(path) |> checksum(opts) + file?(path) -> + read_file(path) |> checksum(opts) + true -> + :badpath + end + end + + @checksums [:sha512] + + defp checksum({:ok, binary} = return, opts) do + Enum.find_value @checksums, return, fn hash -> + with expected when expected != nil <- opts[hash], + actual when actual != expected <- hexhash(binary, hash) do + {:checksum, """ + Data does not match the given SHA-512 checksum. + + Expected: #{expected} + Actual: #{actual} + """} + else + _ -> nil + end + end + end + + defp checksum({_, _} = error, _opts) do + error + end + + defp hexhash(binary, hash) do + Base.encode16 :crypto.hash(hash, binary), case: :lower + end + + @doc """ + Prompts the user to overwrite the file if it exists. Returns + the user input. 
+ """ + def can_write?(path) do + if File.exists?(path) do + full = Path.expand(path) + Mix.shell.yes?(Path.relative_to_cwd(full) <> " already exists, overwrite?") + else + true end end defp read_file(path) do - File.read!(path) + try do + {:ok, File.read!(path)} + rescue + e in [File.Error] -> {:local, Exception.message(e)} + end end - defp read_url(https://github.com/path) do - :ssl.start - :inets.start + defp read_httpc(path) do + {:ok, _} = Application.ensure_all_started(:ssl) + {:ok, _} = Application.ensure_all_started(:inets) - # Starting a http client profile allows us to scope - # the effects of using a http proxy to this function - {:ok, pid} = :inets.start(:httpc, [{:profile, :mix}]) + # Starting an HTTP client profile allows us to scope + # the effects of using an HTTP proxy to this function + {:ok, _pid} = :inets.start(:httpc, [{:profile, :mix}]) headers = [{'user-agent', 'Mix/#{System.version}'}] request = {:binary.bin_to_list(path), headers} - # If a proxy environment variable was supplied add a proxy to httpc - if http_proxy = System.get_env("HTTP_PROXY"), do: proxy(http_proxy) - if https_proxy = System.get_env("HTTPS_PROXY"), do: proxy(https_proxy) - - # We are using relaxed: true because some clients is returning a Location + # We are using relaxed: true because some servers is returning a Location # header with relative paths, which does not follow the spec. This would # cause the request to fail with {:error, :no_scheme} unless :relaxed # is given. - case :httpc.request(:get, request, [relaxed: true], [body_format: :binary], :mix) do + # + # If a proxy environment variable was supplied add a proxy to httpc. + http_options = [relaxed: true] ++ proxy_config(path) + + case :httpc.request(:get, request, http_options, [body_format: :binary], :mix) do {:ok, {{_, status, _}, _, body}} when status in 200..299 -> - body + {:ok, body} {:ok, {{_, status, _}, _, _}} -> - Mix.raise "Could not access url #{path}, got status: #{status}" + {:remote, "httpc request failed with: {:bad_status_code, #{status}}"} {:error, reason} -> - Mix.raise "Could not access url #{path}, error: #{inspect reason}" + {:remote, "httpc request failed with: #{inspect reason}"} end after :inets.stop(:httpc, :mix) end - defp proxy(proxy) do - uri = URI.parse(proxy) - :httpc.set_options([{ proxy_scheme(uri.scheme), - { { uri.host |> String.to_char_list, uri.port }, [] } }], :mix) - end - - defp proxy_scheme(scheme) do - case scheme do - "http" -> :proxy - "https" -> :https_proxy - end - end - defp file?(path) do File.regular?(path) end @@ -415,4 +534,68 @@ defmodule Mix.Utils do defp url?(path) do URI.parse(path).scheme in ["http", "https"] end + + def proxy_config(url) do + {http_proxy, https_proxy} = proxy_env() + + proxy_auth(URI.parse(url), http_proxy, https_proxy) + end + + defp proxy_env do + http_proxy = System.get_env("HTTP_PROXY") || System.get_env("http_proxy") + https_proxy = System.get_env("HTTPS_PROXY") || System.get_env("https_proxy") + no_proxy = no_proxy_env() |> no_proxy_list() + + {proxy_setup(:http, http_proxy, no_proxy), proxy_setup(:https, https_proxy, no_proxy)} + end + + defp no_proxy_env() do + System.get_env("NO_PROXY") || System.get_env("no_proxy") + end + + defp no_proxy_list(nil) do + [] + end + + defp no_proxy_list(no_proxy) do + no_proxy + |> String.split(",") + |> Enum.map(&String.to_charlist/1) + end + + defp proxy_setup(scheme, proxy, no_proxy) do + uri = URI.parse(proxy || "") + + if uri.host && uri.port do + host = String.to_charlist(uri.host) + 
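+      # Register the proxy with the :mix httpc profile; no_proxy lists the
+      # hosts that should bypass the proxy.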
:httpc.set_options([{proxy_scheme(scheme), {{host, uri.port}, no_proxy}}], :mix) + end + + uri + end + + defp proxy_scheme(scheme) do + case scheme do + :http -> :proxy + :https -> :https_proxy + end + end + + defp proxy_auth(%URI{scheme: "http"}, http_proxy, _https_proxy), + do: proxy_auth(http_proxy) + defp proxy_auth(%URI{scheme: "https"}, _http_proxy, https_proxy), + do: proxy_auth(https_proxy) + + defp proxy_auth(nil), + do: [] + defp proxy_auth(%URI{userinfo: nil}), + do: [] + defp proxy_auth(%URI{userinfo: auth}) do + destructure [user, pass], String.split(auth, ":", parts: 2) + + user = String.to_charlist(user) + pass = String.to_charlist(pass || "") + + [proxy_auth: {user, pass}] + end end diff --git a/lib/mix/mix.exs b/lib/mix/mix.exs index 39f6ee8d8d1..59d406c1370 100644 --- a/lib/mix/mix.exs +++ b/lib/mix/mix.exs @@ -5,15 +5,12 @@ defmodule Mix.Mixfile do [app: :mix, build_per_environment: false, version: System.version, - escript_main_module: Mix.CLI] + escript: [main_module: Mix.CLI]] end def application do - [registered: [Mix.TasksServer, Mix.ProjectStack], + [registered: [Mix.State, Mix.TasksServer, Mix.ProjectStack], mod: {Mix, []}, - env: [shell: Mix.Shell.IO, - env: :dev, - scm: [Mix.SCM.Git, Mix.SCM.Path], - colors: []]] + env: [colors: []]] end end diff --git a/lib/mix/test/fixtures/.gitignore b/lib/mix/test/fixtures/.gitignore index a9a1386193c..b57d6fa2449 100644 --- a/lib/mix/test/fixtures/.gitignore +++ b/lib/mix/test/fixtures/.gitignore @@ -1,3 +1,6 @@ -git_repo -deps_on_git_repo -git_rebar +# Remember to update Makefile "clean_residual_files" target for any modifications +# made in this file. +/deps_on_git_repo/ +/git_rebar/ +/git_repo/ +/git_sparse_repo/ diff --git a/lib/mix/test/fixtures/archive/ebin/local_sample.app b/lib/mix/test/fixtures/archive/ebin/local_sample.app new file mode 100644 index 00000000000..cb713fe247f --- /dev/null +++ b/lib/mix/test/fixtures/archive/ebin/local_sample.app @@ -0,0 +1,3 @@ +{application,local_sample, + [{modules,['Elixir.Mix.Tasks.Local.Sample']}, + {applications,[kernel,stdlib,elixir]}]}. diff --git a/lib/mix/test/fixtures/archive/invalid-archive-0.1.0.ez b/lib/mix/test/fixtures/archive/invalid-archive-0.1.0.ez new file mode 100644 index 00000000000..e69de29bb2d diff --git a/lib/mix/test/fixtures/archive/lib/local.sample.ex b/lib/mix/test/fixtures/archive/lib/local.sample.ex index 0ecafccdc80..b2238e9d99f 100644 --- a/lib/mix/test/fixtures/archive/lib/local.sample.ex +++ b/lib/mix/test/fixtures/archive/lib/local.sample.ex @@ -7,4 +7,4 @@ defmodule Mix.Tasks.Local.Sample do def run(_) do Mix.shell.info "sample" end -end \ No newline at end of file +end diff --git a/lib/mix/test/fixtures/compile_erlang/src/b.erl b/lib/mix/test/fixtures/compile_erlang/src/b.erl index 05d830f4d69..ab8cb39190f 100644 --- a/lib/mix/test/fixtures/compile_erlang/src/b.erl +++ b/lib/mix/test/fixtures/compile_erlang/src/b.erl @@ -1,6 +1,7 @@ -module(b). --compile(export_all). +-export([b/0]). +-callback c() -> term(). -record(br, {cell=undefined}). -b() -> #br{cell=specified}. \ No newline at end of file +b() -> #br{cell=specified}. diff --git a/lib/mix/test/fixtures/compile_erlang/src/c.erl b/lib/mix/test/fixtures/compile_erlang/src/c.erl index 78f1022eaa7..62c54b0b4d1 100644 --- a/lib/mix/test/fixtures/compile_erlang/src/c.erl +++ b/lib/mix/test/fixtures/compile_erlang/src/c.erl @@ -1,6 +1,7 @@ -module(c). --compile(export_all). +-export([c/0]). -include("r.hrl"). +-behaviour(b). -c() -> #r{cell=specified}. 
\ No newline at end of file +c() -> #r{cell=specified}. diff --git a/lib/mix/test/fixtures/configs/bad_import.exs b/lib/mix/test/fixtures/configs/bad_import.exs index 2fce1f4eca8..f10a895ee01 100644 --- a/lib/mix/test/fixtures/configs/bad_import.exs +++ b/lib/mix/test/fixtures/configs/bad_import.exs @@ -1,3 +1,3 @@ use Mix.Config -import_config "bad_root.exs" \ No newline at end of file +import_config "bad_root.exs" diff --git a/lib/mix/test/fixtures/configs/good_config.exs b/lib/mix/test/fixtures/configs/good_config.exs index 34c8084ccaf..0c13b63b986 100644 --- a/lib/mix/test/fixtures/configs/good_config.exs +++ b/lib/mix/test/fixtures/configs/good_config.exs @@ -1 +1 @@ -[my_app: [key: :value]] \ No newline at end of file +[my_app: [key: :value]] diff --git a/lib/mix/test/fixtures/configs/imports_recursive.exs b/lib/mix/test/fixtures/configs/imports_recursive.exs new file mode 100644 index 00000000000..65ce85ef8ec --- /dev/null +++ b/lib/mix/test/fixtures/configs/imports_recursive.exs @@ -0,0 +1,3 @@ +use Mix.Config + +import_config "recursive.exs" diff --git a/lib/mix/test/fixtures/configs/nested.exs b/lib/mix/test/fixtures/configs/nested.exs new file mode 100644 index 00000000000..2e1da469142 --- /dev/null +++ b/lib/mix/test/fixtures/configs/nested.exs @@ -0,0 +1,2 @@ +use Mix.Config +config :app, Repo, key: [nested: true] diff --git a/lib/mix/test/fixtures/configs/recursive.exs b/lib/mix/test/fixtures/configs/recursive.exs new file mode 100644 index 00000000000..1a1f6eda0ae --- /dev/null +++ b/lib/mix/test/fixtures/configs/recursive.exs @@ -0,0 +1,3 @@ +use Mix.Config + +import_config "imports_recursive.exs" diff --git a/lib/mix/test/fixtures/deps_cycle/app1/mix.exs b/lib/mix/test/fixtures/deps_cycle/app1/mix.exs new file mode 100644 index 00000000000..d0d6d3d271c --- /dev/null +++ b/lib/mix/test/fixtures/deps_cycle/app1/mix.exs @@ -0,0 +1,13 @@ +defmodule App1 do + use Mix.Project + + def project do + [ + app: :app1, + version: "0.1.0", + deps: [ + {:app2, "0.1.0", in_umbrella: true} + ] + ] + end +end diff --git a/lib/mix/test/fixtures/deps_cycle/app2/mix.exs b/lib/mix/test/fixtures/deps_cycle/app2/mix.exs new file mode 100644 index 00000000000..bc201e673cc --- /dev/null +++ b/lib/mix/test/fixtures/deps_cycle/app2/mix.exs @@ -0,0 +1,13 @@ +defmodule App2 do + use Mix.Project + + def project do + [ + app: :app2, + version: "0.1.0", + deps: [ + {:app1, "0.1.0", in_umbrella: true} + ] + ] + end +end diff --git a/lib/mix/test/fixtures/deps_status/_build/dev/lib/nosemver/ebin/nosemver.app b/lib/mix/test/fixtures/deps_status/_build/dev/lib/nosemver/ebin/nosemver.app new file mode 100644 index 00000000000..bbcfa729b70 --- /dev/null +++ b/lib/mix/test/fixtures/deps_status/_build/dev/lib/nosemver/ebin/nosemver.app @@ -0,0 +1,3 @@ +{application, nosemver, [ + {vsn,"0.7"} +]}. 
\ No newline at end of file diff --git a/lib/mix/test/fixtures/deps_status/custom/deps_repo/mix.exs b/lib/mix/test/fixtures/deps_status/custom/deps_repo/mix.exs index 91b8a2106c9..3587b549a32 100644 --- a/lib/mix/test/fixtures/deps_status/custom/deps_repo/mix.exs +++ b/lib/mix/test/fixtures/deps_status/custom/deps_repo/mix.exs @@ -2,12 +2,9 @@ defmodule DepsRepo do use Mix.Project def project do - [ - app: :deps_repo, - version: "0.1.0", - deps: [ - {:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo")} - ] - ] + opts = Process.get(:custom_deps_git_repo_opts) || [] + [app: :deps_repo, + version: "0.1.0", + deps: [{:git_repo, "0.1.0", [git: MixTest.Case.fixture_path("git_repo")] ++ opts}]] end -end \ No newline at end of file +end diff --git a/lib/mix/test/fixtures/deps_status/custom/noscm_repo/mix.exs b/lib/mix/test/fixtures/deps_status/custom/noscm_repo/mix.exs index 1c182683647..ccfa2546e14 100644 --- a/lib/mix/test/fixtures/deps_status/custom/noscm_repo/mix.exs +++ b/lib/mix/test/fixtures/deps_status/custom/noscm_repo/mix.exs @@ -2,11 +2,10 @@ defmodule NoSCMRepo do use Mix.Project def project do - [ app: :noscm_repo, - version: "0.1.0", - deps: [ + [app: :noscm_repo, + version: "0.1.0", + deps: [ {:git_repo, "0.1.0"} - ] - ] + ]] end end diff --git a/lib/mix/test/fixtures/deps_status/custom/raw_repo/lib/raw_repo.ex b/lib/mix/test/fixtures/deps_status/custom/raw_repo/lib/raw_repo.ex index 6774ba7ddf8..806471ca97f 100644 --- a/lib/mix/test/fixtures/deps_status/custom/raw_repo/lib/raw_repo.ex +++ b/lib/mix/test/fixtures/deps_status/custom/raw_repo/lib/raw_repo.ex @@ -2,4 +2,4 @@ defmodule RawRepo do def hello do "world" end -end \ No newline at end of file +end diff --git a/lib/mix/test/fixtures/deps_status/custom/raw_repo/mix.exs b/lib/mix/test/fixtures/deps_status/custom/raw_repo/mix.exs index d53b107b8db..18bfb1d8367 100644 --- a/lib/mix/test/fixtures/deps_status/custom/raw_repo/mix.exs +++ b/lib/mix/test/fixtures/deps_status/custom/raw_repo/mix.exs @@ -1,4 +1,4 @@ -defmodule RawRepo.Mix do +defmodule RawRepo.Mixfile do use Mix.Project def project do diff --git a/lib/mix/test/fixtures/deps_status/deps/invalidapp/mix.exs b/lib/mix/test/fixtures/deps_status/deps/invalidapp/mix.exs index 4c26c245b42..ff811386227 100644 --- a/lib/mix/test/fixtures/deps_status/deps/invalidapp/mix.exs +++ b/lib/mix/test/fixtures/deps_status/deps/invalidapp/mix.exs @@ -2,7 +2,7 @@ defmodule InvalidApp.Mixfile do use Mix.Project def project do - [ app: :invalidapp, - version: "1.0" ] + [app: :invalidapp, + version: "1.0"] end end diff --git a/lib/mix/test/fixtures/deps_status/deps/nosemver/.gitkeep b/lib/mix/test/fixtures/deps_status/deps/nosemver/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/lib/mix/test/fixtures/deps_status/deps/ok/mix.exs b/lib/mix/test/fixtures/deps_status/deps/ok/mix.exs index e914a1ee7a1..c50e957d433 100644 --- a/lib/mix/test/fixtures/deps_status/deps/ok/mix.exs +++ b/lib/mix/test/fixtures/deps_status/deps/ok/mix.exs @@ -2,7 +2,11 @@ defmodule Ok.Mixfile do use Mix.Project def project do - [ app: :ok, - version: "0.1.0" ] + [app: :ok, + version: "0.1.0"] + end + + def application do + [extra_applications: [:logger]] end end diff --git a/lib/mix/test/fixtures/escript_test/config/config.exs b/lib/mix/test/fixtures/escript_test/config/config.exs new file mode 100644 index 00000000000..ce935aa1af2 --- /dev/null +++ b/lib/mix/test/fixtures/escript_test/config/config.exs @@ -0,0 +1,3 @@ +use Mix.Config + +config :escript_test, erl_val: "Erlang value" diff 
--git a/lib/mix/test/fixtures/escript_test/lib/escript_test.ex b/lib/mix/test/fixtures/escript_test/lib/escript_test.ex new file mode 100644 index 00000000000..f274221533e --- /dev/null +++ b/lib/mix/test/fixtures/escript_test/lib/escript_test.ex @@ -0,0 +1,13 @@ +defmodule EscriptTest do + def start do + :ok = Application.start(:escript_test) + end + + def main([]) do + IO.puts Application.get_env(:foobar, :value, "TEST") + end + + def main([protocol]) do + IO.puts Protocol.consolidated?(Module.concat([protocol])) + end +end diff --git a/lib/mix/test/fixtures/escripttest/priv/hello/world.txt b/lib/mix/test/fixtures/escript_test/priv/hello/world.txt similarity index 100% rename from lib/mix/test/fixtures/escripttest/priv/hello/world.txt rename to lib/mix/test/fixtures/escript_test/priv/hello/world.txt diff --git a/lib/mix/test/fixtures/escript_test/src/escript_test.erl b/lib/mix/test/fixtures/escript_test/src/escript_test.erl new file mode 100644 index 00000000000..7d2b34dbcf4 --- /dev/null +++ b/lib/mix/test/fixtures/escript_test/src/escript_test.erl @@ -0,0 +1,11 @@ +-module(escript_test). + +-export([start/0, main/1]). + + +start() -> + ok = application:start(escript_test). + +main(_Args) -> + {ok, Val} = application:get_env(escript_test, erl_val), + io:put_chars(Val). diff --git a/lib/mix/test/fixtures/escripttest/lib/escripttest.ex b/lib/mix/test/fixtures/escripttest/lib/escripttest.ex deleted file mode 100644 index bb60666e0b0..00000000000 --- a/lib/mix/test/fixtures/escripttest/lib/escripttest.ex +++ /dev/null @@ -1,9 +0,0 @@ -defmodule Escripttest do - def start do - :ok = Application.start(:escripttest) - end - - def main(_args) do - IO.puts "TEST" - end -end diff --git a/lib/mix/test/fixtures/no_mixfile/lib/a.ex b/lib/mix/test/fixtures/no_mixfile/lib/a.ex index 5a81e4b84b6..c8eb6fa27d2 100644 --- a/lib/mix/test/fixtures/no_mixfile/lib/a.ex +++ b/lib/mix/test/fixtures/no_mixfile/lib/a.ex @@ -1,3 +1,2 @@ defmodule A do - -end \ No newline at end of file +end diff --git a/lib/mix/test/fixtures/no_mixfile/lib/b.ex b/lib/mix/test/fixtures/no_mixfile/lib/b.ex index faf04809dbf..067ffa22218 100644 --- a/lib/mix/test/fixtures/no_mixfile/lib/b.ex +++ b/lib/mix/test/fixtures/no_mixfile/lib/b.ex @@ -1,3 +1,2 @@ defmodule B do - -end \ No newline at end of file +end diff --git a/lib/mix/test/fixtures/no_mixfile/lib/c.ex b/lib/mix/test/fixtures/no_mixfile/lib/c.ex deleted file mode 100644 index f998cc3153b..00000000000 --- a/lib/mix/test/fixtures/no_mixfile/lib/c.ex +++ /dev/null @@ -1,3 +0,0 @@ -defmodule C do - -end \ No newline at end of file diff --git a/lib/mix/test/fixtures/only_deps/mix.exs b/lib/mix/test/fixtures/only_deps/mix.exs deleted file mode 100644 index 7a10e965a22..00000000000 --- a/lib/mix/test/fixtures/only_deps/mix.exs +++ /dev/null @@ -1,8 +0,0 @@ -defmodule OnlyDeps.Mix do - use Mix.Project - - def project do - [app: :only_deps, version: "0.1.0", - deps: [{:git_repo, git: MixTest.Case.fixture_path("git_repo"), only: :other_env}] ] - end -end diff --git a/lib/mix/test/fixtures/rebar_dep/rebar.config b/lib/mix/test/fixtures/rebar_dep/rebar.config index e2980548ee2..10794f8208f 100644 --- a/lib/mix/test/fixtures/rebar_dep/rebar.config +++ b/lib/mix/test/fixtures/rebar_dep/rebar.config @@ -1,5 +1,3 @@ {sub_dirs, ["apps/*"]}. -{deps, [ - {git_rebar, "0.1..*", {git, "../../test/fixtures/git_rebar", master}} -]}. +{overrides, [{add, rebar_dep, [{src_dirs, ["apps/rebar_dep/src"]}]}]}. 
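As a rough sketch of how the proxy support added to lib/mix/utils.ex above composes: Mix.Utils.proxy_config/1 reads HTTP_PROXY/HTTPS_PROXY (and NO_PROXY), registers the proxy on the :mix httpc profile via :httpc.set_options/2, and returns any proxy_auth option derived from the proxy URL's userinfo, which read_httpc/1 then appends to its request options. The snippet below mirrors read_httpc/1 under assumed placeholder values for the URL and proxy address; Mix.Utils is an internal module, so treat this as illustrative rather than a supported API.

    # Illustrative sketch only: mirrors read_httpc/1 above; the proxy address,
    # credentials and URL are placeholders, and Mix.Utils is internal to Mix.
    System.put_env("HTTP_PROXY", "http://user:secret@proxy.local:8080/")
    System.put_env("NO_PROXY", "localhost,127.0.0.1")

    {:ok, _} = Application.ensure_all_started(:ssl)
    {:ok, _} = Application.ensure_all_started(:inets)
    {:ok, _pid} = :inets.start(:httpc, [{:profile, :mix}])

    url = "http://builds.example.com/elixir.csv"
    headers = [{'user-agent', 'Mix/#{System.version}'}]
    request = {:binary.bin_to_list(url), headers}

    # proxy_config/1 configures the :mix profile as a side effect and returns
    # extra request options (here, proxy_auth derived from user:secret).
    http_options = [relaxed: true] ++ Mix.Utils.proxy_config(url)

    :httpc.request(:get, request, http_options, [body_format: :binary], :mix)
    :inets.stop(:httpc, :mix)
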
diff --git a/lib/mix/test/fixtures/rebar_dep/rebar.config.script b/lib/mix/test/fixtures/rebar_dep/rebar.config.script index e15a35f8c84..c20c273fec8 100644 --- a/lib/mix/test/fixtures/rebar_dep/rebar.config.script +++ b/lib/mix/test/fixtures/rebar_dep/rebar.config.script @@ -1 +1,4 @@ -CONFIG ++ [{'SCRIPT', SCRIPT}]. +CONFIG ++ + [{'SCRIPT', SCRIPT}] ++ + [{erl_opts, [warnings_as_errors]}] ++ + [{deps, [{git_rebar, "0.1..*", {git, filename:absname("../../test/fixtures/git_rebar"), master}}]}]. diff --git a/lib/mix/test/fixtures/rebar_override/rebar.config.script b/lib/mix/test/fixtures/rebar_override/rebar.config.script new file mode 100644 index 00000000000..98fefb465e7 --- /dev/null +++ b/lib/mix/test/fixtures/rebar_override/rebar.config.script @@ -0,0 +1,8 @@ +[ + {deps, [ + {git_rebar, {git, filename:absname("../../test/fixtures/git_rebar")}} + ]}, + {overrides, [ + {override, git_rebar, [{deps, [{git_repo, {git, filename:absname("../../test/fixtures/git_repo")}}]}]} + ]} +]. diff --git a/lib/mix/test/fixtures/test_stale/lib/a.ex b/lib/mix/test/fixtures/test_stale/lib/a.ex new file mode 100644 index 00000000000..d534bf917f1 --- /dev/null +++ b/lib/mix/test/fixtures/test_stale/lib/a.ex @@ -0,0 +1,3 @@ +defmodule A do + def f, do: :ok +end diff --git a/lib/mix/test/fixtures/test_stale/lib/b.ex b/lib/mix/test/fixtures/test_stale/lib/b.ex new file mode 100644 index 00000000000..2cd8410c8fc --- /dev/null +++ b/lib/mix/test/fixtures/test_stale/lib/b.ex @@ -0,0 +1,3 @@ +defmodule B do + def f, do: A.f() +end diff --git a/lib/mix/test/fixtures/test_stale/mix.exs b/lib/mix/test/fixtures/test_stale/mix.exs new file mode 100644 index 00000000000..1fb256dcee3 --- /dev/null +++ b/lib/mix/test/fixtures/test_stale/mix.exs @@ -0,0 +1,9 @@ +defmodule TestStale.Mixfile do + use Mix.Project + + def project do + [app: :test_stale, + version: "0.0.1", + test_pattern: "*_test_stale.exs"] + end +end diff --git a/lib/mix/test/fixtures/test_stale/test/a_test_stale.exs b/lib/mix/test/fixtures/test_stale/test/a_test_stale.exs new file mode 100644 index 00000000000..2251e1d684e --- /dev/null +++ b/lib/mix/test/fixtures/test_stale/test/a_test_stale.exs @@ -0,0 +1,7 @@ +defmodule ATest do + use ExUnit.Case + + test "f" do + assert A.f() == :ok + end +end diff --git a/lib/mix/test/fixtures/test_stale/test/b_test_stale.exs b/lib/mix/test/fixtures/test_stale/test/b_test_stale.exs new file mode 100644 index 00000000000..f7cf745e99e --- /dev/null +++ b/lib/mix/test/fixtures/test_stale/test/b_test_stale.exs @@ -0,0 +1,7 @@ +defmodule BTest do + use ExUnit.Case + + test "f" do + assert B.f() == :ok + end +end diff --git a/lib/mix/test/fixtures/test_stale/test/test_helper.exs b/lib/mix/test/fixtures/test_stale/test/test_helper.exs new file mode 100644 index 00000000000..869559e709e --- /dev/null +++ b/lib/mix/test/fixtures/test_stale/test/test_helper.exs @@ -0,0 +1 @@ +ExUnit.start() diff --git a/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/apps/bar/mix.exs b/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/apps/bar/mix.exs index c9e624f6251..ba481dfb47e 100644 --- a/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/apps/bar/mix.exs +++ b/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/apps/bar/mix.exs @@ -1,10 +1,10 @@ -defmodule Bar.Mix do +defmodule Bar.Mixfile do use Mix.Project def project do Mix.shell.info ":bar env is #{Mix.env}" - [ app: :bar, - version: "0.1.0", - deps: [ {:foo, in_umbrella: true} ] ] + [app: :bar, + version: "0.1.0", + deps: [{:foo, in_umbrella: true}]] end end diff --git 
a/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/apps/dont_error_on_missing_mixfile/.gitkeep b/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/apps/dont_error_on_missing_mixfile/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/apps/foo/mix.exs b/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/apps/foo/mix.exs index c6c5e9d4a68..43401789335 100644 --- a/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/apps/foo/mix.exs +++ b/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/apps/foo/mix.exs @@ -1,9 +1,9 @@ -defmodule Foo.Mix do +defmodule Foo.Mixfile do use Mix.Project def project do Mix.shell.info ":foo env is #{Mix.env}" - [ app: :foo, - version: "0.1.0" ] + [app: :foo, + version: "0.1.0"] end end diff --git a/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/mix.exs b/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/mix.exs index 2717a59e753..d8410c2d39a 100644 --- a/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/mix.exs +++ b/lib/mix/test/fixtures/umbrella_dep/deps/umbrella/mix.exs @@ -2,6 +2,6 @@ defmodule Umbrella.Mixfile do use Mix.Project def project do - [ apps_path: "apps" ] + [apps_path: "apps"] end end diff --git a/lib/mix/test/fixtures/umbrella_dep/mix.exs b/lib/mix/test/fixtures/umbrella_dep/mix.exs index 33ab22d445a..dfaf25e0b3d 100644 --- a/lib/mix/test/fixtures/umbrella_dep/mix.exs +++ b/lib/mix/test/fixtures/umbrella_dep/mix.exs @@ -2,11 +2,11 @@ defmodule UmbrellaDep.Mixfile do use Mix.Project def project do - [ app: :umbrella_dep, - deps: deps ] + [app: :umbrella_dep, + deps: deps()] end defp deps do - [ {:umbrella, path: "deps/umbrella"} ] + [{:umbrella, path: "deps/umbrella"}] end end diff --git a/lib/mix/test/mix/aliases_test.exs b/lib/mix/test/mix/aliases_test.exs new file mode 100644 index 00000000000..ffe755fab1e --- /dev/null +++ b/lib/mix/test/mix/aliases_test.exs @@ -0,0 +1,53 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule Mix.AliasesTest do + use MixTest.Case + + defmodule Aliases do + def project do + [aliases: [h: "hello", + p: &inspect/1, + compile: "hello", + help: ["help", "hello"], + "nested.h": [&Mix.shell.info(inspect(&1)), "h foo bar"]]] + end + end + + setup do + Mix.Project.push Aliases + :ok + end + + test "runs string aliases" do + assert Mix.Task.run("h", []) == "Hello, World!" + assert Mix.Task.run("h", []) == :noop + assert Mix.Task.run("hello", []) == :noop + + Mix.Task.reenable "h" + Mix.Task.reenable "hello" + assert Mix.Task.run("h", ["foo", "bar"]) == "Hello, foo bar!" + end + + test "runs function aliases" do + assert Mix.Task.run("p", []) == "[]" + assert Mix.Task.run("p", []) == :noop + + Mix.Task.reenable "p" + assert Mix.Task.run("p", ["foo", "bar"]) == "[\"foo\", \"bar\"]" + end + + test "runs list aliases" do + assert Mix.Task.run("nested.h", ["baz"]) == "Hello, foo bar baz!" + assert_received {:mix_shell, :info, ["[]"]} + end + + test "run alias override" do + assert Mix.Task.run("compile", []) == "Hello, World!" + assert Mix.Task.run("compile", []) == :noop + end + + test "run alias override with recursion" do + assert Mix.Task.run("help", []) == "Hello, World!" 
+ assert_received {:mix_shell, :info, ["mix test" <> _]} + end +end diff --git a/lib/mix/test/mix/archive_test.exs b/lib/mix/test/mix/archive_test.exs deleted file mode 100644 index eca377eb4c6..00000000000 --- a/lib/mix/test/mix/archive_test.exs +++ /dev/null @@ -1,23 +0,0 @@ -Code.require_file "../test_helper.exs", __DIR__ - -defmodule Mix.ArchiveTest do - use MixTest.Case - - doctest Mix.Archive - - test "archive" do - in_fixture "archive", fn -> - Mix.Archive.create(".", "sample.ez") - archive = 'sample.ez' - assert File.exists?(archive) - assert has_zip_file?(archive, 'sample/priv/not_really_an.so') - assert has_zip_file?(archive, 'sample/ebin/Elixir.Mix.Tasks.Local.Sample.beam') - end - end - - defp has_zip_file?(archive, name) do - :zip.list_dir(archive) - |> elem(1) - |> Enum.find(&match?({:zip_file, ^name, _, _, _, _}, &1)) - end -end diff --git a/lib/mix/test/mix/cli_test.exs b/lib/mix/test/mix/cli_test.exs index c244340c1d8..f632343e091 100644 --- a/lib/mix/test/mix/cli_test.exs +++ b/lib/mix/test/mix/cli_test.exs @@ -3,29 +3,6 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Mix.CLITest do use MixTest.Case - test "env configs" do - in_fixture "no_mixfile", fn -> - File.write! "custom.exs", """ - defmodule P do - use Mix.Project - def project, do: [app: :p, version: "0.1.0"] - end - """ - - System.put_env("MIX_ENV", "prod") - System.put_env("MIX_EXS", "custom.exs") - - output = System.cmd ~s(#{elixir_executable} #{mix_executable}) <> - ~s( run -e "IO.inspect {Mix.env, System.argv}" -- 1 2 3) - - System.delete_env("MIX_ENV") - System.delete_env("MIX_EXS") - - assert output =~ ~s({:prod, ["1", "2", "3"]}) - assert output =~ "Compiled lib/a.ex" - end - end - test "default task" do in_fixture "no_mixfile", fn -> File.write! "mix.exs", """ @@ -34,28 +11,13 @@ defmodule Mix.CLITest do def project, do: [app: :p, version: "0.1.0"] end """ - output = mix "" + mix ~w[] assert File.regular?("_build/dev/lib/p/ebin/Elixir.A.beam") - assert output =~ "Compiled lib/a.ex" end end - test "custom mix.exs" do - in_fixture "no_mixfile", fn -> - File.write! "mix.exs", """ - defmodule P do - use Mix.Project - def project, do: [app: :p] - end - """ - output = mix "" - assert File.regular?("_build/dev/lib/p/ebin/Elixir.A.beam") - assert output =~ "Compiled lib/a.ex" - end - end - - test "compiles and invokes simple task from CLI" do - in_fixture "no_mixfile", fn -> + test "compiles and invokes simple task from CLI", context do + in_tmp context.test, fn -> File.mkdir_p!("lib") File.write! "mix.exs", """ @@ -73,53 +35,122 @@ defmodule Mix.CLITest do """ File.write! 
"lib/hello.ex", """ - defmodule Mix.Tasks.Hello do + defmodule Mix.Tasks.MyHello do use Mix.Task - @shortdoc "Hello" + @shortdoc "Says hello" def run(_) do IO.puts Mix.Project.get!.hello_world + Mix.shell.info("This won't appear") + Mix.raise("oops") end end """ - contents = mix("hello") + contents = mix ~w[my_hello], [{"MIX_QUIET", "1"}] assert contents =~ "Hello from MyProject!\n" - assert contents =~ "Compiled lib/hello.ex\n" + refute contents =~ "This won't appear" + + contents = mix ~w[my_hello], [{"MIX_QUIET", "0"}] + assert contents =~ "Hello from MyProject!\n" + assert contents =~ "This won't appear" + + contents = mix ~w[my_hello], [{"MIX_DEBUG", "1"}] + assert contents =~ "** Running mix my_hello (inside MyProject)" + assert contents =~ "** (Mix.Error) oops" + + contents = mix ~w[my_hello], [{"MIX_DEBUG", "0"}] + refute contents =~ "** Running mix my_hello (inside MyProject)" + refute contents =~ "** (Mix.Error) oops" end end - test "no task error" do - in_fixture "no_mixfile", fn -> - contents = mix("no_task") - assert contents =~ "** (Mix) The task no_task could not be found\n" + test "no task error", context do + in_tmp context.test, fn -> + contents = mix ~w[no_task] + assert contents =~ "** (Mix) The task \"no_task\" could not be found" end end - test "--help smoke test" do - in_fixture "no_mixfile", fn -> - output = mix "--help" - assert output =~ ~r/mix compile\s+# Compile source files/ + test "tasks with slashes in them raise a NoTaskError right away", context do + in_tmp context.test, fn -> + contents = mix ~w[my/task] + assert contents =~ "** (Mix) The task \"my/task\" could not be found" + end + end + + test "--help smoke test", context do + in_tmp context.test, fn -> + output = mix ~w[--help] + assert output =~ ~r/mix compile\s+# Compiles source files/ refute output =~ "mix invalid" end end - test "--version smoke test" do - in_fixture "no_mixfile", fn -> - output = mix "--version" - assert output =~ ~r/Elixir [0-9\.a-z]+/ + test "--version smoke test", context do + in_tmp context.test, fn -> + output = mix ~w[--version] + assert output =~ ~r/Erlang.+\n\nMix [0-9\.a-z]+/ end end + test "env config", context do + in_tmp context.test, fn -> + File.write! "custom.exs", """ + defmodule P do + use Mix.Project + def project, do: [app: :p, version: "0.1.0"] + end + """ + + System.put_env("MIX_ENV", "prod") + System.put_env("MIX_EXS", "custom.exs") + + output = mix ["run", "-e", "IO.inspect {Mix.env, System.argv}", + "--", "1", "2", "3"] + assert output =~ ~s({:prod, ["1", "2", "3"]}) + end + after + System.delete_env("MIX_ENV") + System.delete_env("MIX_EXS") + end + + test "env config defaults to the tasks's preferred cli environment", context do + in_tmp context.test, fn -> + File.write! "custom.exs", """ + defmodule P do + use Mix.Project + def project, do: [app: :p, version: "0.1.0"] + end + + defmodule Mix.Tasks.TestTask do + use Mix.Task + @preferred_cli_env :prod + + def run(args) do + IO.inspect {Mix.env, args} + end + end + """ + + System.put_env("MIX_EXS", "custom.exs") + + output = mix ["test_task", "a", "b", "c"] + assert output =~ ~s({:prod, ["a", "b", "c"]}) + end + after + System.delete_env("MIX_EXS") + end + test "new with tests" do in_tmp "new_with_tests", fn -> - output = mix "new ." + output = mix ~w[new .] 
assert output =~ "* creating lib/new_with_tests.ex" - output = mix "test test/new_with_tests_test.exs --cover" + output = mix ~w[test test/new_with_tests_test.exs --cover] assert File.regular?("_build/test/lib/new_with_tests/ebin/Elixir.NewWithTests.beam") - assert output =~ "1 tests, 0 failures" + assert output =~ "2 tests, 0 failures" assert output =~ "Generating cover results ..." assert File.regular?("cover/Elixir.NewWithTests.html") end @@ -127,31 +158,12 @@ defmodule Mix.CLITest do test "new --sup with tests" do in_tmp "sup_with_tests", fn -> - output = mix "new --sup ." + output = mix ~w[new --sup .] assert output =~ "* creating lib/sup_with_tests.ex" - output = mix "test" + output = mix ~w[test] assert File.regular?("_build/test/lib/sup_with_tests/ebin/Elixir.SupWithTests.beam") - assert output =~ "1 tests, 0 failures" - end - end - - defp mix(args) do - System.cmd "#{elixir_executable} #{mix_executable} #{args} #{stderr_on_win}" - end - - defp stderr_on_win do - case :os.type do - {:win32, _} -> "2>&1" - _ -> "" + assert output =~ "2 tests, 0 failures" end end - - defp mix_executable do - Path.expand("../../../../bin/mix", __DIR__) - end - - defp elixir_executable do - Path.expand("../../../../bin/elixir", __DIR__) - end end diff --git a/lib/mix/test/mix/config_test.exs b/lib/mix/test/mix/config_test.exs index 762f78941a8..c5f5d4abbb5 100644 --- a/lib/mix/test/mix/config_test.exs +++ b/lib/mix/test/mix/config_test.exs @@ -5,37 +5,103 @@ defmodule Mix.ConfigTest do doctest Mix.Config + defmacrop config do + quote do + Mix.Config.Agent.get(var!(config_agent, Mix.Config)) + end + end + test "config/2" do use Mix.Config - assert var!(config, Mix.Config) == [] + assert config() == [] config :lager, key: :value - assert var!(config, Mix.Config) == [lager: [key: :value]] + assert config() == [lager: [key: :value]] config :lager, other: :value - assert var!(config, Mix.Config) == [lager: [other: :value, key: :value]] + assert config() == [lager: [key: :value, other: :value]] config :lager, key: :other - assert var!(config, Mix.Config) == [lager: [key: :other, other: :value]] + assert config() == [lager: [other: :value, key: :other]] + + # Works inside functions too... + f = fn -> config(:lager, key: :fn) end + f.() + assert config() == [lager: [other: :value, key: :fn]] + + # ...and in for comprehensions. 
+ for _ <- 0..0, do: config(:lager, key: :for) + assert config() == [lager: [other: :value, key: :for]] end test "config/3" do use Mix.Config config :app, Repo, key: :value - assert var!(config, Mix.Config) == [app: [{Repo, key: :value}]] + assert config() == [app: [{Repo, key: :value}]] config :app, Repo, other: :value - assert var!(config, Mix.Config) == [app: [{Repo, other: :value, key: :value}]] + assert config() == [app: [{Repo, key: :value, other: :value}]] config :app, Repo, key: :other - assert var!(config, Mix.Config) == [app: [{Repo, [key: :other, other: :value]}]] + assert config() == [app: [{Repo, other: :value, key: :other}]] + + config :app, Repo, key: [nested: false] + assert config() == [app: [{Repo, other: :value, key: [nested: false]}]] + + config :app, Repo, key: [nested: true] + assert config() == [app: [{Repo, other: :value, key: [nested: true]}]] + + config :app, Repo, key: :other + assert config() == [app: [{Repo, other: :value, key: :other}]] end test "import_config/1" do use Mix.Config import_config fixture_path("configs/good_config.exs") - assert var!(config, Mix.Config) == [my_app: [key: :value]] + assert config() == [my_app: [key: :value]] + end + + test "import_config/1 raises for recursive import" do + use Mix.Config + + exception = + assert_raise Mix.Config.LoadError, fn -> + import_config fixture_path("configs/imports_recursive.exs") + end + + message = Exception.message(exception) + + assert message =~ ~r/could not load config .*\/imports_recursive\.exs\n/ + assert message =~ ~r/\(ArgumentError\) recursive load of.*\/imports_recursive.exs detected/ + end + + test "import_config/1 with wildcards" do + use Mix.Config + import_config fixture_path("configs/good_*.exs") + assert config() == [my_app: [key: :value]] + end + + test "import_config/1 with wildcard with no matches" do + use Mix.Config + import_config fixture_path("configs/nonexistent_*.exs") + assert config() == [] + end + + test "import_config/1 with nested" do + use Mix.Config + config :app, Repo, key: [nested: false, other: true] + + import_config fixture_path("configs/nested.exs") + assert config() == [app: [{Repo, key: [other: true, nested: true]}]] + end + + test "import_config/1 with bad path" do + use Mix.Config + + assert_raise Mix.Config.LoadError, ~r"could not load config", fn -> + import_config fixture_path("configs/unknown.exs") + end end test "read!/1" do diff --git a/lib/mix/test/mix/dep/lock_test.exs b/lib/mix/test/mix/dep/lock_test.exs index 9ffdedf70c8..6ca4e1becad 100644 --- a/lib/mix/test/mix/dep/lock_test.exs +++ b/lib/mix/test/mix/dep/lock_test.exs @@ -8,16 +8,16 @@ defmodule Mix.Dep.LockTest do :ok end - test "creates new lock and manifest files" do - in_fixture "no_mixfile", fn -> + test "creates new lock and manifest files", context do + in_tmp context.test, fn -> Mix.Dep.Lock.write %{foo: :bar} assert File.regular? "mix.lock" assert File.regular? "_build/dev/lib/sample/.compile.lock" end end - test "does not touch manifest file there is no change" do - in_fixture "no_mixfile", fn -> + test "does not touch manifest file there is no change", context do + in_tmp context.test, fn -> Mix.Dep.Lock.write %{foo: :bar, bar: :bat} File.rm! "_build/dev/lib/sample/.compile.lock" @@ -26,11 +26,20 @@ defmodule Mix.Dep.LockTest do end end - test "stores version in manifest" do - in_fixture "no_mixfile", fn -> - assert nil? 
Mix.Dep.Lock.elixir_vsn - Mix.Dep.Lock.touch - assert Mix.Dep.Lock.elixir_vsn == System.version + test "raises a proper error for merge conflicts", context do + in_tmp context.test, fn -> + File.write "mix.lock", ~S""" + %{"dep": {:hex, :dep, "0.1.0"}, + <<<<<<< HEAD + "foo": {:hex, :foo, "0.1.0"}, + ======= + "bar": {:hex, :bar, "0.1.0"}, + >>>>>>> foobar + "baz": {:hex, :baz, "0.1.0"}} + """ + assert_raise Mix.Error, ~r/Your mix\.lock contains merge conflicts/, fn -> + Mix.Dep.Lock.read() + end end end end diff --git a/lib/mix/test/mix/dep_test.exs b/lib/mix/test/mix/dep_test.exs index 735611682fc..439fa8729f8 100644 --- a/lib/mix/test/mix/dep_test.exs +++ b/lib/mix/test/mix/dep_test.exs @@ -5,33 +5,52 @@ defmodule Mix.DepTest do defmodule DepsApp do def project do - [ deps: [ - {:ok, "0.1.0", path: "deps/ok"}, - {:invalidvsn, "0.2.0", path: "deps/invalidvsn"}, - {:invalidapp, "0.1.0", path: "deps/invalidapp"}, - {:noappfile, "0.1.0", path: "deps/noappfile"}, - {:uncloned, git: "https://github.com/elixir-lang/uncloned.git"}, - {:optional, git: "https://github.com/elixir-lang/optional.git", optional: true} - ] ] + [deps: [ + {:ok, "0.1.0", path: "deps/ok"}, + {:invalidvsn, "0.2.0", path: "deps/invalidvsn"}, + {:invalidapp, "0.1.0", path: "deps/invalidapp"}, + {:noappfile, "0.1.0", path: "deps/noappfile"}, + {:uncloned, git: "https://github.com/elixir-lang/uncloned.git"}, + {:optional, git: "https://github.com/elixir-lang/optional.git", optional: true} + ]] end end - defmodule MixVersionApp do + defmodule ProcessDepsApp do def project do - [ deps: [ {:ok, "~> 0.1", path: "deps/ok"} ] ] + [app: :process_deps_app, deps: Process.get(:mix_deps)] end end - defmodule NoSCMApp do - def project do - [ deps: [ { :ok, "~> 0.1", not_really: :ok } ] ] + defp with_deps(deps, fun) do + Process.put(:mix_deps, deps) + Mix.Project.push ProcessDepsApp + fun.() + after + Mix.Project.pop + end + + defp assert_wrong_dependency(deps) do + with_deps deps, fn -> + assert_raise Mix.Error, ~r"Dependency specified in the wrong format", fn -> + Mix.Dep.loaded([]) + end end end - defmodule InvalidDepsReq do - def project do - [ deps: [ {:ok, "+- 0.1.0", path: "deps/ok"} ] ] + test "respects the MIX_NO_DEPS flag" do + Mix.Project.push DepsApp + + in_fixture "deps_status", fn -> + deps = Mix.Dep.cached() + assert length(deps) == 6 + + System.put_env("MIX_NO_DEPS", "1") + deps = Mix.Dep.cached() + assert length(deps) == 0 end + after + System.delete_env("MIX_NO_DEPS") end test "extracts all dependencies from the given project" do @@ -49,22 +68,39 @@ defmodule Mix.DepTest do end end - test "use requirements for dependencies" do - Mix.Project.push MixVersionApp + test "extracts all dependencies paths from the given project" do + Mix.Project.push DepsApp in_fixture "deps_status", fn -> - deps = Mix.Dep.loaded([]) - assert Enum.find deps, &match?(%Mix.Dep{app: :ok, status: {:ok, _}}, &1) + paths = Mix.Project.deps_paths + assert map_size(paths) == 6 + assert paths[:ok] =~ "deps/ok" + assert paths[:uncloned] =~ "deps/uncloned" end end - test "raises when no SCM is specified" do - Mix.Project.push NoSCMApp + test "fails on invalid dependencies" do + assert_wrong_dependency [{:ok}] + assert_wrong_dependency [{:ok, nil}] + assert_wrong_dependency [{:ok, nil, []}] + end - in_fixture "deps_status", fn -> - send self, {:mix_shell_input, :yes?, false} - msg = "Could not find a SCM for dependency :ok from Mix.DepTest.NoSCMApp" - assert_raise Mix.Error, msg, fn -> Mix.Dep.loaded([]) end + test "use 
requirements for dependencies" do + with_deps [{:ok, "~> 0.1", path: "deps/ok"}], fn -> + in_fixture "deps_status", fn -> + deps = Mix.Dep.loaded([]) + assert Enum.find deps, &match?(%Mix.Dep{app: :ok, status: {:ok, _}}, &1) + end + end + end + + test "raises when no SCM is specified" do + with_deps [{:ok, "~> 0.1", not_really: :ok}], fn -> + in_fixture "deps_status", fn -> + send self(), {:mix_shell_input, :yes?, false} + msg = "Could not find an SCM for dependency :ok from Mix.DepTest.ProcessDepsApp" + assert_raise Mix.Error, msg, fn -> Mix.Dep.loaded([]) end + end end end @@ -76,203 +112,499 @@ defmodule Mix.DepTest do {_, true, _} = Mix.Dep.Converger.converge(false, [], nil, fn dep, acc, lock -> - assert nil?(dep.manager) + assert is_nil(dep.manager) {dep, acc or true, lock} end) end test "raises on invalid deps req" do - Mix.Project.push InvalidDepsReq + with_deps [{:ok, "+- 0.1.0", path: "deps/ok"}], fn -> + in_fixture "deps_status", fn -> + assert_raise Mix.Error, ~r"Invalid requirement", fn -> + Mix.Dep.loaded([]) + end + end + end + end - in_fixture "deps_status", fn -> - assert_raise Mix.Error, ~r"Invalid requirement", fn -> - Mix.Dep.loaded([]) + test "nested deps come first" do + with_deps [{:deps_repo, "0.1.0", path: "custom/deps_repo"}], fn -> + in_fixture "deps_status", fn -> + assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:git_repo, :deps_repo] end end end - defmodule NestedDepsApp do - def project do - [ - app: :raw_sample, - version: "0.1.0", - deps: [ - {:deps_repo, "0.1.0", path: "custom/deps_repo"} - ] - ] + test "nested optional deps are never added" do + with_deps [{:deps_repo, "0.1.0", path: "custom/deps_repo"}], fn -> + in_fixture "deps_status", fn -> + File.write! "custom/deps_repo/mix.exs", """ + defmodule DepsRepo do + use Mix.Project + + def project do + [app: :deps_repo, + version: "0.1.0", + deps: [{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), optional: true}]] + end + end + """ + + assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:deps_repo] + end end end - test "nested deps come first" do - Mix.Project.push NestedDepsApp + test "nested deps with convergence" do + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"}, + {:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo")}] - in_fixture "deps_status", fn -> - assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:git_repo, :deps_repo] + with_deps deps, fn -> + in_fixture "deps_status", fn -> + assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:git_repo, :deps_repo] + end end end - test "nested optional deps are never added" do - Mix.Project.push NestedDepsApp + test "nested deps with convergence and managers" do + Process.put(:custom_deps_git_repo_opts, [manager: :make]) - in_fixture "deps_status", fn -> - File.write! 
"custom/deps_repo/mix.exs", """ - defmodule DepsRepo do - use Mix.Project - - def project do - [ - app: :deps_repo, - version: "0.1.0", - deps: [ - {:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), optional: true} - ] - ] - end + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", manager: :rebar}, + {:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo")}] + + with_deps deps, fn -> + in_fixture "deps_status", fn -> + [dep1, dep2] = Mix.Dep.loaded([]) + assert dep1.manager == nil + assert dep2.manager == :rebar end - """ + end + end - assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:deps_repo] + test "nested deps with convergence and optional dependencies" do + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"}, + {:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo")}] + + with_deps deps, fn -> + in_fixture "deps_status", fn -> + File.write! "custom/deps_repo/mix.exs", """ + defmodule DepsRepo do + use Mix.Project + + def project do + [app: :deps_repo, + version: "0.1.0", + deps: [{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), optional: true}]] + end + end + """ + + assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:git_repo, :deps_repo] + end end end - defmodule ConvergedDepsApp do - def project do - [ - app: :raw_sample, - version: "0.1.0", - deps: [ - {:deps_repo, "0.1.0", path: "custom/deps_repo"}, - {:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo")} - ] - ] + test "nested deps with optional dependencies and cousin conflict" do + with_deps [{:deps_repo1, "0.1.0", path: "custom/deps_repo1"}, + {:deps_repo2, "0.1.0", path: "custom/deps_repo2"}], fn -> + in_fixture "deps_status", fn -> + File.mkdir_p!("custom/deps_repo1") + File.write! "custom/deps_repo1/mix.exs", """ + defmodule DepsRepo1 do + use Mix.Project + + def project do + [app: :deps_repo1, + version: "0.1.0", + deps: [{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), optional: true}]] + end + end + """ + + File.mkdir_p!("custom/deps_repo2") + File.write! "custom/deps_repo2/mix.exs", """ + defmodule DepsRepo2 do + use Mix.Project + + def project do + [app: :deps_repo2, + version: "0.1.0", + deps: [{:git_repo, "0.2.0", path: "somewhere"}]] + end + end + """ + + Mix.Tasks.Deps.run([]) + assert_received {:mix_shell, :info, ["* git_repo" <> _]} + assert_received {:mix_shell, :info, [msg]} + assert msg =~ "different specs were given for the git_repo" + end end end - test "correctly order converged deps" do - Mix.Project.push ConvergedDepsApp + ## Remove converger - in_fixture "deps_status", fn -> - assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:git_repo, :deps_repo] + defmodule IdentityRemoteConverger do + @behaviour Mix.RemoteConverger + + def remote?(%Mix.Dep{app: :deps_repo}), do: false + def remote?(%Mix.Dep{}), do: true + def deps(_dep, _lock), do: [] + def post_converge, do: :ok + + def converge(deps, lock) do + Process.put(:remote_converger, deps) + lock end + end - test "correctly order converged deps even with optional dependencies" do - Mix.Project.push ConvergedDepsApp + test "remote converger" do + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"}, + {:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo")}] - in_fixture "deps_status", fn -> - File.write! 
"custom/deps_repo/mix.exs", """ - defmodule DepsRepo do - use Mix.Project - - def project do - [ - app: :deps_repo, - version: "0.1.0", - deps: [ - {:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), optional: true} - ] - ] - end + with_deps deps, fn -> + Mix.RemoteConverger.register(IdentityRemoteConverger) + + in_fixture "deps_status", fn -> + Mix.Tasks.Deps.Get.run([]) + + message = "* Getting git_repo (#{fixture_path("git_repo")})" + assert_received {:mix_shell, :info, [^message]} + + assert Process.get(:remote_converger) end - """ + end + after + Mix.RemoteConverger.register(nil) + end + + test "pass dependencies to remote converger in defined order" do + deps = [ + {:ok, "0.1.0", path: "deps/ok"}, + {:invalidvsn, "0.2.0", path: "deps/invalidvsn"}, + {:deps_repo, "0.1.0", path: "custom/deps_repo"}, + {:invalidapp, "0.1.0", path: "deps/invalidapp"}, + {:noappfile, "0.1.0", path: "deps/noappfile"} + ] + + with_deps deps, fn -> + Mix.RemoteConverger.register(IdentityRemoteConverger) - assert Enum.map(Mix.Dep.loaded([]), &(&1.app)) == [:git_repo, :deps_repo] + in_fixture "deps_status", fn -> + Mix.Tasks.Deps.Get.run([]) + + deps = Process.get(:remote_converger) |> Enum.map(& &1.app) + assert deps == [:ok, :invalidvsn, :deps_repo, :invalidapp, :noappfile, :git_repo] + end end + after + Mix.RemoteConverger.register(nil) end - defmodule IdentityRemoteConverger do + defmodule RaiseRemoteConverger do @behaviour Mix.RemoteConverger - def remote?(_app), do: true + def remote?(_app), do: false + def deps(_dep, _lock), do: :ok + def post_converge, do: :ok def converge(_deps, lock) do Process.put(:remote_converger, true) lock end + end + + test "remote converger is not invoked if deps diverge" do + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"}, + {:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), only: :test}] - def deps(_deps, _lock) do - [] + with_deps deps, fn -> + Mix.RemoteConverger.register(RaiseRemoteConverger) + + in_fixture "deps_status", fn -> + assert_raise Mix.Error, fn -> + Mix.Tasks.Deps.Get.run([]) + end + + assert_received {:mix_shell, :error, ["Dependencies have diverged:"]} + refute Process.get(:remote_converger) + end end + after + Mix.RemoteConverger.register(nil) end - test "remote converger" do - Mix.Project.push ConvergedDepsApp - Mix.RemoteConverger.register(IdentityRemoteConverger) + test "remote converger is not invoked if deps graph has cycles" do + deps = [{:app1, "0.1.0", path: "app1"}, + {:app2, "0.1.0", path: "app2"}] - in_fixture "deps_status", fn -> - Mix.Tasks.Deps.Get.run([]) + with_deps deps, fn -> + Mix.RemoteConverger.register(RaiseRemoteConverger) - message = "* Getting git_repo (#{fixture_path("git_repo")})" - assert_received {:mix_shell, :info, [^message]} + in_fixture "deps_cycle", fn -> + assert_raise Mix.Error, ~r/cycles in the dependency graph/, fn -> + Mix.Tasks.Deps.Get.run([]) + end - assert Process.get(:remote_converger) + refute Process.get(:remote_converger) + end end after Mix.RemoteConverger.register(nil) end - defmodule OnlyDeps do - def project do - [ deps: [ {:foo, github: "elixir-lang/foo"}, - {:bar, github: "elixir-lang/bar", only: :other_env} ] ] + ## Only handling + + test "only extracts deps matching environment" do + with_deps [{:foo, github: "elixir-lang/foo"}, + {:bar, github: "elixir-lang/bar", only: :other_env}], fn -> + in_fixture "deps_status", fn -> + deps = Mix.Dep.loaded([env: :other_env]) + assert length(deps) == 2 + + deps = Mix.Dep.loaded([]) + assert length(deps) == 2 + + assert [dep] = 
Mix.Dep.loaded([env: :prod]) + assert dep.app == :foo + end end end - test "only extract deps matching environment" do - Mix.Project.push OnlyDeps + test "only fetches parent deps matching specified env" do + with_deps [{:only, github: "elixir-lang/only", only: [:dev]}], fn -> + in_fixture "deps_status", fn -> + Mix.Tasks.Deps.Get.run(["--only", "prod"]) + refute_received {:mix_shell, :info, ["* Getting" <> _]} - in_fixture "deps_status", fn -> - deps = Mix.Dep.loaded([env: :other_env]) - assert length(deps) == 2 + assert_raise Mix.Error, "Can't continue due to errors on dependencies", fn -> + Mix.Tasks.Deps.Loadpaths.run([]) + end - deps = Mix.Dep.loaded([]) - assert length(deps) == 2 + Mix.ProjectStack.clear_cache() + Mix.env(:prod) + Mix.Tasks.Deps.Loadpaths.run([]) + end + end + end + + test "nested deps selects only prod dependencies" do + Process.put(:custom_deps_git_repo_opts, [only: :test]) + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"}] + + with_deps deps, fn -> + in_fixture "deps_status", fn -> + loaded = Mix.Dep.loaded([]) + assert [:deps_repo] = Enum.map(loaded, &(&1.app)) - deps = Mix.Dep.loaded([env: :prod]) - assert length(deps) == 1 - assert Enum.find deps, &match?(%Mix.Dep{app: :foo}, &1) + loaded = Mix.Dep.loaded([env: :test]) + assert [:deps_repo] = Enum.map(loaded, &(&1.app)) + end end end - defmodule OnlyChildDeps do - def project do - [ app: :raw_sample, - version: "0.1.0", - deps: [ {:only_deps, path: fixture_path("only_deps")} ] ] + test "nested deps on only matching" do + # deps_repo wants git_repo for test, git_repo is restricted to only test + # We assert the dependencies match as expected, happens in umbrella apps + Process.put(:custom_deps_git_repo_opts, [only: :test]) + + # We need to pass env: :test so the child dependency is loaded + # in the first place (otherwise only :prod deps are loaded) + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", env: :test}, + {:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), only: :test}] + + with_deps deps, fn -> + in_fixture "deps_status", fn -> + loaded = Mix.Dep.loaded([]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :dev]) + assert [:deps_repo] = Enum.map(loaded, &(&1.app)) + assert [noappfile: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :test]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + end end end - test "only fetch child deps matching prod env" do - Mix.Project.push OnlyChildDeps + test "nested deps on only conflict" do + # deps_repo wants all git_repo, git_repo is restricted to only test + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"}, + {:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), only: :test}] + + with_deps deps, fn -> + in_fixture "deps_status", fn -> + loaded = Mix.Dep.loaded([]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [divergedonly: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :dev]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [divergedonly: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :test]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [divergedonly: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + + Mix.Tasks.Deps.run([]) + assert_received 
{:mix_shell, :info, ["* git_repo" <> _]} + assert_received {:mix_shell, :info, [msg]} + assert msg =~ "Remove the :only restriction from your dep" + end + end + end - in_fixture "deps_status", fn -> - Mix.Tasks.Deps.Get.run([]) - message = "* Getting git_repo (#{fixture_path("git_repo")})" - refute_received {:mix_shell, :info, [^message]} + test "nested deps on only conflict does not happen with optional deps" do + Process.put(:custom_deps_git_repo_opts, [optional: true]) + + # deps_repo wants all git_repo, git_repo is restricted to only test + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"}, + {:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), only: :test}] + + with_deps deps, fn -> + in_fixture "deps_status", fn -> + loaded = Mix.Dep.loaded([]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :dev]) + assert [:deps_repo] = Enum.map(loaded, &(&1.app)) + assert [noappfile: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :test]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + end end end - defmodule OnlyParentDeps do - def project do - [ app: :raw_sample, - version: "0.1.0", - deps: [ {:only, github: "elixir-lang/only", only: :dev} ] ] + test "nested deps with valid only subset" do + # deps_repo wants git_repo for prod, git_repo is restricted to only prod and test + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", only: :prod}, + {:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), only: [:prod, :test]}] + + with_deps deps, fn -> + in_fixture "deps_status", fn -> + loaded = Mix.Dep.loaded([]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :dev]) + assert [] = Enum.map(loaded, &(&1.app)) + + loaded = Mix.Dep.loaded([env: :test]) + assert [:git_repo] = Enum.map(loaded, &(&1.app)) + assert [unavailable: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :prod]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + end end end - test "only fetch parent deps matching specified env" do - Mix.Project.push OnlyParentDeps + test "nested deps with invalid only subset" do + # deps_repo wants git_repo for dev, git_repo is restricted to only test + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", only: :dev}, + {:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo"), only: [:test]}] + + with_deps deps, fn -> + in_fixture "deps_status", fn -> + loaded = Mix.Dep.loaded([]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [divergedonly: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :dev]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [divergedonly: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :test]) + assert [:git_repo] = Enum.map(loaded, &(&1.app)) + assert [unavailable: _] = Enum.map(loaded, &(&1.status)) + + Mix.Tasks.Deps.run([]) + assert_received {:mix_shell, :info, ["* git_repo" <> _]} + assert_received {:mix_shell, :info, [msg]} + assert msg =~ "Ensure you specify at least the same environments in :only in your dep" + end + end + end - in_fixture "deps_status", fn -> - 
Mix.Tasks.Deps.Get.run(["--only", "prod"]) - refute_received {:mix_shell, :info, ["* Getting" <> _]} + test "nested deps with valid only in both parent and child" do + Process.put(:custom_deps_git_repo_opts, [only: :test]) + + # deps_repo has environment set to test so it loads the deps_git_repo set to test too + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", env: :test, only: [:dev, :test]}, + {:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo"), only: :test}] - assert_raise Mix.Error, "Can't continue due to errors on dependencies", fn -> - Mix.Tasks.Deps.Check.run([]) + with_deps deps, fn -> + in_fixture "deps_status", fn -> + loaded = Mix.Dep.loaded([]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :dev]) + assert [:deps_repo] = Enum.map(loaded, &(&1.app)) + assert [noappfile: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :test]) + assert [:git_repo, :deps_repo] = Enum.map(loaded, &(&1.app)) + assert [unavailable: _, noappfile: _] = Enum.map(loaded, &(&1.status)) + + loaded = Mix.Dep.loaded([env: :prod]) + assert [] = Enum.map(loaded, &(&1.app)) end + end + end - Mix.env(:prod) - Mix.Tasks.Deps.Check.run([]) + test "nested deps converge and diverge when only is not in_upper" do + loaded_only = fn deps -> + with_deps deps, fn -> + in_fixture "deps_status", fn -> + File.mkdir_p! "custom/other_repo" + File.write! "custom/other_repo/mix.exs", """ + defmodule OtherRepo do + use Mix.Project + + def project do + [app: :deps_repo, + version: "0.1.0", + deps: [{:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo")}]] + end + end + """ + + Mix.ProjectStack.clear_cache + loaded = Mix.Dep.loaded([]) + assert [:git_repo, _, _] = Enum.map(loaded, &(&1.app)) + hd(loaded).opts[:only] + end + end end + + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", only: :prod}, + {:other_repo, "0.1.0", path: "custom/other_repo", only: :test}] + assert loaded_only.(deps) == [:test, :prod] + + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"}, + {:other_repo, "0.1.0", path: "custom/other_repo", only: :test}] + refute loaded_only.(deps) + + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", only: :prod}, + {:other_repo, "0.1.0", path: "custom/other_repo"}] + refute loaded_only.(deps) + + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo"}, + {:other_repo, "0.1.0", path: "custom/other_repo"}] + refute loaded_only.(deps) + + Process.put(:custom_deps_git_repo_opts, [optional: true]) + deps = [{:deps_repo, "0.1.0", path: "custom/deps_repo", only: :prod}, + {:other_repo, "0.1.0", path: "custom/other_repo", only: :test}] + assert loaded_only.(deps) == :test end end diff --git a/lib/mix/test/mix/generator_test.exs b/lib/mix/test/mix/generator_test.exs index 2370c30557a..d429682d86a 100644 --- a/lib/mix/test/mix/generator_test.exs +++ b/lib/mix/test/mix/generator_test.exs @@ -9,19 +9,19 @@ defmodule Mix.GeneratorTest do embed_text :self, from_file: __ENV__.file embed_template :bar, "<%= @a + @b %>" - test :embed_text do - assert foo_text == "foo" + test "embed text" do + assert foo_text() == "foo" end - test :embed_template do + test "embed template" do assert bar_template(a: 1, b: 2) == "3" end - test :from_file do - assert self_text =~ "import Mix.Generator" + test "from file" do + assert self_text() =~ "import Mix.Generator" end - test :create_file do + test "create file" do in_tmp "create_file", fn -> create_file "foo", 
"HELLO" assert File.read!("foo") == "HELLO" @@ -29,10 +29,22 @@ defmodule Mix.GeneratorTest do end end - test :create_with_conflict_returning_true do + test "force create file" do in_tmp "create_file", fn -> File.write! "foo", "HELLO" - send self, {:mix_shell_input, :yes?, true} + + create_file "foo", "WORLD", force: true + assert File.read!("foo") == "WORLD" + + refute_received {:mix_shell, :yes?, ["foo already exists, overwrite?"]} + assert_received {:mix_shell, :info, ["* creating foo"]} + end + end + + test "create with conflict returning true" do + in_tmp "create_file", fn -> + File.write! "foo", "HELLO" + send self(), {:mix_shell_input, :yes?, true} create_file "foo", "WORLD" assert File.read!("foo") == "WORLD" @@ -41,10 +53,10 @@ defmodule Mix.GeneratorTest do end end - test :create_with_conflict_returning_false do + test "create with conflict returning false" do in_tmp "create_file", fn -> File.write! "foo", "HELLO" - send self, {:mix_shell_input, :yes?, false} + send self(), {:mix_shell_input, :yes?, false} create_file "foo", "WORLD" assert File.read!("foo") == "HELLO" diff --git a/lib/mix/test/mix/local/installer_test.exs b/lib/mix/test/mix/local/installer_test.exs new file mode 100644 index 00000000000..5d7cc2ce81b --- /dev/null +++ b/lib/mix/test/mix/local/installer_test.exs @@ -0,0 +1,71 @@ +Code.require_file "../../test_helper.exs", __DIR__ + +defmodule Mix.Local.InstallerTest do + use MixTest.Case + + test "fetch" do + dep_spec = {:"git repo", git: fixture_path("git_repo")} + + config = + Mix.Local.Installer.fetch dep_spec, fn _mixfile -> + assert Mix.env() == :prod + Mix.Project.config() + end + + assert Mix.env() == :dev + + assert config[:app] == :git_repo + assert config[:deps_path] =~ ~r/mix-local-installer-fetcher-.*\/deps/ + assert config[:lockfile] =~ ~r/mix-local-installer-fetcher-.*\/mix.lock/ + end + + test "parse_args Git" do + assert Mix.Local.Installer.parse_args(["git", "https://example.com/user/repo.git"], []) == + {:fetcher, {:"new package", [branch: "master", git: "https://example.com/user/repo.git", submodules: nil]}} + end + + test "parse_args Git branch" do + assert Mix.Local.Installer.parse_args(["git", "https://example.com/user/repo.git", "branch", "not_master"], []) == + {:fetcher, {:"new package", [branch: "not_master", git: "https://example.com/user/repo.git", submodules: nil]}} + end + + test "parse_args Git ref" do + assert Mix.Local.Installer.parse_args(["git", "https://example.com/user/repo.git", "ref", "not_master"], []) == + {:fetcher, {:"new package", [ref: "not_master", git: "https://example.com/user/repo.git", submodules: nil]}} + end + + test "parse_args Git tag" do + assert Mix.Local.Installer.parse_args(["git", "https://example.com/user/repo.git", "tag", "not_master"], []) == + {:fetcher, {:"new package", [tag: "not_master", git: "https://example.com/user/repo.git", submodules: nil]}} + end + + test "parse_args Git submodules" do + assert Mix.Local.Installer.parse_args(["git", "https://example.com/user/repo.git"], [submodules: true]) == + {:fetcher, {:"new package", [branch: "master", git: "https://example.com/user/repo.git", submodules: true]}} + end + + test "parse_args Git app" do + assert Mix.Local.Installer.parse_args(["git", "https://example.com/user/repo.git"], [app: "my_app"]) == + {:fetcher, {:my_app, [branch: "master", git: "https://example.com/user/repo.git", submodules: nil]}} + end + + test "parse_args GitHub" do + 
assert Mix.Local.Installer.parse_args(["github", "user/repo"], []) == + {:fetcher, {:"new package", [branch: "master", git: "https://github.com/user/repo.git", submodules: nil]}} + end + + test "parse_args Hex" do + assert Mix.Local.Installer.parse_args(["hex", "a_package"], []) == + {:fetcher, {:a_package, ">= 0.0.0", [hex: :a_package]}} + end + + test "parse_args Hex app" do + assert Mix.Local.Installer.parse_args(["hex", "a_package"], [app: "my_app"]) == + {:fetcher, {:my_app, ">= 0.0.0", [hex: :a_package]}} + end + + test "parse_args Hex version spec" do + assert Mix.Local.Installer.parse_args(["hex", "a_package", "1.0.0"], []) == + {:fetcher, {:a_package, "1.0.0", [hex: :a_package]}} + end +end diff --git a/lib/mix/test/mix/local_test.exs b/lib/mix/test/mix/local_test.exs new file mode 100644 index 00000000000..1418a85f43b --- /dev/null +++ b/lib/mix/test/mix/local_test.exs @@ -0,0 +1,101 @@ +Code.require_file "../test_helper.exs", __DIR__ + +defmodule Mix.LocalTest do + use MixTest.Case + + # openssl rsa -in elixirest.pem -pubout > elixirest.pub + @public_key """ + -----BEGIN PUBLIC KEY----- + MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA37moKP1dGGLhsP3d8Fwv + W25SoYZUY2K+Iq7A0OBV36Rnb8yW3BWjfh5YtmPvUCfYUbNCW2HTMMgBntkQ4YmN + B9tHVZazl2uX9lGCfZZPFc/9umvKRojCPkMN81MfTxqnY0oaLHr6DB86RsWHB+ld + 782Xf+nd9q3LFdUl8SGlKX7uzfVWd4EWYNcL7aLeLSupZWeNg8uVmY3zua0EgIlQ + XryalIOZb/R+pwprWZoftCl+20FGYi/mJpo/idFtXsR0sJKF4X0W3NORT9RIRbs9 + WdjiFi+eIP7Nm8KSF4pbaXCqSmVf9cgvUuGTxc9/P5GcIPAlkcsSrE5peLyUCk5f + 2QIDAQAB + -----END PUBLIC KEY----- + """ + + # openssl genrsa -aes256 -out elixirtest.pem -passout stdin 2048 + @private_key """ + -----BEGIN RSA PRIVATE KEY----- + Proc-Type: 4,ENCRYPTED + DEK-Info: AES-256-CBC,48BA5153DA2F120ECE063B33C1204A49 + + 5gp3daNWujH7o9S/dJQEt9TYTRP0pPZtU55PlZrzWt52optr7XHW/ENOm84g5J70 + QCPELp12jfQsNiPwbVWXKy2zD3QlNiAelf65hqLWJTWli7XIXfdP46VXOu67OKf9 + Ziw4HQ+AdBEwFt20wJst77iy17sNlyxp5DhNDonnSizzIowgUAJkoNI5aBUU6D8X + KTSIftZW35Z4SudkazdoHepEfItZTI8mB4rvfn71Q4oOBA1rAuUUmdPWoPBfUHDa + hvIp2T2Q8zZYqm0+SjDxZUYOOreE7fuf5NSLhHHt7+jyWQmtaVxnOWms72G+9xT0 + NGmOEB0WEg1kBsUbYOXXwyCAZhNA6MaKCtgjQczRTK+geS1xNaFc9FDEk3ZjN4Z8 + PxrKQoqo+2aQGVcatZWCom80Dci3bIv7iZNA/y1rjfBn+MeitMOGscP7/CBrJAbI + bh1mvCu0McSnqlN0a+EuCVfJQYFMzjibpRVzKAST0QeaxXd5QxHfcPFPBLOpiVWc + NjHaZsHORyoJbUKGA4rgOiSB63mv7SDRA2mvxWpwV/+6MuwBah6t6CGoEsAr1Hbn + 1ySt5w27bw3QEf2KTiuxDubo8UrF0eYzP5A9MH8vRpSRZHg8T3SBVfPJ/pM16Lnn + 5BaMUdxDFJeet5HUYoke9Zm3udh2BvwGiKhzc9Pbw/EcsCcvChMimRTasqTaRf+S + uIm0Un7o+7kTuvBo2y87j2urCEUzft5QqEynbkR7p3vZnwoLLj+supXh3V8ivW4s + Z6ql+ukRcWd/ode+lbSiYfAJCLc1tCqJ3kTnMnADJBlL0TX7YnwBwWuwwPuZgeAv + F6nnBE1SBQ1WK+bjSVzIqmNFqsZw34wgpnz2heX0q8msF5pzd6EIeA+uz86k8XYh + 4eVZYGXxa4Exodh/MqEpRuN1ytWDXvHULh0gml7xwZC3R50UD8uBNt5RGjXUkjXc + V0atKuvgzVlsB4xbDhVP7EVYHBF02NfNOsvo7kh0Yl1IcT/42UaCGYuU1o9zotPv + 9b3SHz/HOmBVj2uCdR5XZ4EolP5Iv9vqIDt9DsuDpOyO+AFOww0FnJNCQ1Hmfb0T + qBYPv994oSPYLCGR4a8i/xfmmV8KbAIVEgK3AMbz8RxKr3WBWXWnzQdr4+y4EG24 + hSnR52XQ42edv/fkqf9ez+fKNQ9i7PtlPE96Q21NeLMNKHh43X8hJFDh+oPz3Aio + YSNMCZnoyRdrjBRCsVBpnyoLmuhWwG9RlcrEj3G0BxYPh/weaBOAKAHjSr28yuUj + yIa8uddszC6XHSiVUgu7SGO8gQmq++eNdckjX/pEug5MjcWLUqaUg6+YLFWY6NLf + uDPOYuivq7ErtKTvP2xl3TBEDKhdfqxA2+RFxbBDmKjffZnRkcknQsxhlzAdbg22 + Jwa2B1nrfjJpX5F+1Av2jHQGbIKMqZzv8fo1binMKpptFzokbWEOjcPCb3tPuomG + ZRkW3qO2pdyYX2N7VXYG9tGi2HrN/oFrWnHPoYF23v85V8WxNkODOCpTz85e6R5v + PVu+FCNFj5weEOTRhtEQyJo7mU5qIRwYeZvVxiC6W+XeFs95wdBE/Lvpg8yZ8D9d + -----END RSA PRIVATE KEY----- + """ + + @csv """ + 1.2.5,ABC,0.9.0 + 1.2.3,DEF,1.0.0 + 
1.2.4,GHI,1.0.0 + """ + + # openssl dgst -sha512 -sign elixirtest.pem hex-1.x.csv | openssl base64 > elixirtest.csv.signed + @csv_signed """ + VRydmXOdEXQcKJu/SK/nKnE00T+s/T4mpXrYROMSXhD/s8ClvdimnGg61ie3YBS6 + LXOjlEhbtMHRM2rTOUvv4z7FcyzwvSxSjunlVi2g3c1pVOZ78MonnYhGb44tZw/q + SOVmV+jJhc9EZFMIAAM3plMoyssyw2pMh7ZB/DxCQTIem3Qf0Ujzc2bYkLVlw7R+ + 1Rn6dcYEgCzyldVkAUMaYBwieyweWALA+YVDCMudJJK2J7p1OnuoPSVV+N3OkB/Z + T6Jj5ljD+54XnuxAMcgCoF9lpOwXscnw/Ma+8JqIoWo0jNFE3ji+8dGCUzQUdSe8 + llLXgJJE2tGpDhEXBA3idg== + """ + + # We don't actually use it but it exists for documentation purposes. + _ = @private_key + + setup_all do + File.mkdir_p!(Mix.PublicKey.public_keys_path) + + Mix.PublicKey.public_keys_path + |> Path.join("test_key.pub") + |> File.write!(@public_key) + end + + test "select correct versions from csv" do + in_tmp "select correct versions from csv", fn -> + File.write!("csv", @csv) + File.write!("csv.signed", @csv_signed) + + assert {"1.0.0", "1.2.4", "GHI"} = + Mix.Local.find_matching_versions_from_signed_csv!("name", "csv") + end + end + + test "raise on bad signature" do + in_tmp "raise on bad signature", fn -> + csv_signed = String.replace(@csv_signed, "VRy", "BAD") + File.write!("csv", @csv) + File.write!("csv.signed", csv_signed) + + assert_raise Mix.Error, fn -> + Mix.Local.find_matching_versions_from_signed_csv!("name", "csv") + end + end + end +end diff --git a/lib/mix/test/mix/project_test.exs b/lib/mix/test/mix/project_test.exs index 7aaa7447158..5c5b11c0c3d 100644 --- a/lib/mix/test/mix/project_test.exs +++ b/lib/mix/test/mix/project_test.exs @@ -15,7 +15,7 @@ defmodule Mix.ProjectTest do assert Mix.Project.get == SampleProject assert %{name: SampleProject, config: _, file: "sample"} = Mix.Project.pop - assert nil = Mix.Project.pop + assert Mix.Project.pop == nil end test "does not allow the same project to be pushed twice" do @@ -31,7 +31,7 @@ defmodule Mix.ProjectTest do Mix.Project.push nil assert is_map Mix.Project.pop assert is_map Mix.Project.pop - assert nil? Mix.Project.pop + assert is_nil Mix.Project.pop end test "retrieves configuration from projects" do @@ -41,7 +41,7 @@ defmodule Mix.ProjectTest do test "removes private configuration" do Mix.Project.push(SampleProject) - assert nil? 
Mix.Project.config[:app_path] + assert is_nil Mix.Project.config[:app_path] end test "retrieves configuration even when a project is not set" do @@ -63,7 +63,16 @@ defmodule Mix.ProjectTest do end end - test "builds the project structure with ebin symlink" do + test "builds the project structure without symlinks" do + in_fixture "archive", fn -> + config = [app_path: Path.expand("_build/archive"), build_embedded: true] + assert Mix.Project.build_structure(config) == :ok + assert File.dir?("_build/archive/ebin") + assert {:error, _} = :file.read_link("_build/archive/ebin") + end + end + + test "builds the project structure with symlinks" do in_fixture "archive", fn -> config = [app_path: Path.expand("_build/archive")] File.mkdir_p!("include") @@ -79,26 +88,64 @@ defmodule Mix.ProjectTest do end end - test "config_files" do + test "in_project pushes given configuration", context do + in_tmp context.test, fn -> + result = Mix.Project.in_project :foo, ".", [hello: :world], fn _ -> + assert Mix.Project.config[:app] == :foo + assert Mix.Project.config[:hello] == :world + :result + end + + assert result == :result + end + end + + test "in_project prints a nice error message if it fails to load the file", context do + in_tmp context.test, fn -> + File.write "mix.exs", """ + raise "oops" + """ + + assert_raise RuntimeError, "oops", fn -> + Mix.Project.in_project :hello, ".", [], fn _ -> + :ok + end + end + + assert_receive {:mix_shell, :error, ["Error while loading project :hello at" <> _]} + end + end + + test "config_files", context do Mix.Project.push(SampleProject) - in_fixture "no_mixfile", fn -> - File.mkdir_p!("config") + in_tmp context.test, fn -> + File.mkdir_p!("config/sub") File.write! "config/config.exs", "[]" File.write! "config/dev.exs", "[]" File.write! "config/.exs", "[]" + File.write! "config/sub/init.exs", "[]" files = Mix.Project.config_files + assert __ENV__.file in files assert "config/config.exs" in files assert "config/dev.exs" in files refute "config/.exs" in files + assert "config/sub/init.exs" in files end end - + defp assert_proj_dir_linked_or_copied(source, target, symlink_path) do case :file.read_link(source) do - {:ok, path} -> assert path == symlink_path + {:ok, path} -> + case :os.type do + # relative symlinks on Windows are broken, see symlink_or_copy/2 + {:win32, _} -> + assert path == [source, '..', symlink_path] |> Path.join |> Path.expand |> String.to_charlist + _ -> + assert path == symlink_path + end {:error, _} -> assert File.ls!(source) == File.ls!(target) end end diff --git a/lib/mix/test/mix/rebar_test.exs b/lib/mix/test/mix/rebar_test.exs index bc509e508e8..8aef9f962e0 100644 --- a/lib/mix/test/mix/rebar_test.exs +++ b/lib/mix/test/mix/rebar_test.exs @@ -3,124 +3,261 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Mix.RebarTest do use MixTest.Case - # Have our own path implementation that bypasses some - Path validation checks. We use this just for testing.
- defmodule MyPath do - @behaviour Mix.SCM - - for {name, arity} <- Mix.SCM.Path.__info__(:functions) do - args = tl Enum.map 0..arity, &{:"x#{&1}", [], nil} - def unquote(name)(unquote_splicing(args)) do - Mix.SCM.Path.unquote(name)(unquote_splicing(args)) - end + defmodule RebarAsDep do + def project do + [app: :rebar_as_dep, + version: "0.1.0", + deps: [ + {:rebar_dep, path: MixTest.Case.tmp_path("rebar_dep"), app: false} + ]] end end - setup do - available = Mix.SCM.available - Application.put_env(:mix, :scm, [Mix.SCM.Git, MyPath]) - on_exit fn -> Application.put_env(:mix, :scm, available) end - :ok + defmodule Rebar3AsDep do + def project do + [app: :rebar_as_dep, + version: "0.1.0", + deps: [ + {:rebar_dep, path: MixTest.Case.tmp_path("rebar_dep"), app: false, manager: :rebar3} + ]] + end end - defmodule RebarAsDep do + defmodule RebarOverrideAsDep do def project do - [ app: :rebar_as_dep, - version: "0.1.0", - deps: [ - {:rebar_dep, path: MixTest.Case.tmp_path("rebar_dep"), app: false} - ] - ] + [app: :rebar_as_dep, + version: "0.1.0", + deps: [ + {:rebar_override, path: MixTest.Case.tmp_path("rebar_override"), app: false, manager: :rebar3} + ]] end end - test "load rebar config" do - path = MixTest.Case.tmp_path("rebar_dep") - config = Mix.Rebar.load_config(path) - assert config[:sub_dirs] == ['apps/*'] - assert config[:SCRIPT] == 'rebar.config.script' + describe "load_config/1" do + test "loads rebar.config" do + path = MixTest.Case.fixture_path("rebar_dep") + config = Mix.Rebar.load_config(path) + assert config[:sub_dirs] == ['apps/*'] + assert config[:SCRIPT] == 'rebar.config.script' + end + + test "loads rebar.config.script on dependency directory" do + path = MixTest.Case.fixture_path("rebar_dep_script") + config = Mix.Rebar.load_config(path) + assert config[:dir] == {:ok, String.to_charlist(path)} + end end - test "execute rebar.config.script on dependecy directory" do - path = MixTest.Case.fixture_path("rebar_dep_script") - config = Mix.Rebar.load_config(path) - assert config[:dir] == {:ok, String.to_char_list(path)} + describe "deps/1" do + test "parses Rebar dependencies" do + config = [deps: [{:git_rebar, '~> 1.0'}]] + assert [{:git_rebar, "~> 1.0"}] == + Mix.Rebar.deps(config) + + config = [deps: [{:git_rebar, '~> 1.0', {:pkg, :rebar_fork}}]] + assert [{:git_rebar, "~> 1.0", hex: :rebar_fork}] == + Mix.Rebar.deps(config) + + config = [deps: [{:git_rebar, {:pkg, :rebar_fork}}]] + assert [{:git_rebar, ">= 0.0.0", hex: :rebar_fork}] == + Mix.Rebar.deps(config) + + config = [deps: [{:git_rebar, '0.1..*', {:git, '../../test/fixtures/git_rebar', :master}}]] + assert [{:git_rebar, ~r"0.1..*", [git: "../../test/fixtures/git_rebar", ref: "master"]}] == + Mix.Rebar.deps(config) + + config = [deps: [{:git_rebar, {:git, '../../test/fixtures/git_rebar', :master}}]] + assert [{:git_rebar, ">= 0.0.0", [git: "../../test/fixtures/git_rebar", ref: "master"]}] == + Mix.Rebar.deps(config) + + config = [deps: [{:git_rebar, '0.1..*', {:git, '../../test/fixtures/git_rebar'}, [:raw]}]] + assert [{:git_rebar, ~r"0.1..*", [git: "../../test/fixtures/git_rebar", compile: false]}] == + Mix.Rebar.deps(config) + + config = [deps: [{:git_rebar, '', {:git, '../../test/fixtures/git_rebar', {:ref, '64691eb'}}}]] + assert [{:git_rebar, ~r"", [git: "../../test/fixtures/git_rebar", ref: "64691eb"]}] == + Mix.Rebar.deps(config) + end end - test "parse rebar dependencies" do - config = [deps: [{:git_rebar, '.*',}]] - assert [{:git_rebar, ~r".*", [path: "deps/git_rebar"]}] == - Mix.Rebar.deps(config) + describe 
"apply_overrides/3" do + test "applies overrides" do + config = [deps: {:git_rebar, '~> 2.0'}] + overrides = [{:override, [deps: [{:git_rebar, '~> 1.0'}]]}] + assert Mix.Rebar.apply_overrides(:foo, config, overrides) == + [deps: [{:git_rebar, '~> 1.0'}], overrides: overrides] + + config = [deps: [{:git_rebar, '~> 2.0'}]] + overrides = [{:override, :bar, [deps: [{:git_rebar, '~> 1.0'}]]}] + assert Mix.Rebar.apply_overrides(:foo, config, overrides) == + [deps: [{:git_rebar, '~> 2.0'}], overrides: overrides] + + config = [deps: [{:git_rebar, '~> 2.0'}]] + overrides = [{:override, :foo, [deps: [{:git_rebar, '~> 1.0'}]]}] + assert Mix.Rebar.apply_overrides(:foo, config, overrides) == + [deps: [{:git_rebar, '~> 1.0'}], overrides: overrides] - config = [deps: [{:git_rebar, '.*',}], deps_dir: "other_dir"] - assert [{:git_rebar, ~r".*", [path: "other_dir/git_rebar"]}] == - Mix.Rebar.deps(config) + config = [deps: [{:git_rebar, '~> 1.0'}]] + overrides = [{:add, :foo, [deps: [{:git_rebar2, '~> 2.0'}]]}] + assert Mix.Rebar.apply_overrides(:foo, config, overrides) == + [deps: [{:git_rebar2, '~> 2.0'}, {:git_rebar, '~> 1.0'}], overrides: overrides] + end + + test "concatenates overrides" do + config = [deps: {:git_rebar, '~> 2.0'}, overrides: [{:add, :bar, []}]] + overrides = [{:override, [deps: [{:git_rebar, '~> 1.0'}]]}] + assert Mix.Rebar.apply_overrides(:foo, config, overrides) == + [deps: [{:git_rebar, '~> 1.0'}], overrides: overrides ++ [{:add, :bar, []}]] + end + end - config = [deps: [{:git_rebar, '0.1..*', {:git, '../../test/fixtures/git_rebar', :master}}]] - assert [{:git_rebar, ~r"0.1..*", [git: "../../test/fixtures/git_rebar", ref: "master"]}] == - Mix.Rebar.deps(config) + describe "dependency_config/1" do + test "converts Rebar config to dependency config" do + config = Mix.Rebar.load_config(fixture_path("rebar_dep")) + dep_config = Mix.Rebar.dependency_config(config) - config = [deps: [{:git_rebar, '0.1..*', {:git, '../../test/fixtures/git_rebar'}, [:raw]}]] - assert [{:git_rebar, ~r"0.1..*", [git: "../../test/fixtures/git_rebar", compile: false]}] == - Mix.Rebar.deps(config) + assert config[:erl_opts] == [:warnings_as_errors] + assert dep_config[:erl_opts] == [] + end end - test "parse rebar dependencies from rebar.config" do - Mix.Project.push(RebarAsDep) + describe "recur/1" do + test "recurs over sub dirs" do + path = MixTest.Case.fixture_path("rebar_dep") - deps = Mix.Dep.loaded([]) - assert Enum.find(deps, &match?(%Mix.Dep{app: :rebar_dep}, &1)) + File.cd! path, fn -> + config = Mix.Rebar.load_config(path) + + Mix.Rebar.recur(config, fn config -> + if config[:sub_dirs] == ['from_apps_another'] do + Process.put(:inside_apps_another, true) + end + end) + end - assert Enum.find(deps, fn %Mix.Dep{app: app, opts: opts} -> - if app == :git_rebar do - assert Enum.find(opts, &match?({:git, "../../test/fixtures/git_rebar"}, &1)) - assert Enum.find(opts, &match?({:ref, "master"}, &1)) - true + unless Process.get(:inside_apps_another) do + flunk "Expected inside_apps_another to return true" end - end) + end end - test "recurs over sub dirs" do - path = MixTest.Case.tmp_path("rebar_dep") + describe "integration with Mix" do + test "inherits Rebar manager" do + Mix.Project.push(Rebar3AsDep) + deps = Mix.Dep.loaded([]) + assert Enum.all?(deps, &(&1.manager == :rebar3)) + end + - File.cd! 
path, fn -> - config = Mix.Rebar.load_config(path) + test "parses Rebar dependencies from rebar.config" do + Mix.Project.push(RebarAsDep) - Mix.Rebar.recur(config, fn config -> - if config[:sub_dirs] == ['from_apps_another'] do - Process.put(:inside_apps_another, true) + deps = Mix.Dep.loaded([]) + assert Enum.find(deps, &(&1.app == :rebar_dep)) + + assert Enum.find(deps, fn %Mix.Dep{app: app, opts: opts} -> + if app == :git_rebar do + assert Enum.find(opts, &match?({:git, _}, &1)) + assert Enum.find(opts, &match?({:ref, "master"}, &1)) + true end end) end - unless Process.get(:inside_apps_another) do - flunk "Expected inside_apps_another to return true" + test "handles Rebar overrides" do + Mix.Project.push(RebarOverrideAsDep) + + in_tmp "Rebar overrides", fn -> + Mix.Tasks.Deps.Get.run [] + assert Mix.Dep.loaded([]) |> Enum.map(& &1.app) == + [:git_repo, :git_rebar, :rebar_override] + end + after + purge [GitRepo.Mixfile] end - end - test "get and compile dependencies for rebar" do - # Use rebar from project root - System.put_env("MIX_HOME", MixTest.Case.elixir_root) - Mix.Project.push(RebarAsDep) - - in_tmp "get and compile dependencies for rebar", fn -> - Mix.Tasks.Deps.Get.run ["--no-compile"] - assert_received {:mix_shell, :info, ["* Getting git_rebar (../../test/fixtures/git_rebar)"]} - - Mix.Tasks.Deps.Compile.run [] - assert_received {:mix_shell, :run, ["==> git_rebar (compile)\n"]} - assert_received {:mix_shell, :run, ["==> rebar_dep (compile)\n"]} - assert :git_rebar.any_function == :ok - assert :rebar_dep.any_function == :ok - - load_paths = Mix.Dep.loaded([]) - |> Enum.map(&Mix.Dep.load_paths(&1)) - |> Enum.concat - - assert File.exists?("_build/dev/lib/rebar_dep/ebin/rebar_dep.beam") - assert File.exists?("_build/dev/lib/git_rebar/ebin/git_rebar.beam") - assert Enum.any?(load_paths, &String.ends_with?(&1, "git_rebar/ebin")) - assert Enum.any?(load_paths, &String.ends_with?(&1, "rebar_dep/ebin")) + test "gets and compiles dependencies for Rebar" do + Mix.Project.push(RebarAsDep) + + in_tmp "get and compile dependencies for Rebar", fn -> + Mix.Tasks.Deps.Get.run [] + assert_received {:mix_shell, :info, ["* Getting git_rebar" <> _]} + + Mix.Tasks.Deps.Compile.run [] + assert_received {:mix_shell, :run, ["===> Compiling git_rebar\n"]} + assert_received {:mix_shell, :run, ["===> Compiling rebar_dep\n"]} + assert :git_rebar.any_function == :ok + assert :rebar_dep.any_function == :ok + + load_paths = Mix.Dep.loaded([]) + |> Enum.map(&Mix.Dep.load_paths(&1)) + |> Enum.concat + + assert File.exists?("_build/dev/lib/rebar_dep/ebin/rebar_dep.beam") + assert File.exists?("_build/dev/lib/git_rebar/ebin/git_rebar.beam") + + # Assert we have no .compile.lock as a .compile.lock + # means we check for the Elixir version on every command. 
+ refute File.exists?("_build/dev/lib/rebar_dep/.compile.lock") + refute File.exists?("_build/dev/lib/git_rebar/.compile.lock") + + assert Enum.any?(load_paths, &String.ends_with?(&1, "git_rebar/ebin")) + assert Enum.any?(load_paths, &String.ends_with?(&1, "rebar_dep/ebin")) + end + end + + test "gets and compiles dependencies for rebar3" do + Mix.Project.push(Rebar3AsDep) + + in_tmp "get and compile dependencies for rebar3", fn -> + Mix.Tasks.Deps.Get.run [] + assert_received {:mix_shell, :info, ["* Getting git_rebar " <> _]} + + Mix.Tasks.Deps.Compile.run [] + assert_received {:mix_shell, :run, ["===> Compiling git_rebar\n"]} + assert_received {:mix_shell, :run, ["===> Compiling rebar_dep\n"]} + assert :git_rebar.any_function == :ok + assert :rebar_dep.any_function == :ok + + load_paths = Mix.Dep.loaded([]) + |> Enum.map(&Mix.Dep.load_paths(&1)) + |> Enum.concat + + assert File.exists?("_build/dev/lib/rebar_dep/ebin/rebar_dep.beam") + assert File.exists?("_build/dev/lib/git_rebar/ebin/git_rebar.beam") + + # Assert we have no .compile.lock as a .compile.lock + # means we check for the Elixir version on every command. + refute File.exists?("_build/dev/lib/rebar_dep/.compile.lock") + refute File.exists?("_build/dev/lib/git_rebar/.compile.lock") + + assert Enum.any?(load_paths, &String.ends_with?(&1, "git_rebar/ebin")) + assert Enum.any?(load_paths, &String.ends_with?(&1, "rebar_dep/ebin")) + end + end + + test "gets and compiles dependencies for Rebar with Mix" do + Mix.Project.push(RebarAsDep) + + in_tmp "get and compile dependencies for Rebar with Mix", fn -> + File.write! MixTest.Case.tmp_path("rebar_dep/mix.exs"), """ + defmodule RebarDep.Mixfile do + use Mix.Project + + def project do + [app: :rebar_dep, + version: "0.0.1"] + end + end + """ + + Mix.Tasks.Deps.Compile.run [] + assert_received {:mix_shell, :info, ["==> rebar_dep"]} + assert_received {:mix_shell, :info, ["Generated rebar_dep app"]} + assert File.regular?("_build/dev/lib/rebar_dep/ebin/rebar_dep.app") + end + after + File.rm MixTest.Case.tmp_path("rebar_dep/mix.exs") end end end diff --git a/lib/mix/test/mix/scm/git_test.exs b/lib/mix/test/mix/scm/git_test.exs index fc99a4a830c..80152df5f1e 100644 --- a/lib/mix/test/mix/scm/git_test.exs +++ b/lib/mix/test/mix/scm/git_test.exs @@ -10,7 +10,7 @@ defmodule Mix.SCM.GitTest do assert Mix.SCM.Git.format_lock(lock(ref: "abcdef0")) == "abcdef0 (ref)" end - test "considers to dep equals if the have the same git and the same opts" do + test "considers two deps equal if they have the same Git and the same opts" do assert Mix.SCM.Git.equal?([git: "foo"], [git: "foo"]) refute Mix.SCM.Git.equal?([git: "foo"], [git: "bar"]) @@ -22,6 +22,16 @@ defmodule Mix.SCM.GitTest do assert Mix.SCM.Git.equal?([git: "foo", lock: 1], [git: "foo", lock: 2]) end + test "raises about conflicting Git checkout options" do + assert_raise Mix.Error, ~r/You should specify only one of branch, ref or tag/, fn -> + Mix.SCM.Git.accepts_options(nil, [git: "/repo", branch: "master", tag: "0.1.0"]) + end + + assert_raise Mix.Error, ~r/You should specify only one of branch, ref or tag/, fn -> + Mix.SCM.Git.accepts_options(nil, [git: "/repo", branch: "master", branch: "develop"]) + end + end + + defp lock(opts \\ []) do [lock: {:git, "/repo", "abcdef0123456789", opts}] end diff --git a/lib/mix/test/mix/scm_test.exs b/lib/mix/test/mix/scm_test.exs index 4b04a99ba5f..e6980d03172 100644 --- a/lib/mix/test/mix/scm_test.exs +++ b/lib/mix/test/mix/scm_test.exs @@ -5,16 +5,16 @@ defmodule Mix.SCMTest do setup do available = 
Mix.SCM.available - on_exit fn -> Application.put_env(:mix, :scm, available) end + on_exit fn -> Mix.State.put(:scm, available) end :ok end - test "prepends a SCM" do + test "prepends an SCM" do Mix.SCM.prepend(Hello) assert Enum.at(Mix.SCM.available, 0) == Hello end - test "appends a SCM" do + test "appends an SCM" do Mix.SCM.append(Hello) assert Enum.at(Mix.SCM.available, -1) == Hello end diff --git a/lib/mix/test/mix/shell/io_test.exs b/lib/mix/test/mix/shell/io_test.exs index 697f53d3da8..b960ed0dbf8 100644 --- a/lib/mix/test/mix/shell/io_test.exs +++ b/lib/mix/test/mix/shell/io_test.exs @@ -28,4 +28,21 @@ defmodule Mix.Shell.IOTest do assert capture_io("n", fn -> refute yes?("Ok?") end) assert capture_io("", fn -> refute yes?("Ok?") end) end + + test "runs a given command" do + nl = os_newline() + + assert capture_io("", fn -> assert cmd("echo hello") == 0 end) == "hello" <> nl + + will_print_sample() + assert capture_io("", fn -> assert cmd("echo hello", print_app: false) == 0 end) == + "hello" <> nl + assert capture_io("", fn -> assert cmd("echo hello") == 0 end) == + "==> sample\nhello" <> nl + end + + defp will_print_sample do + Mix.Project.push nil + Mix.Project.push MixTest.Case.Sample + end end diff --git a/lib/mix/test/mix/shell/quiet_test.exs b/lib/mix/test/mix/shell/quiet_test.exs new file mode 100644 index 00000000000..626429be368 --- /dev/null +++ b/lib/mix/test/mix/shell/quiet_test.exs @@ -0,0 +1,44 @@ +Code.require_file "../../test_helper.exs", __DIR__ + +defmodule Mix.Shell.QuietTest do + use MixTest.Case + + import ExUnit.CaptureIO + import Mix.Shell.Quiet + + test "prints nothing to stdio when info is invoked" do + assert capture_io(fn -> + info "hello" + end) == "" + end + + test "prints error message to stderr" do + assert capture_io(:stderr, fn -> + error "hello" + end) =~ "hello" + end + + test "asks the user with yes?" do + assert capture_io("\n", fn -> yes?("Ok?") end) == "Ok? 
[Yn] " + assert capture_io("\n", fn -> assert yes?("Ok?") end) + assert capture_io("Yes", fn -> assert yes?("Ok?") end) + assert capture_io("yes", fn -> assert yes?("Ok?") end) + assert capture_io("y", fn -> assert yes?("Ok?") end) + + assert capture_io("n", fn -> refute yes?("Ok?") end) + assert capture_io("", fn -> refute yes?("Ok?") end) + end + + test "runs a given command" do + assert capture_io("", fn -> assert cmd("echo hello") == 0 end) == "" + + wont_print_sample() + assert capture_io("", fn -> assert cmd("echo hello", print_app: false) == 0 end) == "" + assert capture_io("", fn -> assert cmd("echo hello") == 0 end) == "" + end + + defp wont_print_sample do + Mix.Project.push nil + Mix.Project.push MixTest.Case.Sample + end +end diff --git a/lib/mix/test/mix/shell_test.exs b/lib/mix/test/mix/shell_test.exs index df9a0335c08..00ee72cfc00 100644 --- a/lib/mix/test/mix/shell_test.exs +++ b/lib/mix/test/mix/shell_test.exs @@ -4,11 +4,11 @@ defmodule Mix.ShellTest do use MixTest.Case defp capture_io(somefunc) do - ExUnit.CaptureIO.capture_io(somefunc) |> String.replace("\r\n","\n") + ExUnit.CaptureIO.capture_io(somefunc) |> String.replace("\r\n", "\n") end defp capture_io(from, somefunc) do - ExUnit.CaptureIO.capture_io(from, somefunc) |> String.replace("\r\n","\n") + ExUnit.CaptureIO.capture_io(from, somefunc) |> String.replace("\r\n", "\n") end setup do @@ -24,28 +24,33 @@ defmodule Mix.ShellTest do assert_received {:mix_shell, :info, ["abc"]} assert_received {:mix_shell, :error, ["def"]} - send self, {:mix_shell_input, :prompt, "world"} + send self(), {:mix_shell_input, :prompt, "world"} assert Mix.shell.prompt("hello?") == "world" assert_received {:mix_shell, :prompt, ["hello?"]} - send self, {:mix_shell_input, :yes?, true} + send self(), {:mix_shell_input, :yes?, true} assert Mix.shell.yes?("hello?") assert_received {:mix_shell, :yes?, ["hello?"]} assert Mix.shell.cmd("echo first") == 0 - - nl = os_newline + + nl = os_newline() assert_received {:mix_shell, :run, ["first" <> ^nl]} end - test "shell io" do + test "shell IO" do Mix.shell Mix.Shell.IO assert capture_io(fn -> Mix.shell.info "abc" end) == "abc\n" - assert capture_io(:stderr, fn -> Mix.shell.error "def" end) == - (IO.ANSI.escape "%{red,bright}def") <> "\n" + if IO.ANSI.enabled? do + assert capture_io(:stderr, fn -> Mix.shell.error "def" end) == + "#{IO.ANSI.red}#{IO.ANSI.bright}def#{IO.ANSI.reset}\n" + else + assert capture_io(:stderr, fn -> Mix.shell.error "def" end) == + "def\n" + end assert capture_io("world", fn -> assert Mix.shell.prompt("hello?") == "world" end) == "hello? " @@ -64,4 +69,12 @@ defmodule Mix.ShellTest do assert Mix.shell.cmd("echo first && echo second") == 0 end) |> String.replace(" \n", "\n")) == "first\nsecond\n" end + + test "shell cmd ignores output if desired" do + Mix.shell Mix.Shell.IO + + assert capture_io(fn -> + assert Mix.shell.cmd("echo first && echo second", quiet: true) == 0 + end) == "" + end end diff --git a/lib/mix/test/mix/task_test.exs b/lib/mix/test/mix/task_test.exs index 48f08fe8587..d7a527ec4de 100644 --- a/lib/mix/test/mix/task_test.exs +++ b/lib/mix/test/mix/task_test.exs @@ -1,87 +1,204 @@ Code.require_file "../test_helper.exs", __DIR__ -path = MixTest.Case.tmp_path("beams") -File.rm_rf!(path) -File.mkdir_p!(path) - -write_beam = fn {:module, name, bin, _} -> - path - |> Path.join(Atom.to_string(name) <> ".beam") - |> File.write!(bin) -end - -defmodule Mix.Tasks.Hello do - use Mix.Task - @shortdoc "This is short documentation, see" - - @moduledoc """ - A test task. 
- """ - - def run(_) do - "Hello, World!" - end -end |> write_beam.() - -defmodule Mix.Tasks.Invalid do -end |> write_beam.() - defmodule Mix.TaskTest do use MixTest.Case - setup do - Code.prepend_path unquote(path) - :ok + defmodule SampleProject do + def project do + [app: :sample, version: "0.0.1"] + end end - test :run do + test "run/2" do assert Mix.Task.run("hello") == "Hello, World!" assert Mix.Task.run("hello") == :noop - assert_raise Mix.NoTaskError, "The task unknown could not be found", fn -> + assert_raise Mix.NoTaskError, "The task \"unknown\" could not be found", fn -> Mix.Task.run("unknown") end - assert_raise Mix.InvalidTaskError, "The task invalid does not export run/1", fn -> + assert_raise Mix.NoTaskError, "The task \"helli\" could not be found. Did you mean \"hello\"?", fn -> + Mix.Task.run("helli") + end + + assert_raise Mix.InvalidTaskError, "The task \"invalid\" does not export run/1", fn -> Mix.Task.run("invalid") end + + misnamed_message = + "The task \"acronym.http\" could not be found because the module is named " <> + "Mix.Tasks.Acronym.HTTP instead of Mix.Tasks.Acronym.Http as expected. " <> + "Please rename it and try again" + assert_raise Mix.NoTaskError, misnamed_message, fn -> + Mix.Task.run("acronym.http") + end + end + + test "run/2 converts OptionParser.ParseError into Mix errors" do + assert_raise Mix.Error, + "Could not invoke task \"hello\": 1 error found!\n--unknown : Unknown option", fn -> + Mix.Task.run("hello", ["--parser", "--unknown"]) + end + + Mix.Task.clear + + assert_raise Mix.Error, + "Could not invoke task \"hello\": 1 error found!\n--int : Expected type integer, got \"foo\"", fn -> + Mix.Task.run("hello", ["--parser", "--int", "foo"]) + end + end + + test "run/2 outputs task debug info if Mix.debug? is true" do + Mix.shell Mix.Shell.IO + Mix.debug(true) + + assert ExUnit.CaptureIO.capture_io(fn -> Mix.Task.run("hello") end) =~ + "** Running mix hello" + after + Mix.shell(Mix.Shell.Process) + Mix.debug(false) + end + + test "run/2 tries to load deps if task is missing", context do + in_tmp context.test, fn -> + Mix.Project.push(SampleProject, "sample") + + {:module, _, bin, _} = + defmodule Elixir.Mix.Tasks.TaskHello do + use Mix.Task + def run(_), do: "Hello, World" + end + :code.purge(Mix.Tasks.TaskHello) + :code.delete(Mix.Tasks.TaskHello) + + assert_raise Mix.NoTaskError, fn -> + Mix.Task.run("task_hello") + end + + # Clean up the tasks and copy it into deps + Mix.TasksServer.clear + File.mkdir_p!("_build/dev/lib/sample/ebin") + File.write!("_build/dev/lib/sample/ebin/Elixir.Mix.Tasks.TaskHello.beam", bin) + + # Task was found from deps loadpaths + assert Mix.Task.run("task_hello") == "Hello, World" + + # The compile task should not have run yet + assert Mix.TasksServer.run({:task, "compile", Mix.Project.get}) + end end - test :clear do - Mix.Task.run("hello") - assert {"hello", nil} in Mix.Task.clear + test "run/2 tries to compile if task is missing", context do + in_tmp context.test, fn -> + Mix.Project.push(SampleProject, "sample") + + assert_raise Mix.NoTaskError, fn -> + Mix.Task.run("unknown") + end + + # Check if compile task have run + refute Mix.TasksServer.run({:task, "compile", Mix.Project.get}) + end + end + + test "clear/0" do + assert Mix.Task.run("hello") == "Hello, World!" + Mix.Task.clear + assert Mix.Task.run("hello") == "Hello, World!" end - test :reenable do + test "reenable/1" do assert Mix.Task.run("hello") == "Hello, World!" Mix.Task.reenable("hello") assert Mix.Task.run("hello") == "Hello, World!" 
end - test :get! do + test "reenable/1 for recursive inside umbrella" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project(:umbrella, ".", fn _ -> + assert [:ok, :ok] = Mix.Task.run "clean" + assert :noop = Mix.Task.run "clean" + + Mix.Task.reenable "clean" + assert [:ok, :ok] = Mix.Task.run "clean" + assert :noop = Mix.Task.run "clean" + end) + end + end + + test "reenable/1 for non-recursive inside umbrella" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project(:umbrella, ".", fn _ -> + assert [:ok, :ok] = Mix.Task.run "clean" + assert :ok = Mix.Task.run "loadpaths" # loadpaths is not recursive + end) + end + end + + test "rerun/1" do + assert Mix.Task.run("hello") == "Hello, World!" + assert Mix.Task.rerun("hello") == "Hello, World!" + end + + test "rerun/1 for umbrella" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project(:umbrella, ".", fn _ -> + assert [:ok, :ok] = Mix.Task.run "clean" + assert :noop = Mix.Task.run "clean" + assert [:ok, :ok] = Mix.Task.rerun "clean" + end) + end + end + + test "get!" do assert Mix.Task.get!("hello") == Mix.Tasks.Hello - assert_raise Mix.NoTaskError, "The task unknown could not be found", fn -> + assert_raise Mix.NoTaskError, "The task \"unknown\" could not be found", fn -> Mix.Task.get!("unknown") end - assert_raise Mix.InvalidTaskError, "The task invalid does not export run/1", fn -> + assert_raise Mix.InvalidTaskError, "The task \"invalid\" does not export run/1", fn -> Mix.Task.get!("invalid") end end - test :all_modules do + test "alias?/1" do + refute Mix.Task.alias?(:sample) + refute Mix.Task.alias?("sample") + + Mix.Project.push MixTest.Case.Sample + assert Mix.Task.alias?(:sample) + assert Mix.Task.alias?("sample") + refute Mix.Task.alias?("another") + after + Mix.Project.pop + end + + test "all_modules/0" do Mix.Task.load_all modules = Mix.Task.all_modules assert Mix.Tasks.Hello in modules assert Mix.Tasks.Compile in modules end - test :moduledoc do + test "moduledoc/1" do + Code.prepend_path MixTest.Case.tmp_path("beams") assert Mix.Task.moduledoc(Mix.Tasks.Hello) == "A test task.\n" end - test :shortdoc do + test "preferred_cli_env/1 returns nil for missing task" do + assert Mix.Task.preferred_cli_env(:no_task) == nil + end + + test "preferred_cli_env/1 returns nil when task does not have `preferred_cli_env` attribute" do + assert Mix.Task.preferred_cli_env(:deps) == nil + end + + test "preferred_cli_env/1 returns specified `preferred_cli_env` attribute" do + assert Mix.Task.preferred_cli_env(:test) == :test + end + + test "shortdoc/1" do assert Mix.Task.shortdoc(Mix.Tasks.Hello) == "This is short documentation, see" end end diff --git a/lib/mix/test/mix/tasks/app.start_test.exs b/lib/mix/test/mix/tasks/app.start_test.exs index dc08870afbf..266800f634a 100644 --- a/lib/mix/test/mix/tasks/app.start_test.exs +++ b/lib/mix/test/mix/tasks/app.start_test.exs @@ -5,56 +5,21 @@ defmodule Mix.Tasks.App.StartTest do defmodule AppStartSample do def project do - [app: :app_start_sample, version: "0.1.0"] + [app: :app_start_sample, version: "0.1.0", start_permanent: true] end - end - defmodule WrongElixirProject do - def project do - [app: :error, version: "0.1.0", elixir: "~> 0.8.1"] + def application do + [applications: [:logger]] end end - defmodule InvalidElixirRequirement do + defmodule WrongElixirProject do def project do - [app: :error, version: "0.1.0", elixir: "~> ~> 0.8.1"] - end - end - - setup config do - if app = config[:app] do - :error_logger.tty(false) - - on_exit 
fn -> - :application.stop(app) - :application.unload(app) - :error_logger.tty(true) - end - end - - :ok - end - - test "recompiles project if elixir version changed" do - Mix.Project.push MixTest.Case.Sample - - in_fixture "no_mixfile", fn -> - Mix.Tasks.Compile.run [] - purge [A, B, C] - - assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} - assert System.version == Mix.Dep.Lock.elixir_vsn - - Mix.Task.clear - File.write!("_build/dev/lib/sample/.compile.lock", "the_past") - File.touch!("_build/dev/lib/sample/.compile.lock", {{2010, 1, 1}, {0, 0, 0}}) - - Mix.Tasks.App.Start.run ["--no-start"] - assert System.version == Mix.Dep.Lock.elixir_vsn - assert File.stat!("_build/dev/lib/sample/.compile.lock").mtime > {{2010, 1, 1}, {0, 0, 0}} + [app: :error, version: "0.1.0", elixir: "~> 0.8.1"] end end + @tag apps: [:app_start_sample] test "compiles and starts the project" do Mix.Project.push AppStartSample @@ -63,46 +28,80 @@ defmodule Mix.Tasks.App.StartTest do Mix.Tasks.App.Start.run ["--no-compile"] end + refute List.keyfind(Application.started_applications, :logger, 0) + Application.start(:logger) + Mix.Tasks.App.Start.run ["--no-start"] assert File.regular?("_build/dev/lib/app_start_sample/ebin/Elixir.A.beam") assert File.regular?("_build/dev/lib/app_start_sample/ebin/app_start_sample.app") - refute List.keyfind(:application.loaded_applications, :app_start_sample, 0) + + refute List.keyfind(Application.started_applications, :app_start_sample, 0) + assert List.keyfind(Application.started_applications, :logger, 0) Mix.Tasks.App.Start.run [] - assert List.keyfind(:application.loaded_applications, :app_start_sample, 0) + assert List.keyfind(Application.started_applications, :app_start_sample, 0) + assert List.keyfind(Application.started_applications, :logger, 0) end end - test "validates the Elixir version requirement" do + + @tag apps: [:app_start_sample, :app_loaded_sample] + test "start checks for invalid configuration", context do + Mix.Project.push AppStartSample + + in_tmp context.test, fn -> + :ok = :application.load({:application, :app_loaded_sample, [vsn: '1.0.0', env: []]}) + Mix.ProjectStack.configured_applications([:app_start_sample, :app_unknown_sample, :app_loaded_sample]) + + Mix.Tasks.Compile.run([]) + Mix.Tasks.App.Start.run([]) + + assert_received {:mix_shell, :error, ["You have configured application :app_unknown_sample" <> _]} + refute_received {:mix_shell, :error, ["You have configured application :app_loaded_sample" <> _]} + end + end + + @tag apps: [:error] + test "validates Elixir version requirement", context do + Mix.ProjectStack.post_config elixir: "~> ~> 0.8.1" Mix.Project.push WrongElixirProject - in_fixture "no_mixfile", fn -> - assert_raise Mix.ElixirVersionError, ~r/You're trying to run :error on Elixir/, fn -> + in_tmp context.test, fn -> + assert_raise Mix.Error, ~r"Invalid Elixir version requirement", fn -> Mix.Tasks.App.Start.run ["--no-start"] end end end - test "validates invalid Elixir version requirement" do - Mix.Project.push InvalidElixirRequirement + @tag apps: [:error] + test "validates the Elixir version with requirement", context do + Mix.Project.push WrongElixirProject - in_fixture "no_mixfile", fn -> - assert_raise Mix.Error, ~r"Invalid Elixir version requirement", fn -> + in_tmp context.test, fn -> + assert_raise Mix.ElixirVersionError, ~r/You're trying to run :error on Elixir/, fn -> Mix.Tasks.App.Start.run ["--no-start"] end end end - test "does not validate the Elixir version requirement when disabled" do + @tag apps: [:error] + test 
"does not validate the Elixir version with requirement when disabled", context do Mix.Project.push WrongElixirProject - in_fixture "no_mixfile", fn -> + in_tmp context.test, fn -> Mix.Tasks.App.Start.run ["--no-start", "--no-elixir-version-check"] end end test "start does nothing if app is nil" do - assert Mix.Tasks.App.Start.start(nil) == :error + assert Mix.Tasks.App.Start.start([app: nil], []) == :ok + end + + test "allows type to be configured" do + assert Mix.Tasks.App.Start.type([], [permanent: true]) == :permanent + assert Mix.Tasks.App.Start.type([], [temporary: true]) == :temporary + assert Mix.Tasks.App.Start.type([start_permanent: true], []) == :permanent + assert Mix.Tasks.App.Start.type([], []) == :temporary end defmodule ReturnSample do @@ -121,46 +120,56 @@ defmodule Mix.Tasks.App.StartTest do def start(_type, return), do: return end - @tag app: :return_sample - test "start points to report on error" do + @tag apps: [:return_sample] + test "start points to report on error", context do Mix.Project.push ReturnSample - in_fixture "no_mixfile", fn -> + in_tmp context.test, fn -> Process.put(:application_definition, mod: {ReturnApp, {:error, :bye}}) Mix.Tasks.Compile.run [] - assert_raise Mix.Error, "Could not start application return_sample: Mix.Tasks.App.StartTest.ReturnApp.start(:normal, {:error, :bye}) returned an error: :bye", - fn -> - Mix.Tasks.App.Start.start(:return_sample) + message = "Could not start application return_sample: " <> + "Mix.Tasks.App.StartTest.ReturnApp.start(:normal, {:error, :bye}) " <> + "returned an error: :bye" + + assert_raise Mix.Error, message, fn -> + Mix.Tasks.App.Start.start([app: :return_sample], []) end end end - @tag app: :return_sample - test "start points to report on exception error" do + @tag apps: [:return_sample] + test "start points to report on exception error", context do Mix.Project.push ReturnSample - in_fixture "no_mixfile", fn -> + in_tmp context.test, fn -> Process.put(:application_definition, mod: {ReturnApp, {:error, {:badarg, [{ReturnApp, :start, 2, []}] }}}) Mix.Tasks.Compile.run [] - assert_raise Mix.Error, "Could not start application return_sample: Mix.Tasks.App.StartTest.ReturnApp.start(:normal, {:error, {:badarg, [{Mix.Tasks.App.StartTest.ReturnApp, :start, 2, []}]}}) returned an error: an exception was raised:\n" <> - " ** (ArgumentError) argument error\n" <> - " Mix.Tasks.App.StartTest.ReturnApp.start/2", fn -> - Mix.Tasks.App.Start.start(:return_sample) + message = "Could not start application return_sample: " <> + "Mix.Tasks.App.StartTest.ReturnApp.start(:normal, {:error, {:badarg, [{Mix.Tasks.App.StartTest.ReturnApp, :start, 2, []}]}}) " <> + "returned an error: an exception was raised:\n" <> + " ** (ArgumentError) argument error\n" <> + " Mix.Tasks.App.StartTest.ReturnApp.start/2" + + assert_raise Mix.Error, message, fn -> + Mix.Tasks.App.Start.start([app: :return_sample], []) end end end - @tag app: :return_sample - test "start points to report on bad return" do + @tag apps: [:return_sample] + test "start points to report on bad return", context do Mix.Project.push ReturnSample - in_fixture "no_mixfile", fn -> + in_tmp context.test, fn -> Process.put(:application_definition, mod: {ReturnApp, :bad}) Mix.Tasks.Compile.run [] - assert_raise Mix.Error, "Could not start application return_sample: Mix.Tasks.App.StartTest.ReturnApp.start(:normal, :bad) returned a bad value: :bad", - fn -> - Mix.Tasks.App.Start.start(:return_sample) + message = "Could not start application return_sample: " <> + 
"Mix.Tasks.App.StartTest.ReturnApp.start(:normal, :bad) " <> + "returned a bad value: :bad" + + assert_raise Mix.Error, message, fn -> + Mix.Tasks.App.Start.start([app: :return_sample], []) end end end @@ -181,32 +190,36 @@ defmodule Mix.Tasks.App.StartTest do def start(_type, reason), do: exit(reason) end - @tag app: :exit_sample - test "start points to report on exit" do + @tag apps: [:exit_sample] + test "start points to report on exit", context do Mix.Project.push ExitSample - in_fixture "no_mixfile", fn -> + in_tmp context.test, fn -> Process.put(:application_definition, mod: {ExitApp, :bye}) Mix.Tasks.Compile.run [] - assert_raise Mix.Error, "Could not start application exit_sample: exited in: Mix.Tasks.App.StartTest.ExitApp.start(:normal, :bye)\n" <> - " ** (EXIT) :bye", - fn -> - Mix.Tasks.App.Start.start(:exit_sample) + message = "Could not start application exit_sample: exited in: " <> + "Mix.Tasks.App.StartTest.ExitApp.start(:normal, :bye)\n" <> + " ** (EXIT) :bye" + + assert_raise Mix.Error, message, fn -> + Mix.Tasks.App.Start.start([app: :exit_sample], []) end end end - @tag app: :exit_sample - test "start points to report on normal exit" do + @tag apps: [:exit_sample] + test "start points to report on normal exit", context do Mix.Project.push ExitSample - in_fixture "no_mixfile", fn -> + in_tmp context.test, fn -> Process.put(:application_definition, mod: {ExitApp, :normal}) Mix.Tasks.Compile.run [] - assert_raise Mix.Error, "Could not start application exit_sample: exited in: Mix.Tasks.App.StartTest.ExitApp.start(:normal, :normal)\n" <> - " ** (EXIT) normal", - fn -> - Mix.Tasks.App.Start.start(:exit_sample) + message = "Could not start application exit_sample: exited in: " <> + "Mix.Tasks.App.StartTest.ExitApp.start(:normal, :normal)\n" <> + " ** (EXIT) normal" + + assert_raise Mix.Error, message, fn -> + Mix.Tasks.App.Start.start([app: :exit_sample], []) end end end diff --git a/lib/mix/test/mix/tasks/app.tree_test.exs b/lib/mix/test/mix/tasks/app.tree_test.exs new file mode 100644 index 00000000000..7fea640860d --- /dev/null +++ b/lib/mix/test/mix/tasks/app.tree_test.exs @@ -0,0 +1,108 @@ +Code.require_file "../../test_helper.exs", __DIR__ + +defmodule Mix.Tasks.App.TreeTest do + use MixTest.Case + + defmodule AppDepsSample do + def project do + [app: :test, version: "0.1.0", start_permanent: true] + end + + def application do + [applications: [:logger, :app_deps_sample]] + end + end + + @tag apps: [:test, :app_deps_sample, :app_deps2_sample, :app_deps3_sample, :app_deps4_sample] + test "shows the application tree", context do + Mix.Project.push AppDepsSample + + in_tmp context.test, fn -> + load_apps() + Mix.Tasks.App.Tree.run(["--format", "pretty"]) + + assert_received {:mix_shell, :info, ["test"]} + assert_received {:mix_shell, :info, ["└── app_deps_sample"]} + assert_received {:mix_shell, :info, [" ├── app_deps2_sample"]} + assert_received {:mix_shell, :info, [" │ └── app_deps4_sample (included)"]} + assert_received {:mix_shell, :info, [" └── app_deps3_sample"]} + end + end + + @tag apps: [:foo, :bar] + test "show the application tree for umbrella apps" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project(:umbrella, ".", fn _ -> + Mix.Task.run "app.tree", ["--format", "pretty"] + assert_received {:mix_shell, :info, ["├── elixir"]} + assert_received {:mix_shell, :info, ["foo"]} + assert_received {:mix_shell, :info, [" └── elixir"]} + end) + end + end + + @tag apps: [:test, :app_deps_sample, :app_deps2_sample, :app_deps3_sample, 
:app_deps4_sample] + test "shows the given application tree", context do + Mix.Project.push AppDepsSample + + in_tmp context.test, fn -> + assert_raise Mix.Error, "could not find application app_deps_sample", fn -> + Mix.Tasks.App.Tree.run(["--format", "pretty", "app_deps_sample"]) + end + + load_apps() + Mix.Tasks.App.Tree.run(["--format", "pretty", "app_deps_sample"]) + + assert_received {:mix_shell, :info, ["app_deps_sample"]} + assert_received {:mix_shell, :info, ["├── app_deps2_sample"]} + assert_received {:mix_shell, :info, ["│ └── app_deps4_sample (included)"]} + assert_received {:mix_shell, :info, ["└── app_deps3_sample"]} + end + end + + @tag apps: [:test, :app_deps_sample, :app_deps2_sample, :app_deps3_sample, :app_deps4_sample] + test "shows the application dependency tree excluding applications", context do + Mix.Project.push AppDepsSample + + in_tmp context.test, fn -> + load_apps() + Mix.Tasks.App.Tree.run(["--format", "pretty", "--exclude", "app_deps4_sample", "--exclude", "app_deps3_sample"]) + + assert_received {:mix_shell, :info, ["test"]} + assert_received {:mix_shell, :info, ["└── app_deps_sample"]} + assert_received {:mix_shell, :info, [" └── app_deps2_sample"]} + refute_received {:mix_shell, :info, [" │ └── app_deps4_sample (included)"]} + refute_received {:mix_shell, :info, [" └── app_deps3_sample"]} + end + end + + @tag apps: [:test, :app_deps_sample, :app_deps2_sample, :app_deps3_sample, :app_deps4_sample] + test "shows the application tree in dot form", context do + Mix.Project.push AppDepsSample + + in_tmp context.test, fn -> + load_apps() + Mix.Tasks.App.Tree.run(["--format", "dot"]) + + assert File.read!("app_tree.dot") == """ + digraph "application tree" { + "test" + "test" -> "elixir" + "test" -> "logger" + "logger" -> "elixir" + "test" -> "app_deps_sample" + "app_deps_sample" -> "app_deps2_sample" + "app_deps2_sample" -> "app_deps4_sample" [label="(included)"] + "app_deps_sample" -> "app_deps3_sample" + } + """ + end + end + + def load_apps() do + :ok = :application.load({:application, :app_deps4_sample, [vsn: '1.0.0', env: []]}) + :ok = :application.load({:application, :app_deps3_sample, [vsn: '1.0.0', env: []]}) + :ok = :application.load({:application, :app_deps2_sample, [vsn: '1.0.0', env: [], included_applications: [:app_deps4_sample]]}) + :ok = :application.load({:application, :app_deps_sample, [vsn: '1.0.0', env: [], applications: [:app_deps2_sample, :app_deps3_sample]]}) + end +end diff --git a/lib/mix/test/mix/tasks/archive_test.exs b/lib/mix/test/mix/tasks/archive_test.exs new file mode 100644 index 00000000000..40838d92d82 --- /dev/null +++ b/lib/mix/test/mix/tasks/archive_test.exs @@ -0,0 +1,221 @@ +Code.require_file "../../test_helper.exs", __DIR__ + +defmodule Mix.Tasks.ArchiveTest do + use MixTest.Case + + defmodule ArchiveProject do + def project do + [app: :archive, version: "0.1.0", elixir: "~> 0.1.0"] + end + end + + defmodule ArchiveProject2 do + def project do + [app: :archive, version: "0.2.0"] + end + end + + setup do + File.rm_rf! tmp_path("userhome") + System.put_env "MIX_ARCHIVES", tmp_path("userhome/.mix/archives/") + Mix.Project.push(ArchiveProject) + :ok + end + + test "archive build" do + in_fixture "archive", fn -> + Mix.Tasks.Archive.Build.run ["--no-elixir-version-check"] + assert File.regular? 
'archive-0.1.0.ez' + assert has_zip_file?('archive-0.1.0.ez', 'archive-0.1.0/.elixir') + assert has_zip_file?('archive-0.1.0.ez', 'archive-0.1.0/priv/not_really_an.so') + assert has_zip_file?('archive-0.1.0.ez', 'archive-0.1.0/ebin/Elixir.Mix.Tasks.Local.Sample.beam') + assert has_zip_file?('archive-0.1.0.ez', 'archive-0.1.0/ebin/archive.app') + end + end + + test "archive install" do + in_fixture "archive", fn -> + # Build and install archive + Mix.Tasks.Archive.Build.run ["--no-elixir-version-check"] + assert_received {:mix_shell, :info, ["Generated archive \"archive-0.1.0.ez\" with MIX_ENV=dev"]} + assert File.regular? 'archive-0.1.0.ez' + + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Archive.Install.run [] + refute File.regular? tmp_path("userhome/.mix/archives/archive-0.1.0.ez") + assert File.dir? tmp_path("userhome/.mix/archives/archive-0.1.0/archive-0.1.0/ebin") + + # Check that the version warning is printed after installation + version_error = "warning: the archive archive-0.1.0 requires Elixir \"~> 0.1.0\" but you are running on v#{System.version}" + assert_received {:mix_shell, :error, [^version_error]} + + archive = tmp_path("userhome/.mix/archives/archive-0.1.0/archive-0.1.0/ebin") + assert to_charlist(archive) in :code.get_path + + # Try to override it with URL + send self(), {:mix_shell_input, :yes?, false} + Mix.Tasks.Archive.Install.run ["https://example.com/archive-0.1.0?hello.ez"] + assert_received {:mix_shell, :yes?, ["Found existing entry: " <> _]} + + # Loading the archive should emit warning again + Mix.Local.append_archives + assert_received {:mix_shell, :error, [^version_error]} + + # List archive + Mix.Tasks.Local.run [] + info = "mix local.sample # A local install sample" + assert_received {:mix_shell, :info, [^info]} + + Mix.Tasks.Archive.run [] + assert_received {:mix_shell, :info, ["* archive-0.1.0"]} + + # Run archived task + Mix.Task.run "local.sample" + assert_received {:mix_shell, :info, ["sample"]} + end + end + + test "archive install invalid file" do + in_fixture "archive", fn -> + file_name = "invalid-archive-0.1.0.ez" + assert File.regular?(file_name) + + send self(), {:mix_shell_input, :yes?, true} + assert_raise Mix.Error, ~r/invalid archive file/, fn -> + Mix.Tasks.Archive.Install.run [file_name] + end + end + end + + test "archive install missing file" do + assert_raise Mix.Error, ~r[Expected "./unlikely-to-exist-0.1.0.ez" to be a URL or a local file path], fn -> + Mix.Tasks.Archive.Install.run ["./unlikely-to-exist-0.1.0.ez"] + end + end + + test "archive update" do + in_fixture "archive", fn() -> + # Install previous version + Mix.Tasks.Archive.Build.run ["--no-elixir-version-check"] + assert File.regular? "archive-0.1.0.ez" + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Archive.Install.run [] + assert_received {:mix_shell, :error, [_]} + + # Build new version + Mix.Project.push(ArchiveProject2) + Mix.Tasks.Archive.Build.run ["--no-compile"] + assert File.regular? "archive-0.2.0.ez" + assert_received {:mix_shell, :info, ["Generated archive \"archive-0.2.0.ez\" with MIX_ENV=dev"]} + + # Install new version + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Archive.Install.run [] + refute File.regular? tmp_path("userhome/.mix/archives/archive-0.2.0.ez") + assert File.dir? tmp_path("userhome/.mix/archives/archive-0.2.0/archive-0.2.0/ebin") + + # Re-install current version should not change system + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Archive.Install.run [] + refute File.regular? 
tmp_path("userhome/.mix/archives/archive-0.2.0.ez") + assert File.dir? tmp_path("userhome/.mix/archives/archive-0.2.0/archive-0.2.0/ebin") + + # Try to install a missing version does not remove archive + assert_raise Mix.Error, fn -> + Mix.Tasks.Archive.Install.run ["./archive-0.0.0.ez"] + end + + assert File.dir? tmp_path("userhome/.mix/archives/archive-0.2.0/archive-0.2.0/ebin") + refute File.regular? tmp_path("userhome/.mix/archives/archive-0.1.0.ez") + + # Load archive without warnings because there is no :elixir requirement in mix.exs + Mix.Local.append_archives + refute_received {:mix_shell, :error, [_]} + + # Check uninstall confirmation + send self(), {:mix_shell_input, :yes?, false} + Mix.Tasks.Archive.Uninstall.run ["archive-0.2.0"] + assert File.dir? tmp_path("userhome/.mix/archives/archive-0.2.0/archive-0.2.0/ebin") + + # Remove it! + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Archive.Uninstall.run ["archive-0.2.0"] + refute File.dir? tmp_path("userhome/.mix/archives/archive-0.2.0/archive-0.2.0/ebin") + end + end + + defp has_zip_file?(archive, name) do + {:ok, files} = :zip.list_dir(archive) + Enum.find(files, &match?({:zip_file, ^name, _, _, _, _}, &1)) + end + + test "archive checksum" do + in_fixture "archive", fn() -> + Mix.Tasks.Archive.Build.run ["--no-elixir-version-check"] + assert File.regular? "archive-0.1.0.ez" + send self(), {:mix_shell_input, :yes?, true} + + # Install with wrong checksum + assert_raise Mix.Error, ~r"Data does not match the given SHA-512 checksum", fn -> + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Archive.Install.run ["--sha512", "wrong"] + end + + # Install with correct checksum + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Archive.Install.run ["--sha512", sha512("archive-0.1.0.ez")] + refute File.regular? tmp_path("userhome/.mix/archives/archive-0.1.0.ez") + assert File.dir? tmp_path("userhome/.mix/archives/archive-0.1.0/archive-0.1.0/ebin") + end + end + + test "archive check" do + # Install the archive + in_fixture "archive", fn() -> + Mix.Tasks.Archive.Build.run ["--no-elixir-version-check"] + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Archive.Install.run [] + end + + assert_raise Mix.Error, ~r/Expected archive to be in the format/, fn -> + archive_check [:archive] + end + + assert_raise Mix.Error, ~r/Archive "archive" could not be found/, fn -> + archive_check [{:archive, ">= 1.0.0"}] + end + + # Load the archive + Mix.Local.append_archives + + assert_raise Mix.Error, ~r/Archive \"archive-0.1.0\" does not match requirement >= 1.0.0/, fn -> + archive_check [{:archive, ">= 1.0.0"}] + end + + archive_check [{:archive, ">= 0.0.0"}] + end + + defp archive_check(archives) do + Mix.Project.pop + Mix.ProjectStack.post_config archives: archives + Mix.Project.push MixTest.Case.Sample + Mix.Tasks.Archive.Check.run([]) + end + + defp sha512(file) do + Base.encode16 :crypto.hash(:sha512, File.read!(file)), case: :lower + end + + test "archive.install from Git" do + in_fixture "git_repo", fn -> + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Archive.Install.run ["git", File.cwd!()] + assert_received {:mix_shell, :info, ["Generated archive \"git_repo-0.1.0.ez\" with MIX_ENV=prod"]} + + refute File.regular? tmp_path("userhome/.mix/archives/git_repo-0.1.0.ez") + assert File.dir? 
tmp_path("userhome/.mix/archives/git_repo-0.1.0/git_repo-0.1.0/ebin") + end + after + purge [GitRepo, GitRepo.Mixfile] + end +end diff --git a/lib/mix/test/mix/tasks/clean_test.exs b/lib/mix/test/mix/tasks/clean_test.exs index a0efb4565da..0f35b3ca5db 100644 --- a/lib/mix/test/mix/tasks/clean_test.exs +++ b/lib/mix/test/mix/tasks/clean_test.exs @@ -21,37 +21,32 @@ defmodule Mix.Tasks.CleanTest do :ok end - test "removes the build application" do + test "cleans the application build" do in_fixture "deps_status", fn -> - Mix.Tasks.Compile.run ["--no-deps"] - assert File.exists?("_build/dev/lib/sample") + File.mkdir_p! "_build/dev/consolidated" + File.mkdir_p! "_build/dev/lib/sample" + File.mkdir_p! "_build/test/lib/sample" + File.mkdir_p! "_build/dev/lib/ok" Mix.Tasks.Clean.run [] + refute File.exists?("_build/dev/consolidated") refute File.exists?("_build/dev/lib/sample") + refute File.exists?("_build/test/lib/sample") + assert File.exists?("_build/dev/lib/ok") end end - test "cleans deps" do + test "cleans dependencies build" do in_fixture "deps_status", fn -> - assert File.exists?("_build/dev/lib/ok") - Mix.Tasks.Clean.run ["--all"] + File.mkdir_p! "_build/dev/lib/ok" + File.mkdir_p! "_build/test/lib/ok" + Mix.Tasks.Clean.run ["--deps", "--only", "dev"] refute File.exists?("_build/dev") - assert_received {:mix_shell, :info, ["* Cleaning ok"]} - - # Assert we don't choke on unfetched deps - assert_received {:mix_shell, :info, ["* Cleaning unknown"]} - end - end - - test "cleans all deps and builds" do - in_fixture "deps_status", fn -> - assert File.exists?("_build/dev/lib/ok") - Mix.Tasks.Clean.run ["--all"] + assert File.exists?("_build/test") - refute File.exists?("_build") - assert_received {:mix_shell, :info, ["* Cleaning ok"]} - assert_received {:mix_shell, :info, ["* Cleaning unknown"]} + Mix.Tasks.Clean.run ["--deps"] + refute File.exists?("_build/test") end end end diff --git a/lib/mix/test/mix/tasks/cmd_test.exs b/lib/mix/test/mix/tasks/cmd_test.exs index db4bf069213..a76587445b8 100644 --- a/lib/mix/test/mix/tasks/cmd_test.exs +++ b/lib/mix/test/mix/tasks/cmd_test.exs @@ -7,7 +7,7 @@ defmodule Mix.Tasks.CmdTest do in_fixture "umbrella_dep/deps/umbrella", fn -> Mix.Project.in_project(:umbrella, ".", fn _ -> Mix.Task.run "cmd", ["echo", "hello"] - nl = os_newline + nl = os_newline() assert_received {:mix_shell, :info, ["==> bar"]} assert_received {:mix_shell, :run, ["hello" <> ^nl]} assert_received {:mix_shell, :info, ["==> foo"]} @@ -15,4 +15,17 @@ defmodule Mix.Tasks.CmdTest do end) end end + + test "only runs the cmd for specified apps" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project(:umbrella, ".", fn _ -> + Mix.Task.run "cmd", ["--app", "bar", "echo", "hello"] + nl = os_newline() + assert_received {:mix_shell, :info, ["==> bar"]} + assert_received {:mix_shell, :run, ["hello" <> ^nl]} + refute_received {:mix_shell, :info, ["==> foo"]} + refute_received {:mix_shell, :run, ["hello" <> ^nl]} + end) + end + end end diff --git a/lib/mix/test/mix/tasks/compile.app_test.exs b/lib/mix/test/mix/tasks/compile.app_test.exs index 3f15c13da74..d878a3701fd 100644 --- a/lib/mix/test/mix/tasks/compile.app_test.exs +++ b/lib/mix/test/mix/tasks/compile.app_test.exs @@ -5,12 +5,36 @@ defmodule Mix.Tasks.Compile.AppTest do defmodule CustomProject do def project do - [app: :custom_project, version: "0.2.0"] + [app: :custom_project, version: "0.2.0", + description: "Some UTF-8 description (uma descrição em UTF-8)"] end def application do [maxT: :infinity, - applications: 
[:example_app]] + applications: [:example_app], + extra_applications: [:logger]] + end + end + + defmodule CustomDeps do + def project do + [app: :custom_deps, version: "0.2.0", deps: deps()] + end + + def application do + [extra_applications: [:logger], included_applications: [:ok9]] + end + + def deps do + [{:ok1, path: "../ok"}, + {:ok2, path: "../ok", only: :prod}, + {:ok3, path: "../ok", only: :dev}, + {:ok4, path: "../ok", runtime: true}, + {:ok5, path: "../ok", runtime: false}, + {:ok6, path: "../ok", optional: true}, + {:ok7, path: "../ok", optional: false}, + {:ok8, path: "../ok", app: false}, + {:ok9, path: "../ok"}] end end @@ -20,16 +44,7 @@ defmodule Mix.Tasks.Compile.AppTest do end def application do - case Process.get(:error) do - :modules -> [modules: :invalid] - :maxT -> [maxT: :invalid] - :registered -> [registered: ["invalid"]] - :included_applications -> [included_applications: ["invalid"]] - :applications -> [applications: ["invalid"]] - :env -> [env: [:a]] - :mod -> [mod: {Mod}] - :start_phases -> [start_phases: [:invalid]] - end + Process.get(:application) end end @@ -56,7 +71,7 @@ defmodule Mix.Tasks.Compile.AppTest do end end - test "use custom application settings" do + test "uses custom application settings" do Mix.Project.push CustomProject in_fixture "no_mixfile", fn -> @@ -65,7 +80,19 @@ defmodule Mix.Tasks.Compile.AppTest do contents = File.read!("_build/dev/lib/custom_project/ebin/custom_project.app") assert contents =~ "0.2.0" assert contents =~ "{maxT,infinity}" - assert contents =~ "{applications,[kernel,stdlib,elixir,example_app]}" + assert contents =~ "{applications,[kernel,stdlib,elixir,logger,example_app]}" + assert contents =~ "Some UTF-8 description (uma descrição em UTF-8)" + end + end + + test "automatically infers applications" do + Mix.Project.push CustomDeps + + in_fixture "no_mixfile", fn -> + Mix.Tasks.Compile.Elixir.run([]) + Mix.Tasks.Compile.App.run([]) + contents = File.read!("_build/dev/lib/custom_deps/ebin/custom_deps.app") + assert contents =~ "{applications,[kernel,stdlib,elixir,logger,ok1,ok3,ok4,ok7]}" end end @@ -73,13 +100,70 @@ defmodule Mix.Tasks.Compile.AppTest do Mix.Project.push InvalidProject in_fixture "no_mixfile", fn -> - for error <- [:modules, :maxT, :registered, :included_applications, - :applications, :env, :mod, :start_phases] do - Process.put(:error, error) + Process.put(:application, [:not_a_keyword, applications: []]) + message = "Application configuration returned from application/0 should be a keyword list" + assert_raise Mix.Error, message, fn -> + Mix.Tasks.Compile.App.run([]) + end + + Process.put(:application, [modules: :invalid]) + message = "Application modules (:modules) should be a list of atoms, got: :invalid" + assert_raise Mix.Error, message, fn -> + Mix.Tasks.Compile.App.run([]) + end - assert_raise Mix.Error, ~r/:#{error}/, fn -> - Mix.Tasks.Compile.App.run([]) - end + Process.put(:application, [maxT: :invalid]) + message = "Application maximum time (:maxT) is not an integer or :infinity, got: :invalid" + assert_raise Mix.Error, message, fn -> + Mix.Tasks.Compile.App.run([]) + end + + Process.put(:application, [registered: ["invalid"]]) + message = "Application registered processes (:registered) should be a list of atoms, got: [\"invalid\"]" + assert_raise Mix.Error, message, fn -> + Mix.Tasks.Compile.App.run([]) + end + + Process.put(:application, [extra_applications: ["invalid"]]) + message = "Application extra applications (:extra_applications) should be a list of atoms, got: [\"invalid\"]" + 
assert_raise Mix.Error, message, fn -> + Mix.Tasks.Compile.App.run([]) + end + + Process.put(:application, [included_applications: ["invalid"]]) + message = "Application included applications (:included_applications) should be a list of atoms, got: [\"invalid\"]" + assert_raise Mix.Error, message, fn -> + Mix.Tasks.Compile.App.run([]) + end + + Process.put(:application, [applications: ["invalid"]]) + message = "Application applications (:applications) should be a list of atoms, got: [\"invalid\"]" + assert_raise Mix.Error, message, fn -> + Mix.Tasks.Compile.App.run([]) + end + + Process.put(:application, [applications: nil]) + message = "Application applications (:applications) should be a list of atoms, got: nil" + assert_raise Mix.Error, message, fn -> + Mix.Tasks.Compile.App.run([]) + end + + Process.put(:application, [env: [:invalid]]) + message = "Application environment (:env) should be a keyword list, got: [:invalid]" + assert_raise Mix.Error, message, fn -> + Mix.Tasks.Compile.App.run([]) + end + + Process.put(:application, [mod: {Mod}]) + message = "Application callback module (:mod) should be either [] or {module, start_args}, got: {Mod}" + assert_raise Mix.Error, message, fn -> + Mix.Tasks.Compile.App.run([]) + end + + Process.put(:application, [start_phases: [:invalid]]) + message = "Application start phases (:start_phases) should be a keyword list, got: [:invalid]" + assert_raise Mix.Error, message, fn -> + Mix.Tasks.Compile.App.run([]) end end end @@ -104,7 +188,8 @@ defmodule Mix.Tasks.Compile.AppTest do Mix.Project.push InvalidVsnProject in_fixture "no_mixfile", fn -> - assert_raise Mix.Error, "Expected :version to be a SemVer version", fn -> + message = "Expected :version to be a SemVer version, got: \"0.3\"" + assert_raise Mix.Error, message, fn -> Mix.Tasks.Compile.App.run([]) end end diff --git a/lib/mix/test/mix/tasks/compile.elixir_test.exs b/lib/mix/test/mix/tasks/compile.elixir_test.exs index 6e82c816933..4749f9c6dc8 100644 --- a/lib/mix/test/mix/tasks/compile.elixir_test.exs +++ b/lib/mix/test/mix/tasks/compile.elixir_test.exs @@ -8,39 +8,78 @@ defmodule Mix.Tasks.Compile.ElixirTest do :ok end - test "compiles a project" do + @elixir_otp_version {System.version, :erlang.system_info(:otp_release)} + + test "compiles a project without per environment build" do Mix.Project.pop Mix.ProjectStack.post_config [build_per_environment: false] Mix.Project.push MixTest.Case.Sample in_fixture "no_mixfile", fn -> - Mix.Tasks.Compile.Elixir.run [] + Mix.Tasks.Compile.Elixir.run ["--verbose"] assert File.regular?("_build/shared/lib/sample/ebin/Elixir.A.beam") assert File.regular?("_build/shared/lib/sample/ebin/Elixir.B.beam") - assert File.regular?("_build/shared/lib/sample/ebin/Elixir.C.beam") assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} assert_received {:mix_shell, :info, ["Compiled lib/b.ex"]} - assert_received {:mix_shell, :info, ["Compiled lib/c.ex"]} end end test "compiles a project with per environment build" do in_fixture "no_mixfile", fn -> - Mix.Tasks.Compile.Elixir.run [] + Mix.Tasks.Compile.Elixir.run ["--verbose"] assert File.regular?("_build/dev/lib/sample/ebin/Elixir.A.beam") assert File.regular?("_build/dev/lib/sample/ebin/Elixir.B.beam") - assert File.regular?("_build/dev/lib/sample/ebin/Elixir.C.beam") assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} assert_received {:mix_shell, :info, ["Compiled lib/b.ex"]} - assert_received {:mix_shell, :info, ["Compiled lib/c.ex"]} end end - test "does not write beam down on failures" do + test 
"recompiles project if Elixir version changed" do + in_fixture "no_mixfile", fn -> + Mix.Tasks.Compile.run [] + purge [A, B] + + assert File.exists?("_build/dev/lib/sample") + assert File.exists?("_build/dev/consolidated") + assert Mix.Dep.ElixirSCM.read == {:ok, @elixir_otp_version, Mix.SCM.Path} + + Mix.Task.clear + File.write!("_build/dev/consolidated/.to_be_removed", "") + manifest_data = :erlang.term_to_binary({:v1, "0.0.0", nil}) + File.write!("_build/dev/lib/sample/.compile.elixir_scm", manifest_data) + File.touch!("_build/dev/lib/sample/.compile.elixir_scm", {{2010, 1, 1}, {0, 0, 0}}) + + Mix.Tasks.Compile.run [] + assert Mix.Dep.ElixirSCM.read == {:ok, @elixir_otp_version, Mix.SCM.Path} + assert File.stat!("_build/dev/lib/sample/.compile.elixir_scm").mtime > {{2010, 1, 1}, {0, 0, 0}} + refute File.exists?("_build/dev/consolidated/.to_be_removed") + end + end + + test "recompiles project if scm changed" do + in_fixture "no_mixfile", fn -> + Mix.Tasks.Compile.run ["--verbose"] + purge [A, B] + + assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + assert Mix.Dep.ElixirSCM.read == {:ok, @elixir_otp_version, Mix.SCM.Path} + + Mix.Task.clear + manifest_data = :erlang.term_to_binary({:v2, @elixir_otp_version, :another}) + File.write!("_build/dev/lib/sample/.compile.elixir_scm", manifest_data) + File.touch!("_build/dev/lib/sample/.compile.elixir_scm", {{2010, 1, 1}, {0, 0, 0}}) + + Mix.Tasks.Compile.run [] + assert Mix.Dep.ElixirSCM.read == {:ok, @elixir_otp_version, Mix.SCM.Path} + assert File.stat!("_build/dev/lib/sample/.compile.elixir_scm").mtime > {{2010, 1, 1}, {0, 0, 0}} + end + end + + test "does not write BEAM files down on failures" do import ExUnit.CaptureIO in_tmp "blank", fn -> @@ -48,36 +87,42 @@ defmodule Mix.Tasks.Compile.ElixirTest do File.write!("lib/a.ex", "raise ~s(oops)") capture_io fn -> - assert catch_exit(Mix.Tasks.Compile.Elixir.run []) == 1 + assert catch_exit(Mix.Tasks.Compile.Elixir.run []) == {:shutdown, 1} end refute File.regular?("_build/dev/lib/sample/ebin/Elixir.A.beam") end end - test "removes old artifact files" do + test "removes, purges and deletes old artifacts" do in_fixture "no_mixfile", fn -> assert Mix.Tasks.Compile.Elixir.run([]) == :ok assert File.regular?("_build/dev/lib/sample/ebin/Elixir.A.beam") + assert Code.ensure_loaded?(A) File.rm!("lib/a.ex") assert Mix.Tasks.Compile.Elixir.run([]) == :ok refute File.regular?("_build/dev/lib/sample/ebin/Elixir.A.beam") + refute Code.ensure_loaded?(A) + refute String.contains?(File.read!("_build/dev/lib/sample/.compile.elixir"), "Elixir.A") end end - test "compiles only changed files" do + test "compiles mtime changed files" do in_fixture "no_mixfile", fn -> - assert Mix.Tasks.Compile.Elixir.run([]) == :ok + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} assert_received {:mix_shell, :info, ["Compiled lib/b.ex"]} Mix.shell.flush - purge [A, B, C] + purge [A, B] future = {{2020, 1, 1}, {0, 0, 0}} File.touch!("lib/a.ex", future) - Mix.Tasks.Compile.Elixir.run [] + Mix.Tasks.Compile.Elixir.run ["--verbose"] + + assert_received {:mix_shell, :error, ["warning: mtime (modified time) for \"lib/a.ex\" was set to the future, resetting to now"]} + refute_received {:mix_shell, :error, ["warning: mtime (modified time) for \"lib/b.ex\" was set to the future, resetting to now"]} assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} refute_received {:mix_shell, :info, ["Compiled lib/b.ex"]} @@ -87,20 +132,41 @@ defmodule 
Mix.Tasks.Compile.ElixirTest do end end + test "compiles size changed files" do + in_fixture "no_mixfile", fn -> + past = {{2010, 1, 1}, {0, 0, 0}} + File.touch!("lib/a.ex", past) + + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok + assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + assert_received {:mix_shell, :info, ["Compiled lib/b.ex"]} + + Mix.shell.flush + purge [A, B] + + File.write!("lib/a.ex", File.read!("lib/a.ex") <> "\n") + File.touch!("lib/a.ex", past) + Mix.Tasks.Compile.Elixir.run ["--verbose"] + + assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + refute_received {:mix_shell, :info, ["Compiled lib/b.ex"]} + end + end + test "compiles dependent changed modules" do in_fixture "no_mixfile", fn -> File.write!("lib/a.ex", "defmodule A, do: B.module_info") - assert Mix.Tasks.Compile.Elixir.run([]) == :ok + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} assert_received {:mix_shell, :info, ["Compiled lib/b.ex"]} Mix.shell.flush - purge [A, B, C] + purge [A, B] future = {{2020, 1, 1}, {0, 0, 0}} File.touch!("lib/b.ex", future) - Mix.Tasks.Compile.Elixir.run [] + Mix.Tasks.Compile.Elixir.run ["--verbose"] assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} assert_received {:mix_shell, :info, ["Compiled lib/b.ex"]} @@ -111,16 +177,16 @@ defmodule Mix.Tasks.Compile.ElixirTest do in_fixture "no_mixfile", fn -> File.write!("lib/a.ex", "defmodule A, do: B.module_info") - assert Mix.Tasks.Compile.Elixir.run([]) == :ok + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} assert_received {:mix_shell, :info, ["Compiled lib/b.ex"]} Mix.shell.flush - purge [A, B, C] + purge [A, B] File.rm("lib/b.ex") File.write!("lib/a.ex", "defmodule A, do: nil") - Mix.Tasks.Compile.Elixir.run [] + Mix.Tasks.Compile.Elixir.run ["--verbose"] assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} refute_received {:mix_shell, :info, ["Compiled lib/b.ex"]} @@ -129,53 +195,140 @@ defmodule Mix.Tasks.Compile.ElixirTest do test "compiles dependent changed files" do in_fixture "no_mixfile", fn -> + tmp = tmp_path("c.eex") File.touch!("lib/a.eex") + File.write!("lib/a.ex", """ defmodule A do @external_resource "lib/a.eex" + @external_resource #{inspect tmp} def a, do: :ok end """) - assert Mix.Tasks.Compile.Elixir.run([]) == :ok + # Compiles with missing external resources + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :noop Mix.shell.flush - purge [A, B, C] + purge [A, B] + # Update local existing resource File.touch!("lib/a.eex", {{2020, 1, 1}, {0, 0, 0}}) - assert Mix.Tasks.Compile.Elixir.run([]) == :ok + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok + assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + refute_received {:mix_shell, :info, ["Compiled lib/b.ex"]} + + # Does not update on old existing resource + File.touch!("lib/a.eex", {{1970, 1, 1}, {0, 0, 0}}) + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :noop + Mix.shell.flush + purge [A, B] + # Update external existing resource + File.touch!(tmp, {{2020, 1, 1}, {0, 0, 0}}) + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} refute_received {:mix_shell, :info, ["Compiled lib/b.ex"]} end + after + File.rm tmp_path("c.eex") end - test "recompiles with --force" do + test "recompiles modules with multiple sources" do in_fixture 
"no_mixfile", fn -> - assert Mix.Tasks.Compile.Elixir.run([]) == :ok - purge [A, B, C] + File.write!("lib/a.ex", """ + defmodule A do + def one, do: 1 + end - # Now we have a noop - assert Mix.Tasks.Compile.Elixir.run([]) == :noop + defmodule B do + def two, do: 2 + end + """) - # --force - assert Mix.Tasks.Compile.Elixir.run(["--force"]) == :ok + File.write!("lib/b.ex", """ + B.two() + + defmodule A do + end + """) + + assert Mix.Tasks.Compile.Elixir.run(["--verbose", "--ignore-module-conflict"]) == :ok + assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + assert_received {:mix_shell, :info, ["Compiled lib/b.ex"]} + refute function_exported?(A, :one, 0) + + Mix.shell.flush + purge [A] + + File.rm("lib/b.ex") + Mix.Tasks.Compile.Elixir.run ["--verbose"] + assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + refute_received {:mix_shell, :info, ["Compiled lib/b.ex"]} + assert function_exported?(A, :one, 0) + end + end + + test "does not recompile empty files" do + in_fixture "no_mixfile", fn -> + File.write!("lib/a.ex", "") + + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :noop + refute_received {:mix_shell, :info, ["Compiled lib/a.ex"]} end end - defmodule SourcePathsProject do - def project do - [app: :source_paths, elixirc_paths: ["unknown"]] + test "compiles files with autoload disabled" do + in_fixture "no_mixfile", fn -> + File.write!("lib/a.ex", """ + defmodule A do + @compile {:autoload, false} + end + """) + + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok + purge [A, B] end end - test "use custom source paths" do - Mix.Project.push SourcePathsProject + test "recompiles with --force" do + in_fixture "no_mixfile", fn -> + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok + purge [A, B] + + # Now we have a noop + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :noop + + # --force + assert Mix.Tasks.Compile.Elixir.run(["--force", "--verbose"]) == :ok + assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + end + end + test "does not treat remote typespecs as compile time dependencies" do in_fixture "no_mixfile", fn -> - # Nothing to compile with the custom source paths - assert Mix.Tasks.Compile.Elixir.run([]) - refute_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + File.write!("lib/b.ex", """ + defmodule B do + @type t :: A.t + end + """) + + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok + assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + assert_received {:mix_shell, :info, ["Compiled lib/b.ex"]} + + Mix.shell.flush + purge [A, B] + + future = {{2020, 1, 1}, {0, 0, 0}} + File.touch!("lib/a.ex", future) + Mix.Tasks.Compile.Elixir.run ["--verbose"] + + assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + refute_received {:mix_shell, :info, ["Compiled lib/b.ex"]} end end end diff --git a/lib/mix/test/mix/tasks/compile.erlang_test.exs b/lib/mix/test/mix/tasks/compile.erlang_test.exs index 4bca6edb462..1327c3b9db8 100644 --- a/lib/mix/test/mix/tasks/compile.erlang_test.exs +++ b/lib/mix/test/mix/tasks/compile.erlang_test.exs @@ -16,7 +16,7 @@ defmodule Mix.Tasks.Compile.ErlangTest do def zzz(), do: b """ - assert_raise CompileError, fn -> + assert_raise Mix.Error, fn -> capture_io fn -> Mix.Tasks.Compile.Erlang.run [] end @@ -29,17 +29,21 @@ defmodule Mix.Tasks.Compile.ErlangTest do test "compiles src/b.erl and src/c.erl" do in_fixture "compile_erlang", fn -> - assert 
Mix.Tasks.Compile.Erlang.run([]) == :ok + assert Mix.Tasks.Compile.Erlang.run(["--verbose"]) == :ok assert_received {:mix_shell, :info, ["Compiled src/b.erl"]} assert_received {:mix_shell, :info, ["Compiled src/c.erl"]} assert File.regular?("_build/dev/lib/sample/ebin/b.beam") assert File.regular?("_build/dev/lib/sample/ebin/c.beam") - assert Mix.Tasks.Compile.Erlang.run([]) == :noop + assert File.read!("_build/dev/lib/sample/.compile.erlang") == + "_build/dev/lib/sample/ebin/b.beam\n" <> + "_build/dev/lib/sample/ebin/c.beam" + + assert Mix.Tasks.Compile.Erlang.run(["--verbose"]) == :noop refute_received {:mix_shell, :info, ["Compiled src/b.erl"]} - assert Mix.Tasks.Compile.Erlang.run(["--force"]) == :ok + assert Mix.Tasks.Compile.Erlang.run(["--force", "--verbose"]) == :ok assert_received {:mix_shell, :info, ["Compiled src/b.erl"]} assert_received {:mix_shell, :info, ["Compiled src/c.erl"]} end @@ -55,4 +59,25 @@ defmodule Mix.Tasks.Compile.ErlangTest do refute File.regular?("_build/dev/lib/sample/ebin/b.beam") end end + + test "compilation purges the module" do + in_fixture "compile_erlang", fn -> + # Create the first version of the module. + defmodule :purge_test do + def version, do: :v1 + end + assert :v1 == :purge_test.version + + # Create the second version of the module (this time as Erlang source). + File.write! "src/purge_test.erl", """ + -module(purge_test). + -export([version/0]). + version() -> v2. + """ + assert Mix.Tasks.Compile.Erlang.run([]) == :ok + + # If the module was not purged on recompilation, this would fail. + assert :v2 == :purge_test.version + end + end end diff --git a/lib/mix/test/mix/tasks/compile.leex_test.exs b/lib/mix/test/mix/tasks/compile.leex_test.exs index f312e3cdecb..8a70dcc6e0c 100644 --- a/lib/mix/test/mix/tasks/compile.leex_test.exs +++ b/lib/mix/test/mix/tasks/compile.leex_test.exs @@ -15,7 +15,7 @@ defmodule Mix.Tasks.Compile.LeexTest do oops. 
""" - assert_raise CompileError, fn -> + assert_raise Mix.Error, fn -> capture_io fn -> Mix.Tasks.Compile.Leex.run ["--force"] end @@ -27,15 +27,14 @@ defmodule Mix.Tasks.Compile.LeexTest do test "compiles src/test_ok.xrl" do in_fixture "compile_leex", fn -> - assert Mix.Tasks.Compile.Leex.run([]) == :ok - + assert Mix.Tasks.Compile.Leex.run(["--verbose"]) == :ok assert_received {:mix_shell, :info, ["Compiled src/test_ok.xrl"]} assert File.regular?("src/test_ok.erl") - assert Mix.Tasks.Compile.Leex.run([]) == :noop + assert Mix.Tasks.Compile.Leex.run(["--verbose"]) == :noop refute_received {:mix_shell, :info, ["Compiled src/test_ok.xrl"]} - assert Mix.Tasks.Compile.Leex.run(["--force"]) == :ok + assert Mix.Tasks.Compile.Leex.run(["--force", "--verbose"]) == :ok assert_received {:mix_shell, :info, ["Compiled src/test_ok.xrl"]} end end diff --git a/lib/mix/test/mix/tasks/compile.protocols_test.exs b/lib/mix/test/mix/tasks/compile.protocols_test.exs index a9b598db077..51e8b258fed 100644 --- a/lib/mix/test/mix/tasks/compile.protocols_test.exs +++ b/lib/mix/test/mix/tasks/compile.protocols_test.exs @@ -3,20 +3,88 @@ Code.require_file "../../test_helper.exs", __DIR__ defmodule Mix.Tasks.Compile.ProtocolsTest do use MixTest.Case - test "compiles and consolidates protocols" do + @old {{2010, 1, 1}, {0, 0, 0}} + + test "compiles and consolidates local protocols", context do + Mix.Project.push MixTest.Case.Sample + + in_tmp context.test, fn -> + File.mkdir_p!("lib") + assert Mix.Task.run("compile") + + # Define a local protocol + File.write!("lib/protocol.ex", """ + defprotocol Compile.Protocol do + def foo(a, b) + end + """) + assert compile_elixir_and_protocols() == :ok + mark_as_old!("_build/dev/consolidated/Elixir.Compile.Protocol.beam") + + # Implement a local protocol + File.write!("lib/impl.ex", """ + defimpl Compile.Protocol, for: Integer do + def foo(a, b), do: a + b + end + """) + assert compile_elixir_and_protocols() == :ok + assert mark_as_old!("_build/dev/consolidated/Elixir.Compile.Protocol.beam") != @old + + # Delete a local implementation + File.rm!("lib/impl.ex") + assert compile_elixir_and_protocols() == :ok + assert mark_as_old!("_build/dev/consolidated/Elixir.Compile.Protocol.beam") != @old + + # Delete a local protocol + File.rm!("lib/protocol.ex") + assert compile_elixir_and_protocols() == :noop + refute File.regular?("_build/dev/consolidated/Elixir.Compile.Protocol.beam") + end + end + + test "compiles and consolidates deps protocols", context do Mix.Project.push MixTest.Case.Sample - in_fixture "no_mixfile", fn -> - assert Mix.Tasks.Compile.Protocols.run([]) == :ok - assert_received {:mix_shell, :info, ["Consolidated Enumerable"]} + in_tmp context.test, fn -> + File.mkdir_p!("lib") + + assert Mix.Task.run("compile") + mark_as_old!("_build/dev/consolidated/Elixir.String.Chars.beam") + + assert compile_elixir_and_protocols() == :noop + assert mtime("_build/dev/consolidated/Elixir.String.Chars.beam") == @old - assert File.regular? 
"_build/dev/consolidated/Elixir.Enumerable.beam" - purge [Enumerable] + # Implement a deps protocol + File.write!("lib/struct.ex", """ + defmodule Compile.Protocol.Struct do + defstruct a: nil + defimpl String.Chars do + def to_string(_), do: "ok" + end + end + """) + assert compile_elixir_and_protocols() == :ok + assert mark_as_old!("_build/dev/consolidated/Elixir.String.Chars.beam") != @old - Code.prepend_path("_build/dev/consolidated") - assert Protocol.consolidated?(Enumerable) + # Delete the local implementation + File.rm!("lib/struct.ex") + assert compile_elixir_and_protocols() == :ok + assert mark_as_old!("_build/dev/consolidated/Elixir.String.Chars.beam") != @old end - after - purge [Enumerable] + end + + defp compile_elixir_and_protocols do + Mix.Tasks.Compile.Elixir.run([]) + Mix.Tasks.Compile.Protocols.run([]) + end + + defp mtime(path) do + File.stat!(path).mtime + end + + defp mark_as_old!(path) do + mtime = mtime(path) + File.touch!(path, @old) + mtime end end diff --git a/lib/mix/test/mix/tasks/compile.xref_test.exs b/lib/mix/test/mix/tasks/compile.xref_test.exs new file mode 100644 index 00000000000..d74c6516512 --- /dev/null +++ b/lib/mix/test/mix/tasks/compile.xref_test.exs @@ -0,0 +1,103 @@ +Code.require_file "../../test_helper.exs", __DIR__ + +defmodule Mix.Tasks.Compile.XrefTest do + use MixTest.Case + + import ExUnit.CaptureIO + + setup do + Mix.Project.push MixTest.Case.Sample + :ok + end + + test "doesn't do anything if no Elixir manifest" do + in_fixture "no_mixfile", fn -> + write_no_func() + + [xref_manifest] = Mix.Tasks.Compile.Xref.manifests() + + assert_no_warn fn -> + assert Mix.Tasks.Compile.Xref.run([]) == :noop + refute File.exists? xref_manifest + end + end + end + + test "doesn't xref if not stale, unless forced" do + in_fixture "no_mixfile", fn -> + write_no_func() + + assert_warn_no_func fn -> + assert Mix.Tasks.Compile.Elixir.run([]) == :ok + assert Mix.Tasks.Compile.Xref.run([]) == :noop + end + + assert_no_warn fn -> + assert Mix.Tasks.Compile.Xref.run([]) == :noop + end + + Mix.Task.reenable("xref") + + assert_warn_no_func fn -> + assert Mix.Tasks.Compile.Xref.run(["--force"]) == :noop + end + end + end + + test "xrefs if stale" do + in_fixture "no_mixfile", fn -> + write_no_func() + + assert_warn_no_func fn -> + assert Mix.Tasks.Compile.Elixir.run([]) == :ok + assert Mix.Tasks.Compile.Xref.run([]) == :noop + end + + [manifest] = Mix.Tasks.Compile.Elixir.manifests() + future = {{2020, 1, 1}, {0, 0, 0}} + File.touch!(manifest, future) + + Mix.Task.reenable("xref") + + assert_warn_no_func fn -> + assert Mix.Tasks.Compile.Xref.run([]) == :noop + end + end + end + + test "exits if warnings-as-errors" do + in_fixture "no_mixfile", fn -> + write_no_func() + + assert_warn_no_func fn -> + assert Mix.Tasks.Compile.Elixir.run([]) == :ok + assert catch_exit(Mix.Tasks.Compile.Xref.run(["--warnings-as-errors"])) == {:shutdown, 1} + end + end + end + + test "does not exit if warnings-as-errors and no warnings" do + in_fixture "no_mixfile", fn -> + assert_no_warn fn -> + assert Mix.Tasks.Compile.Elixir.run([]) == :ok + assert Mix.Tasks.Compile.Xref.run(["--warnings-as-errors"]) == :noop + end + end + end + + defp write_no_func do + File.write!("lib/a.ex", """ + defmodule A do + def a, do: B.no_func + end + """) + end + + defp assert_warn_no_func(fun) do + assert capture_io(:stderr, fun) =~ "no_func" + end + + defp assert_no_warn(fun) do + assert capture_io(:stderr, fun) == "" + end +end diff --git a/lib/mix/test/mix/tasks/compile.yecc_test.exs 
b/lib/mix/test/mix/tasks/compile.yecc_test.exs index ac1a7a1344f..a551e3252d1 100644 --- a/lib/mix/test/mix/tasks/compile.yecc_test.exs +++ b/lib/mix/test/mix/tasks/compile.yecc_test.exs @@ -15,7 +15,7 @@ defmodule Mix.Tasks.Compile.YeccTest do oops. """ - assert_raise CompileError, fn -> + assert_raise Mix.Error, fn -> capture_io fn -> Mix.Tasks.Compile.Yecc.run ["--force"] end @@ -27,15 +27,15 @@ defmodule Mix.Tasks.Compile.YeccTest do test "compiles src/test_ok.yrl" do in_fixture "compile_yecc", fn -> - assert Mix.Tasks.Compile.Yecc.run([]) == :ok + assert Mix.Tasks.Compile.Yecc.run(["--verbose"]) == :ok assert_received {:mix_shell, :info, ["Compiled src/test_ok.yrl"]} assert File.regular?("src/test_ok.erl") - assert Mix.Tasks.Compile.Yecc.run([]) == :noop + assert Mix.Tasks.Compile.Yecc.run(["--verbose"]) == :noop refute_received {:mix_shell, :info, ["Compiled src/test_ok.yrl"]} - assert Mix.Tasks.Compile.Yecc.run(["--force"]) == :ok + assert Mix.Tasks.Compile.Yecc.run(["--force", "--verbose"]) == :ok assert_received {:mix_shell, :info, ["Compiled src/test_ok.yrl"]} end end diff --git a/lib/mix/test/mix/tasks/compile_test.exs b/lib/mix/test/mix/tasks/compile_test.exs index 73cc8828901..fdc1f3b05d2 100644 --- a/lib/mix/test/mix/tasks/compile_test.exs +++ b/lib/mix/test/mix/tasks/compile_test.exs @@ -14,29 +14,43 @@ defmodule Mix.Tasks.CompileTest do :ok end - test "mix compile --list with mixfile" do + test "compile --list with mixfile" do Mix.Tasks.Compile.run ["--list"] - assert_received {:mix_shell, :info, ["\nEnabled compilers: yecc, leex, erlang, elixir, app"]} + assert_received {:mix_shell, :info, ["\nEnabled compilers: yecc, leex, erlang, elixir, xref, app, protocols"]} assert_received {:mix_shell, :info, ["mix compile.elixir # " <> _]} end - test "mix compile --list with custom mixfile" do + test "compile --list with custom mixfile" do Mix.Project.push CustomCompilers Mix.Tasks.Compile.run ["--list"] - assert_received {:mix_shell, :info, ["\nEnabled compilers: elixir, app, custom"]} + assert_received {:mix_shell, :info, ["\nEnabled compilers: elixir, app, custom, protocols"]} + end + + test "compile does not require all compilers available on manifest" do + Mix.Project.push CustomCompilers + assert Mix.Tasks.Compile.manifests |> Enum.map(&Path.basename/1) == + [".compile.elixir"] end test "compile a project with mixfile" do in_fixture "no_mixfile", fn -> - Mix.Tasks.Compile.run [] + assert Mix.Tasks.Compile.run(["--verbose"]) == :ok assert File.regular?("_build/dev/lib/sample/ebin/Elixir.A.beam") assert File.regular?("_build/dev/lib/sample/ebin/sample.app") assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} - assert_received {:mix_shell, :info, ["Generated sample.app"]} + assert_received {:mix_shell, :info, ["Generated sample app"]} + + assert File.regular? 
"_build/dev/consolidated/Elixir.Enumerable.beam" + assert Mix.Tasks.Compile.run(["--verbose"]) == :noop + refute_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + purge [Enumerable] + + assert Mix.Tasks.App.Start.run(["--verbose"]) == :ok + assert Protocol.consolidated?(Enumerable) end end - test "compile a project with multiple compilers and a syntax error in an erlang file" do + test "compile a project with multiple compilers and a syntax error in an Erlang file" do in_fixture "no_mixfile", fn -> import ExUnit.CaptureIO @@ -47,13 +61,31 @@ defmodule Mix.Tasks.CompileTest do """ assert File.regular?("src/a.erl") - assert_raise CompileError, fn -> + assert_raise Mix.Error, fn -> capture_io fn -> Mix.Tasks.Compile.run ["--force"] end end refute File.regular?("ebin/Elixir.A.beam") refute File.regular?("ebin/Elixir.B.beam") - refute File.regular?("ebin/Elixir.C.beam") + end + end + + test "add Logger application metadata" do + import ExUnit.CaptureLog + in_fixture "no_mixfile", fn -> + File.write!("lib/a.ex", """ + defmodule A do + require Logger + def info, do: Logger.info("hello") + end + """) + + assert Mix.Tasks.Compile.run([]) == :ok + try do + assert capture_log([metadata: [:application]], &A.info/0) =~ "application=sample" + after + purge [A] + end end end end diff --git a/lib/mix/test/mix/tasks/deps.git_test.exs b/lib/mix/test/mix/tasks/deps.git_test.exs index 0adbb2dc0b1..ec4257af6f5 100644 --- a/lib/mix/test/mix/tasks/deps.git_test.exs +++ b/lib/mix/test/mix/tasks/deps.git_test.exs @@ -5,43 +5,44 @@ defmodule Mix.Tasks.DepsGitTest do defmodule DepsOnGitApp do def project do - [ app: :deps_on_git_app, - version: "0.1.0", - deps: [ - {:deps_on_git_repo, "0.2.0", git: MixTest.Case.fixture_path("deps_on_git_repo")} - ] ] + [app: :deps_on_git_app, + version: "0.1.0", + deps: [ + {:deps_on_git_repo, "0.2.0", git: fixture_path("deps_on_git_repo")} + ]] end end defmodule GitApp do def project do - [ app: :git_app, - version: "0.1.0", - deps: [ - {:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo")} - ] ] + opts = Process.get(:git_repo_opts) || [] + [app: :git_app, + version: "0.1.0", + deps: [ + {:git_repo, "0.1.0", [git: fixture_path("git_repo")] ++ opts} + ]] end end defmodule GitSubmodulesApp do def project do - [ app: :git_app, - version: "0.1.0", - deps: [ - {:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo"), submodules: true} - ] ] + [app: :git_app, + version: "0.1.0", + deps: [ + {:git_repo, "0.1.0", git: fixture_path("git_repo"), submodules: true} + ]] end end defmodule GitErrorApp do def project do - [ deps: [ - {:git_repo, "0.1.0", git: MixTest.Case.fixture_path("not_git_repo")} - ] ] + [deps: [ + {:git_repo, "0.1.0", git: fixture_path("not_git_repo")} + ]] end end - test "gets and updates git repos with compilation" do + test "gets and updates Git repos with compilation" do Mix.Project.push GitApp in_fixture "no_mixfile", fn -> @@ -57,6 +58,32 @@ defmodule Mix.Tasks.DepsGitTest do end end + test "gets and updates Git repos with submodules" do + Mix.Project.push GitSubmodulesApp + + in_fixture "no_mixfile", fn -> + Mix.Tasks.Deps.Get.run [] + message = "* Getting git_repo (#{fixture_path("git_repo")})" + assert_received {:mix_shell, :info, [^message]} + assert File.read!("mix.lock") =~ "submodules: true" + end + end + + @tag :git_sparse + test "gets and updates Git repos with sparse checkout" do + Process.put(:git_repo_opts, sparse: "sparse_dir") + Mix.Project.push GitApp + + in_fixture "no_mixfile", fn -> + Mix.Tasks.Deps.Get.run [] + message = "* 
Getting git_repo (#{fixture_path("git_repo")})" + assert_received {:mix_shell, :info, [^message]} + refute File.exists?("deps/git_repo/mix.exs") + assert File.exists?("deps/git_repo/sparse_dir/mix.exs") + assert File.read!("mix.lock") =~ "sparse: \"sparse_dir\"" + end + end + test "handles invalid .git directory" do Mix.Project.push GitApp @@ -78,7 +105,7 @@ defmodule Mix.Tasks.DepsGitTest do File.rm_rf!("deps/git_repo/.git") assert_raise Mix.Error, "Can't continue due to errors on dependencies", fn -> - Mix.Tasks.Deps.Check.run ["git_repo"] + Mix.Tasks.Deps.Loadpaths.run ["git_repo"] end end end @@ -100,12 +127,43 @@ defmodule Mix.Tasks.DepsGitTest do assert File.exists?("deps/git_repo/mix.exs") assert File.rm("deps/git_repo/.fetch") == :ok - Mix.Tasks.Deps.Update.run ["deps_on_git_repo"] + # Compile the dependencies + Mix.Tasks.Deps.Compile.run [] + + # Now update children and make sure it propagates + Mix.Tasks.Deps.Update.run ["git_repo"] assert File.exists?("deps/deps_on_git_repo/.fetch") assert File.exists?("deps/git_repo/.fetch") + + # Compile Git repo but unload it so... + Mix.Tasks.Deps.Compile.run ["git_repo"] + assert File.exists?("_build/dev/lib/git_repo/ebin") + Code.delete_path("_build/dev/lib/git_repo/ebin") + + # Deps on Git repo loads it automatically on compile + Mix.Task.reenable "deps.loadpaths" + Mix.Tasks.Deps.Compile.run ["deps_on_git_repo"] + assert File.exists?("_build/dev/lib/deps_on_git_repo/ebin") end after - purge [GitRepo, GitRepo.Mix] + purge [GitRepo, GitRepo.Mixfile] + end + + test "compiles many levels deep dependencies" do + Mix.Project.push DepsOnGitApp + + in_fixture "no_mixfile", fn -> + Mix.Tasks.Deps.Get.run [] + refute File.exists?("_build/dev/lib/deps_on_git_repo") + refute File.exists?("_build/dev/lib/git_repo") + + # Compile the parent with children + Mix.Tasks.Deps.Compile.run ["deps_on_git_repo", "--include-children"] + assert File.exists?("_build/dev/lib/deps_on_git_repo") + assert File.exists?("_build/dev/lib/git_repo") + end + after + purge [GitRepo, GitRepo.Mixfile] end test "recompiles the project when a dep is fetched" do @@ -116,17 +174,12 @@ defmodule Mix.Tasks.DepsGitTest do assert File.exists?("deps/git_repo/.fetch") # We can compile just fine - Mix.Tasks.Compile.run [] - assert_received {:mix_shell, :info, ["Compiled lib/git_repo.ex"]} - assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + assert Mix.Tasks.Compile.run(["--verbose"]) == :ok # Clear up to prepare for the update - File.rm("_build/dev/lib/git_repo/ebin/Elixir.GitRepo.beam") - File.rm("_build/dev/lib/git_repo/.compile.elixir") - File.rm("deps/git_repo/.fetch") Mix.Task.clear Mix.shell.flush - purge [A, B, C, GitRepo] + purge [A, B, GitRepo] # Update will mark the update required Mix.Tasks.Deps.Update.run ["git_repo"] @@ -135,17 +188,17 @@ defmodule Mix.Tasks.DepsGitTest do # mix deps.compile is required... 
Mix.Tasks.Deps.run [] - msg = " the dependency build is outdated, please run `mix deps.compile`" + msg = " the dependency build is outdated, please run \"mix deps.compile\"" assert_received {:mix_shell, :info, [^msg]} # But also ran automatically - Mix.Tasks.Compile.run [] - assert_received {:mix_shell, :info, ["Compiled lib/git_repo.ex"]} + Mix.Tasks.Compile.run ["--verbose"] assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]} + assert File.exists?("_build/dev/lib/git_repo/.compile.fetch") :ok end after - purge [A, B, C, GitRepo, GitRepo.Mix] + purge [A, B, GitRepo, GitRepo.Mixfile] end test "all up to date dependencies" do @@ -160,17 +213,17 @@ defmodule Mix.Tasks.DepsGitTest do assert_received {:mix_shell, :info, ["All dependencies up to date"]} end after - purge [GitRepo, GitRepo.Mix] + purge [GitRepo, GitRepo.Mixfile] end test "updates the lock when the repo updates" do Mix.Project.push GitApp - # Get git repo first revision - [last, first|_] = get_git_repo_revs + # Get Git repo first revision + [last, first | _] = get_git_repo_revs() in_fixture "no_mixfile", fn -> - Mix.Dep.Lock.write [git_repo: {:git, fixture_path("git_repo"), first, []}] + Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), first, []}} Mix.Tasks.Deps.Get.run [] refute File.exists?("deps/git_repo/lib/git_repo.ex") @@ -188,24 +241,24 @@ defmodule Mix.Tasks.DepsGitTest do refute File.read!("mix.lock") =~ last end after - purge [GitRepo, GitRepo.Mix] + purge [GitRepo, GitRepo.Mixfile] end test "updates the repo when the lock updates" do Mix.Project.push GitApp - [last, first|_] = get_git_repo_revs + [last, first | _] = get_git_repo_revs() in_fixture "no_mixfile", fn -> - Mix.Dep.Lock.write [git_repo: {:git, fixture_path("git_repo"), first, []}] + Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), first, []}} Mix.Tasks.Deps.Get.run [] refute File.exists?("deps/git_repo/lib/git_repo.ex") assert File.read!("mix.lock") =~ first # Update the lock and now we should get an error - Mix.Dep.Lock.write [git_repo: {:git, fixture_path("git_repo"), last, []}] + Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), last, []}} assert_raise Mix.Error, fn -> - Mix.Tasks.Deps.Check.run [] + Mix.Tasks.Deps.Loadpaths.run [] end # Flush the errors we got, move to a clean slate @@ -224,16 +277,73 @@ defmodule Mix.Tasks.DepsGitTest do refute_received {:mix_shell, :error, _} end after - purge [GitRepo, GitRepo.Mix] + purge [GitRepo, GitRepo.Mixfile] + end + + @tag :git_sparse + test "updates the repo when sparse is turned off" do + Process.put(:git_repo_opts, sparse: "sparse_dir") + Mix.Project.push GitApp + + in_fixture "no_mixfile", fn -> + Mix.Tasks.Deps.Get.run [] + refute File.exists?("deps/git_repo/lib/git_repo.ex") + + # Flush the errors we got, move to a clean slate + Mix.shell.flush + Mix.Task.clear + Process.delete(:git_repo_opts) + Mix.Project.pop + Mix.Project.push GitApp + + # Calling get should update the dependency + Mix.Tasks.Deps.Get.run [] + refute File.read!("mix.lock") =~ "sparse_dir" + assert File.exists?("deps/git_repo/lib/git_repo.ex") + + message = "* Updating git_repo (#{fixture_path("git_repo")})" + assert_received {:mix_shell, :info, [^message]} + + # Check we got no error + refute_received {:mix_shell, :error, _} + end + end + + @tag :git_sparse + test "updates the repo when sparse is turned on" do + Mix.Project.push GitApp + + in_fixture "no_mixfile", fn -> + Mix.Tasks.Deps.Get.run [] + assert File.exists?("deps/git_repo/lib/git_repo.ex") + + # Flush the errors we got, 
move to a clean slate + Mix.shell.flush + Mix.Task.clear + Process.put(:git_repo_opts, sparse: "sparse_dir") + Mix.Project.pop + Mix.Project.push GitApp + + # Calling get should update the dependency + Mix.Tasks.Deps.Get.run [] + assert File.read!("mix.lock") =~ "sparse_dir" + refute File.exists?("deps/git_repo/lib/git_repo.ex") + + message = "* Updating git_repo (#{fixture_path("git_repo")})" + assert_received {:mix_shell, :info, [^message]} + + # Check we got no error + refute_received {:mix_shell, :error, _} + end end test "updates the repo and the lock when the mixfile updates" do Mix.Project.push GitApp - [last, first|_] = get_git_repo_revs + [last, first | _] = get_git_repo_revs() in_fixture "no_mixfile", fn -> # Move to the first version - Mix.Dep.Lock.write [git_repo: {:git, fixture_path("git_repo"), first, []}] + Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), first, []}} Mix.Tasks.Deps.Get.run [] assert File.read!("mix.lock") =~ first @@ -242,7 +352,7 @@ defmodule Mix.Tasks.DepsGitTest do refresh deps: [{:git_repo, "0.1.0", git: fixture_path("git_repo"), ref: last}] Mix.Tasks.Deps.run [] - msg = " lock outdated: the lock is outdated compared to the options in your mixfile" + msg = " lock outdated: the lock is outdated compared to the options in your mixfile. To fetch locked version run \"mix deps.get\"" assert_received {:mix_shell, :info, [^msg]} # Check an update was triggered @@ -256,7 +366,7 @@ defmodule Mix.Tasks.DepsGitTest do refute_received {:mix_shell, :error, _} end after - purge [GitRepo, GitRepo.Mix] + purge [GitRepo, GitRepo.Mixfile] end test "does not attempt to compile projects that could not be retrieved" do @@ -266,32 +376,50 @@ defmodule Mix.Tasks.DepsGitTest do exception = assert_raise Mix.Error, fn -> Mix.Tasks.Deps.Get.run [] end - assert Exception.message(exception) =~ "Command `git clone" + assert Exception.message(exception) =~ "Command \"git --git-dir=.git fetch" end end test "does not load bad mixfiles on get" do Mix.Project.push GitApp - [last, _, bad|_] = get_git_repo_revs + [last, _, bad | _] = get_git_repo_revs() in_fixture "no_mixfile", fn -> - Mix.Dep.Lock.write [git_repo: {:git, fixture_path("git_repo"), bad, []}] + Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), bad, []}} catch_error(Mix.Tasks.Deps.Get.run []) - Mix.Dep.Lock.write [git_repo: {:git, fixture_path("git_repo"), last, []}] + Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), last, []}} Mix.Tasks.Deps.Get.run [] assert File.read!("mix.lock") =~ last end after - purge [GitRepo, GitRepo.Mix] + purge [GitRepo, GitRepo.Mixfile] + end + + test "updates on Git opts change" do + Mix.Project.push GitApp + + in_fixture "no_mixfile", fn -> + Process.put(:git_repo_opts, tag: "without_module") + refresh([]) + Mix.Tasks.Deps.Get.run [] + refute File.regular?("deps/git_repo/lib/git_repo.ex") + + Process.put(:git_repo_opts, tag: "with_module") + refresh([]) + Mix.Tasks.Deps.Get.run [] + assert File.regular?("deps/git_repo/lib/git_repo.ex") + end + after + purge [GitRepo, GitRepo.Mixfile] end test "does not load bad mixfiles on update" do Mix.Project.push GitApp - [last, _, bad|_] = get_git_repo_revs + [last, _, bad | _] = get_git_repo_revs() in_fixture "no_mixfile", fn -> - Mix.Dep.Lock.write [git_repo: {:git, fixture_path("git_repo"), bad, []}] + Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), bad, []}} catch_error(Mix.Tasks.Deps.Get.run []) Mix.Tasks.Deps.Update.run ["git_repo"] @@ -299,7 +427,7 @@ defmodule Mix.Tasks.DepsGitTest do assert 
File.read!("mix.lock") =~ last end after - purge [GitRepo, GitRepo.Mix] + purge [GitRepo, GitRepo.Mixfile] end defp refresh(post_config) do @@ -308,9 +436,9 @@ defmodule Mix.Tasks.DepsGitTest do Mix.Project.push(name, file) end - defp get_git_repo_revs do + defp get_git_repo_revs() do File.cd! fixture_path("git_repo"), fn -> - Regex.split ~r(\r?\n), System.cmd("git log --format=%H") + Regex.split ~r(\r?\n), System.cmd("git", ["log", "--format=%H"]) |> elem(0) end end end diff --git a/lib/mix/test/mix/tasks/deps.path_test.exs b/lib/mix/test/mix/tasks/deps.path_test.exs index 708638cea6e..6c2109c3f03 100644 --- a/lib/mix/test/mix/tasks/deps.path_test.exs +++ b/lib/mix/test/mix/tasks/deps.path_test.exs @@ -15,6 +15,19 @@ defmodule Mix.Tasks.DepsPathTest do end end + defmodule MismatchDepsApp do + def project do + [ + app: :raw_sample, + version: "0.1.0", + deps: [ + {:cooked_repo, "0.1.0", path: "custom/raw_repo"} + ] + ] + end + end + + @tag apps: [:raw_sample] test "does not mark for compilation on get/update" do Mix.Project.push DepsApp @@ -24,31 +37,25 @@ defmodule Mix.Tasks.DepsPathTest do end end + @tag apps: [:raw_sample] test "compiles ands runs even if lock does not match" do Mix.Project.push DepsApp in_fixture "deps_status", fn -> - Mix.Dep.Lock.write [raw_repo: "abcdef"] + Mix.Dep.Lock.write %{raw_repo: "abcdef"} Mix.Tasks.Run.run ["-e", "Mix.shell.info RawRepo.hello"] assert_received {:mix_shell, :info, ["==> raw_repo"]} assert_received {:mix_shell, :info, ["world"]} end end - defmodule InvalidPathDepsApp do - def project do - [ - app: :rebar_as_dep, - version: "0.1.0", - deps: [{:rebar_dep, path: MixTest.Case.tmp_path("rebar_dep")}] - ] - end - end + @tag apps: [:raw_sample] + test "uses the name of the app, not the path basename" do + Mix.Project.push MismatchDepsApp - test "raises on non-mix path deps" do - Mix.Project.push InvalidPathDepsApp - assert_raise Mix.Error, ~r/:path option can only be used with mix projects/, fn -> - Mix.Tasks.Deps.Get.run [] + in_fixture "deps_status", fn -> + Mix.Tasks.Deps.Compile.run [] + assert File.exists?("_build/dev/lib/cooked_repo/ebin") end end end diff --git a/lib/mix/test/mix/tasks/deps.tree_test.exs b/lib/mix/test/mix/tasks/deps.tree_test.exs new file mode 100644 index 00000000000..4f09ff35fc8 --- /dev/null +++ b/lib/mix/test/mix/tasks/deps.tree_test.exs @@ -0,0 +1,141 @@ +Code.require_file "../../test_helper.exs", __DIR__ + +defmodule Mix.Tasks.Deps.TreeTest do + use MixTest.Case + + defmodule ConvergedDepsApp do + def project do + [ + app: :sample, + version: "0.1.0", + deps: [ + {:deps_on_git_repo, "0.2.0", git: fixture_path("deps_on_git_repo")}, + {:git_repo, ">= 0.1.0", git: MixTest.Case.fixture_path("git_repo")} + ] + ] + end + end + + defmodule OverriddenDepsApp do + def project do + [ + app: :sample, + version: "0.1.0", + deps: [ + {:deps_on_git_repo, ~r"0.2.0", git: fixture_path("deps_on_git_repo"), only: :test}, + {:git_repo, git: MixTest.Case.fixture_path("git_repo"), override: true} + ] + ] + end + end + + test "shows the dependency tree", context do + Mix.Project.push ConvergedDepsApp + + in_tmp context.test, fn -> + Mix.Tasks.Deps.Tree.run(["--format", "pretty"]) + assert_received {:mix_shell, :info, ["sample"]} + assert_received {:mix_shell, :info, ["├── git_repo >= 0.1.0 (" <> _]} + assert_received {:mix_shell, :info, ["└── deps_on_git_repo 0.2.0 (" <> _]} + refute_received {:mix_shell, :info, [" └── git_repo (" <> _]} + + Mix.Tasks.Deps.Get.run([]) + Mix.Tasks.Deps.Tree.run(["--format", "pretty"]) + assert_received 
{:mix_shell, :info, ["sample"]} + assert_received {:mix_shell, :info, ["├── git_repo >= 0.1.0 (" <> _]} + assert_received {:mix_shell, :info, ["└── deps_on_git_repo 0.2.0 (" <> _]} + assert_received {:mix_shell, :info, [" └── git_repo (" <> _]} + end + after + purge [DepsOnGitRepo.Mixfile, GitRepo.Mixfile] + end + + test "show the dependency tree for umbrella apps" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project(:umbrella, ".", fn _ -> + Mix.Task.run "deps.tree", ["--format", "pretty"] + assert_received {:mix_shell, :info, ["foo"]} + assert_received {:mix_shell, :info, ["bar"]} + assert_received {:mix_shell, :info, ["└── foo (../foo)"]} + end) + end + end + + test "shows the given dependency", context do + Mix.Project.push ConvergedDepsApp + + in_tmp context.test, fn -> + assert_raise Mix.Error, "could not find dependency unknown", fn -> + Mix.Tasks.Deps.Tree.run(["--format", "pretty", "unknown"]) + end + + Mix.Tasks.Deps.Tree.run(["--format", "pretty", "deps_on_git_repo"]) + assert_received {:mix_shell, :info, ["deps_on_git_repo 0.2.0 (" <> _]} + refute_received {:mix_shell, :info, ["└── git_repo (" <> _]} + end + end + + test "shows overridden deps", context do + Mix.Project.push OverriddenDepsApp + + in_tmp context.test, fn -> + Mix.Tasks.Deps.Tree.run(["--format", "pretty"]) + assert_received {:mix_shell, :info, ["sample"]} + assert_received {:mix_shell, :info, ["├── git_repo (" <> msg]} + assert_received {:mix_shell, :info, ["└── deps_on_git_repo ~r/0.2.0/ (" <> _]} + assert msg =~ "*override*" + end + end + + test "excludes the given deps", context do + Mix.Project.push OverriddenDepsApp + + in_tmp context.test, fn -> + Mix.Tasks.Deps.Tree.run(["--format", "pretty", "--exclude", "deps_on_git_repo"]) + assert_received {:mix_shell, :info, ["sample"]} + assert_received {:mix_shell, :info, ["└── git_repo (" <> _]} + refute_received {:mix_shell, :info, ["└── deps_on_git_repo ~r/0.2.0/ (" <> _]} + end + end + + test "shows a particular environment", context do + Mix.Project.push OverriddenDepsApp + + in_tmp context.test, fn -> + Mix.Tasks.Deps.Tree.run(["--format", "pretty", "--only", "prod"]) + assert_received {:mix_shell, :info, ["sample"]} + assert_received {:mix_shell, :info, ["└── git_repo (" <> _]} + refute_received {:mix_shell, :info, ["└── deps_on_git_repo ~r/0.2.0/ (" <> _]} + end + end + + test "shows the dependency tree in DOT graph format", context do + Mix.Project.push ConvergedDepsApp + + in_tmp context.test, fn -> + Mix.Tasks.Deps.Tree.run(["--format", "dot"]) + + assert File.read!("deps_tree.dot") == """ + digraph "dependency tree" { + "sample" + "sample" -> "git_repo" [label=">= 0.1.0"] + "sample" -> "deps_on_git_repo" [label="0.2.0"] + } + """ + + Mix.Tasks.Deps.Get.run([]) + Mix.Tasks.Deps.Tree.run(["--format", "dot"]) + + assert File.read!("deps_tree.dot") == """ + digraph "dependency tree" { + "sample" + "sample" -> "git_repo" [label=">= 0.1.0"] + "sample" -> "deps_on_git_repo" [label="0.2.0"] + "deps_on_git_repo" -> "git_repo" [label=""] + } + """ + end + after + purge [DepsOnGitRepo.Mixfile, GitRepo.Mixfile] + end +end diff --git a/lib/mix/test/mix/tasks/deps_test.exs b/lib/mix/test/mix/tasks/deps_test.exs index 2c5c3a33578..8fdd1ba19c5 100644 --- a/lib/mix/test/mix/tasks/deps_test.exs +++ b/lib/mix/test/mix/tasks/deps_test.exs @@ -5,53 +5,55 @@ defmodule Mix.Tasks.DepsTest do defmodule DepsApp do def project do - [ app: :deps, version: "0.1.0", - deps: [ - {:ok, "0.1.0", github: "elixir-lang/ok"}, - {:invalidvsn, "0.2.0", path: 
"deps/invalidvsn"}, - {:invalidapp, "0.1.0", path: "deps/invalidapp"}, - {:noappfile, "0.1.0", path: "deps/noappfile"}, - ] - ] + [app: :deps, version: "0.1.0", + deps: [ + {:ok, "0.1.0", github: "elixir-lang/ok"}, + {:invalidvsn, "0.2.0", path: "deps/invalidvsn"}, + {:invalidapp, "0.1.0", path: "deps/invalidapp"}, + {:noappfile, "0.1.0", path: "deps/noappfile"}, + {:nosemver, "~> 0.1", path: "deps/nosemver"}, + ]] end end defmodule SuccessfulDepsApp do def project do - [ app: :sample, version: "0.1.0", - deps: [ - {:ok, "0.1.0", path: "deps/ok"} - ] - ] + [app: :sample, version: "0.1.0", + deps: [ + {:ok, "0.1.0", path: "deps/ok"} + ]] end end defmodule ReqDepsApp do def project do - [ app: :req_deps, version: "0.1.0", - deps: [ - {:ok, ">= 2.0.0", path: "deps/ok"}, - {:noappfile, path: "deps/noappfile", app: false}, - {:apppath, path: "deps/noappfile", app: "../deps/ok/ebin/ok.app"} - ] - ] + [app: :req_deps, version: "0.1.0", + deps: [ + {:ok, ">= 2.0.0", path: "deps/ok"}, + {:noappfile, path: "deps/noappfile", app: false}, + {:apppath, path: "deps/noappfile", app: "../deps/ok/ebin/ok.app"} + ]] end end + ## deps + test "prints list of dependencies and their status" do Mix.Project.push DepsApp in_fixture "deps_status", fn -> Mix.Tasks.Deps.run [] - assert_received {:mix_shell, :info, ["* ok (git://github.com/elixir-lang/ok.git)"]} - assert_received {:mix_shell, :info, [" the dependency is not available, run `mix deps.get`"]} + assert_received {:mix_shell, :info, ["* ok (https://github.com/elixir-lang/ok.git) (mix)"]} + assert_received {:mix_shell, :info, [" the dependency is not available, run \"mix deps.get\""]} assert_received {:mix_shell, :info, ["* invalidvsn (deps/invalidvsn)"]} assert_received {:mix_shell, :info, [" the app file contains an invalid version: :ok"]} - assert_received {:mix_shell, :info, ["* invalidapp (deps/invalidapp)"]} - assert_received {:mix_shell, :info, [" the app file at _build/dev/lib/invalidapp/ebin/invalidapp.app is invalid"]} + assert_received {:mix_shell, :info, ["* invalidapp (deps/invalidapp) (mix)"]} + assert_received {:mix_shell, :info, [" the app file at \"_build/dev/lib/invalidapp/ebin/invalidapp.app\" is invalid"]} assert_received {:mix_shell, :info, ["* noappfile (deps/noappfile)"]} - assert_received {:mix_shell, :info, [" could not find an app file at _build/dev/lib/noappfile/ebin/noappfile.app" <> _]} + assert_received {:mix_shell, :info, [" could not find an app file at \"_build/dev/lib/noappfile/ebin/noappfile.app\"" <> _]} + assert_received {:mix_shell, :info, ["* nosemver (deps/nosemver)"]} + assert_received {:mix_shell, :info, [" the app file specified a non-Semantic Versioning format: \"0.7\"" <> _]} end end @@ -61,16 +63,16 @@ defmodule Mix.Tasks.DepsTest do in_fixture "deps_status", fn -> Mix.Tasks.Deps.run [] - assert_received {:mix_shell, :info, ["* ok (deps/ok)"]} + assert_received {:mix_shell, :info, ["* ok (deps/ok) (mix)"]} assert_received {:mix_shell, :info, [" the dependency does not match the requirement \">= 2.0.0\", got \"0.1.0\""]} assert_received {:mix_shell, :info, ["* noappfile (deps/noappfile)"]} assert_received {:mix_shell, :info, ["* apppath (deps/noappfile)"]} - refute_received {:mix_shell, :info, [" could not find app file at _build/dev/lib/noappfile/ebin/apppath.app" <> _]} - refute_received {:mix_shell, :info, [" could not find app file at _build/dev/lib/noappfile/ebin/noappfile.app" <> _]} + refute_received {:mix_shell, :info, [" could not find app file at \"_build/dev/lib/noappfile/ebin/apppath.app\"" <> _]} + 
refute_received {:mix_shell, :info, [" could not find app file at \"_build/dev/lib/noappfile/ebin/noappfile.app\"" <> _]} end end - test "prints elixir req mismatches" do + test "prints Elixir req mismatches" do Mix.Project.push ReqDepsApp in_fixture "deps_status", fn -> @@ -84,9 +86,9 @@ defmodule Mix.Tasks.DepsTest do end """ - Mix.Tasks.Deps.run [] + Mix.Tasks.Deps.Compile.run [:ok] - msg = "warning: the dependency ok requires Elixir \"~> 0.1.0\" " <> + msg = "warning: the dependency :ok requires Elixir \"~> 0.1.0\" " <> "but you are running on v#{System.version}" assert_received {:mix_shell, :error, [^msg]} @@ -98,32 +100,46 @@ defmodule Mix.Tasks.DepsTest do Mix.Project.push DepsApp in_fixture "deps_status", fn -> - File.cd!("deps/ok", fn -> System.cmd("git init") end) + File.cd!("deps/ok", fn -> System.cmd("git", ["init"]) end) Mix.Tasks.Deps.run [] - assert_received {:mix_shell, :info, ["* ok (git://github.com/elixir-lang/ok.git)"]} - assert_received {:mix_shell, :info, [" the dependency is not locked"]} + assert_received {:mix_shell, :info, ["* ok (https://github.com/elixir-lang/ok.git) (mix)"]} + assert_received {:mix_shell, :info, [" the dependency is not locked. To generate the \"mix.lock\" file run \"mix deps.get\""]} - Mix.Dep.Lock.write %{ok: {:git, "git://github.com/elixir-lang/ok.git", "abcdefghi", []}} + Mix.Dep.Lock.write %{ok: {:git, "https://github.com/elixir-lang/ok.git", "abcdefghi", []}} Mix.Tasks.Deps.run [] - assert_received {:mix_shell, :info, ["* ok (git://github.com/elixir-lang/ok.git)"]} + assert_received {:mix_shell, :info, ["* ok (https://github.com/elixir-lang/ok.git) (mix)"]} assert_received {:mix_shell, :info, [" locked at abcdefg"]} - assert_received {:mix_shell, :info, [" lock mismatch: the dependency is out of date"]} + assert_received {:mix_shell, :info, [" lock mismatch: the dependency is out of date. To fetch locked version run \"mix deps.get\""]} - Mix.Dep.Lock.write [ok: {:git, "git://github.com/elixir-lang/another.git", "abcdefghi", []}] + Mix.Dep.Lock.write %{ok: {:git, "git://github.com/elixir-lang/another.git", "abcdefghi", []}} Mix.Tasks.Deps.run [] - assert_received {:mix_shell, :info, ["* ok (git://github.com/elixir-lang/ok.git)"]} - assert_received {:mix_shell, :info, [" lock outdated: the lock is outdated compared to the options in your mixfile"]} + assert_received {:mix_shell, :info, ["* ok (https://github.com/elixir-lang/ok.git) (mix)"]} + assert_received {:mix_shell, :info, [" lock outdated: the lock is outdated compared to the options in your mixfile. To fetch locked version run \"mix deps.get\""]} + end + end + + test "cleans and recompiles artifacts if --force given" do + Mix.Project.push SuccessfulDepsApp + + in_fixture "deps_status", fn -> + Mix.Tasks.Deps.Compile.run [] + File.touch! "_build/dev/lib/ok/clean-me" + + Mix.Tasks.Deps.Compile.run ["--force"] + refute File.exists? 
"_build/dev/lib/ok/clean-me" end end + ## deps.loadpaths + test "checks list of dependencies and their status with success" do Mix.Project.push SuccessfulDepsApp in_fixture "deps_status", fn -> - Mix.Tasks.Deps.Check.run [] + Mix.Tasks.Deps.Loadpaths.run [] end end @@ -132,19 +148,19 @@ defmodule Mix.Tasks.DepsTest do in_fixture "deps_status", fn -> assert_raise Mix.Error, fn -> - Mix.Tasks.Deps.Check.run [] + Mix.Tasks.Deps.Loadpaths.run [] end - assert_received {:mix_shell, :error, ["* ok (git://github.com/elixir-lang/ok.git)"]} - assert_received {:mix_shell, :error, [" the dependency is not available, run `mix deps.get`"]} + assert_received {:mix_shell, :error, ["* ok (https://github.com/elixir-lang/ok.git)"]} + assert_received {:mix_shell, :error, [" the dependency is not available, run \"mix deps.get\""]} assert_received {:mix_shell, :error, ["* invalidvsn (deps/invalidvsn)"]} assert_received {:mix_shell, :error, [" the app file contains an invalid version: :ok"]} assert_received {:mix_shell, :error, ["* invalidapp (deps/invalidapp)"]} - assert_received {:mix_shell, :error, [" the app file at _build/dev/lib/invalidapp/ebin/invalidapp.app is invalid"]} + assert_received {:mix_shell, :error, [" the app file at \"_build/dev/lib/invalidapp/ebin/invalidapp.app\" is invalid"]} # This one is compiled automatically refute_received {:mix_shell, :error, ["* noappfile (deps/noappfile)"]} - refute_received {:mix_shell, :error, [" could not find an app file at _build/dev/lib/noappfile/ebin/noappfile.app" <> _]} + refute_received {:mix_shell, :error, [" could not find an app file at \"_build/dev/lib/noappfile/ebin/noappfile.app\"" <> _]} end end @@ -156,26 +172,42 @@ defmodule Mix.Tasks.DepsTest do File.rm_rf("_build") Mix.Tasks.Deps.Compile.run [] - Mix.Tasks.Deps.Check.run [] + Mix.Tasks.Deps.Loadpaths.run [] assert File.exists?("_build/dev/lib/ok/ebin/ok.app") assert File.exists?("_build/dev/lib/ok/priv/sample") Mix.Tasks.Compile.run [] + assert to_charlist(Path.expand("_build/dev/lib/ok/ebin/")) in :code.get_path + assert File.exists?("_build/dev/lib/sample/ebin/sample.app") + + # Remove the deps but set build_path, deps won't be pruned, but load paths are + Mix.ProjectStack.post_config [deps: [], build_path: "_build"] + Mix.ProjectStack.clear_cache + Mix.Project.pop + Mix.Project.push SuccessfulDepsApp + + Mix.Tasks.Deps.Loadpaths.run [] + refute to_charlist(Path.expand("_build/dev/lib/ok/ebin/")) in :code.get_path + assert File.exists?("_build/dev/lib/ok/ebin/ok.app") assert File.exists?("_build/dev/lib/sample/ebin/sample.app") + # Remove the deps without build_path, deps will be pruned Mix.ProjectStack.post_config [deps: []] + Mix.ProjectStack.clear_cache Mix.Project.pop Mix.Project.push SuccessfulDepsApp - Mix.Tasks.Deps.Check.run [] + Mix.Tasks.Deps.Loadpaths.run [] refute File.exists?("_build/dev/lib/ok/ebin/ok.app") assert File.exists?("_build/dev/lib/sample/ebin/sample.app") end end - test "unlocks all deps" do + ## deps.unlock + + test "unlocks all deps", context do Mix.Project.push DepsApp - in_fixture "no_mixfile", fn -> + in_tmp context.test, fn -> Mix.Dep.Lock.write %{git_repo: "abcdef"} assert Mix.Dep.Lock.read == %{git_repo: "abcdef"} Mix.Tasks.Deps.Unlock.run ["--all"] @@ -183,12 +215,46 @@ defmodule Mix.Tasks.DepsTest do end end - test "unlocks specific deps" do + test "unlocks unused deps", context do + Mix.Project.push DepsApp + in_tmp context.test, fn -> + Mix.Dep.Lock.write %{whatever: "abcdef", ok: "abcdef"} + assert Mix.Dep.Lock.read == %{whatever: "abcdef", ok: "abcdef"} 
+ Mix.Tasks.Deps.Unlock.run ["--unused"] + assert Mix.Dep.Lock.read == %{ok: "abcdef"} + end + end + + test "unlocks specific deps", context do Mix.Project.push DepsApp - in_fixture "no_mixfile", fn -> + in_tmp context.test, fn -> Mix.Dep.Lock.write %{git_repo: "abcdef", another: "hash"} Mix.Tasks.Deps.Unlock.run ["git_repo", "unknown"] assert Mix.Dep.Lock.read == %{another: "hash"} + error = "warning: unknown dependency is not locked" + assert_received {:mix_shell, :error, [^error]} + end + end + + test "unlocks filtered deps", context do + Mix.Project.push DepsApp + in_tmp context.test, fn -> + Mix.Dep.Lock.write %{git_repo: "abcdef", another: "hash", another_one: "hash"} + Mix.Tasks.Deps.Unlock.run ["--filter", "another"] + assert Mix.Dep.Lock.read == %{git_repo: "abcdef"} + output = """ + Unlocked deps: + * another + * another_one + """ + assert_received {:mix_shell, :info, [^output]} + end + end + + test "fails with message on missing dependencies" do + Mix.Project.push DepsApp + assert_raise Mix.Error, ~r/"mix deps\.unlock" expects dependencies as arguments/, fn -> + Mix.Tasks.Deps.Unlock.run [] end end @@ -238,6 +304,19 @@ defmodule Mix.Tasks.DepsTest do ## Nested dependencies + defmodule ConflictDepsApp do + def project do + [ + app: :raw_sample, + version: "0.1.0", + deps: [ + {:git_repo, "0.1.0", path: "custom/raw_repo"}, + {:bad_deps_repo, "0.1.0", path: "custom/bad_deps_repo"} + ] + ] + end + end + defmodule DivergedDepsApp do def project do [ @@ -264,7 +343,7 @@ defmodule Mix.Tasks.DepsTest do end end - defmodule OverridenDepsApp do + defmodule OverriddenDepsApp do def project do [ app: :raw_sample, @@ -277,7 +356,7 @@ defmodule Mix.Tasks.DepsTest do end end - defmodule NonOverridenDepsApp do + defmodule NonOverriddenDepsApp do def project do [ app: :raw_sample, @@ -295,26 +374,43 @@ defmodule Mix.Tasks.DepsTest do in_fixture "deps_status", fn -> assert_raise Mix.Error, ~r/Unknown dependency invalid for environment dev/, fn -> - Mix.Tasks.Deps.Get.run ["invalid"] + Mix.Tasks.Deps.Update.run ["invalid"] end end end - test "fails on diverged dependencies" do - Mix.Project.push DivergedDepsApp + test "fails on diverged dependencies on get/update" do + Mix.Project.push ConflictDepsApp in_fixture "deps_status", fn -> assert_raise Mix.Error, fn -> - Mix.Tasks.Deps.Check.run [] + Mix.Tasks.Deps.Loadpaths.run [] end + assert_received {:mix_shell, :error, [" the dependency git_repo in mix.exs is overriding a child dependency" <> _]} - receive do - {:mix_shell, :error, [" different specs were given for the git_repo app:" <> _ = msg]} -> - assert msg =~ "In custom/deps_repo/mix.exs:" - assert msg =~ "{:git_repo, \"0.1.0\", [git: #{inspect fixture_path("git_repo")}]}" - after - 0 -> flunk "expected diverged error message" + assert_raise Mix.Error, fn -> + Mix.Tasks.Deps.Get.run [] + end + assert_received {:mix_shell, :error, [" the dependency git_repo in mix.exs is overriding a child dependency" <> _]} + + assert_raise Mix.Error, fn -> + Mix.Tasks.Deps.Update.run ["--all"] + end + assert_received {:mix_shell, :error, [" the dependency git_repo in mix.exs is overriding a child dependency" <> _]} + end + end + + test "fails on diverged dependencies on check" do + Mix.Project.push DivergedDepsApp + + in_fixture "deps_status", fn -> + assert_raise Mix.Error, fn -> + Mix.Tasks.Deps.Loadpaths.run [] end + + assert_received {:mix_shell, :error, [" different specs were given for the git_repo app:" <> _ = msg]} + assert msg =~ "In custom/deps_repo/mix.exs:" + assert msg =~ "{:git_repo, \"0.1.0\", 
[env: :prod, git: #{inspect fixture_path("git_repo")}]}" end end @@ -340,16 +436,12 @@ defmodule Mix.Tasks.DepsTest do assert_raise Mix.Error, fn -> Mix.Tasks.Deps.Get.run [] - Mix.Tasks.Deps.Check.run [] + Mix.Tasks.Deps.Loadpaths.run [] end - receive do - {:mix_shell, :error, [" the dependency git_repo defined" <> _ = msg]} -> - assert msg =~ "In custom/deps_repo/mix.exs:" - assert msg =~ "{:git_repo, \"0.2.0\", [git: #{inspect fixture_path("git_repo")}]}" - after - 0 -> flunk "expected diverged req error message" - end + assert_received {:mix_shell, :error, [" the dependency git_repo 0.1.0" <> _ = msg]} + assert msg =~ "In custom/deps_repo/mix.exs:" + assert msg =~ "{:git_repo, \"0.2.0\", [env: :prod, git: #{inspect fixture_path("git_repo")}]}" end end @@ -375,7 +467,7 @@ defmodule Mix.Tasks.DepsTest do assert_raise Mix.Error, fn -> Mix.Tasks.Deps.Get.run [] - Mix.Tasks.Deps.Check.run [] + Mix.Tasks.Deps.Loadpaths.run [] end assert_received {:mix_shell, :error, [" the dependency git_repo in mix.exs is overriding" <> _]} @@ -401,11 +493,24 @@ defmodule Mix.Tasks.DepsTest do assert_received {:mix_shell, :info, [^message]} end after - purge [GitRepo, GitRepo.Mix] + purge [GitRepo, GitRepo.Mixfile] + end + + test "does not check dependencies if --no-deps-check is provided" do + Mix.Project.push SuccessfulDepsApp + + in_fixture "deps_status", fn -> + Mix.Tasks.Deps.Get.run [] + File.rm_rf!("deps/ok") + assert_raise Mix.Error, fn -> + Mix.Tasks.Compile.run [] + end + Mix.Tasks.Compile.run ["--no-deps-check"] + end end test "works with overridden dependencies" do - Mix.Project.push OverridenDepsApp + Mix.Project.push OverriddenDepsApp in_fixture "deps_status", fn -> Mix.Tasks.Deps.Get.run [] @@ -423,47 +528,73 @@ defmodule Mix.Tasks.DepsTest do assert_received {:mix_shell, :info, [^message]} end after - purge [GitRepo, GitRepo.Mix] + purge [GitRepo, GitRepo.Mixfile] end test "converged dependencies errors if not overriding" do - Mix.Project.push NonOverridenDepsApp + Mix.Project.push NonOverriddenDepsApp in_fixture "deps_status", fn -> assert_raise Mix.Error, fn -> - Mix.Tasks.Deps.Check.run [] + Mix.Tasks.Deps.Loadpaths.run [] end receive do {:mix_shell, :error, [" the dependency git_repo in mix.exs" <> _ = msg]} -> assert msg =~ "In mix.exs:" - assert msg =~ "{:git_repo, \"0.1.0\", [git: #{inspect fixture_path("git_repo")}]}" + assert msg =~ "{:git_repo, \"0.1.0\", [env: :prod, git: #{inspect fixture_path("git_repo")}]}" after 0 -> flunk "expected overriding error message" end end after - purge [GitRepo, GitRepo.Mix] + purge [GitRepo, GitRepo.Mixfile] + end + + test "checks if dependencies are using old Elixir version" do + Mix.Project.push SuccessfulDepsApp + + in_fixture "deps_status", fn -> + Mix.Tasks.Deps.Compile.run [] + Mix.Tasks.Deps.Loadpaths.run [] + + File.mkdir_p!("_build/dev/lib/ok/ebin") + manifest_data = :erlang.term_to_binary({:v1, "the_future", :scm}) + File.write!("_build/dev/lib/ok/.compile.elixir_scm", manifest_data) + Mix.Task.clear + + msg = " the dependency was built with an out-of-date Elixir version, run \"mix deps.compile\"" + + Mix.Tasks.Deps.run [] + assert_received {:mix_shell, :info, [^msg]} + + # deps.loadpaths will automatically recompile it + Mix.Tasks.Deps.Loadpaths.run [] + + Mix.Tasks.Deps.run [] + refute_received {:mix_shell, :info, [^msg]} + end end - test "checks if dependencies are using old elixir version" do + test "checks if dependencies are using old scm version" do Mix.Project.push SuccessfulDepsApp in_fixture "deps_status", fn -> 
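# This test and the "old Elixir version" test above fake a stale build manifest: as
# exercised here, _build/dev/lib/<dep>/.compile.elixir_scm holds an Erlang term such as
# {:v2, {elixir_version, otp_release}, scm_module} encoded with :erlang.term_to_binary/1.
# Writing a mismatching term makes "mix deps" report the dependency and lets
# "mix deps.loadpaths" recompile it. The manifest can be inspected with the inverse call,
# for example:
#
#     "_build/dev/lib/ok/.compile.elixir_scm" |> File.read!() |> :erlang.binary_to_term()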
Mix.Tasks.Deps.Compile.run [] - Mix.Tasks.Deps.Check.run [] + Mix.Tasks.Deps.Loadpaths.run [] File.mkdir_p!("_build/dev/lib/ok/ebin") - File.write!("_build/dev/lib/ok/.compile.lock", "the_future") + manifest_data = :erlang.term_to_binary({:v2, {System.version, :erlang.system_info(:otp_release)}, :scm}) + File.write!("_build/dev/lib/ok/.compile.elixir_scm", manifest_data) Mix.Task.clear - msg = " the dependency is built with an out-of-date elixir version, run `mix deps.compile`" + msg = " the dependency was built with another SCM, run \"mix deps.compile\"" Mix.Tasks.Deps.run [] assert_received {:mix_shell, :info, [^msg]} - # deps.check will automatically recompiled it - Mix.Tasks.Deps.Check.run [] + # deps.loadpaths will automatically recompile it + Mix.Tasks.Deps.Loadpaths.run [] Mix.Tasks.Deps.run [] refute_received {:mix_shell, :info, [^msg]} @@ -476,7 +607,6 @@ defmodule Mix.Tasks.DepsTest do app: :raw_sample, version: "0.1.0", deps: [ - {:deps_repo, "0.1.0", path: "custom/deps_repo", compile: false}, {:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo"), compile: false} ] ] @@ -488,7 +618,6 @@ defmodule Mix.Tasks.DepsTest do in_fixture "deps_status", fn -> Mix.Tasks.Deps.Compile.run [] - refute_received {:mix_shell, :info, ["==> deps_repo"]} refute_received {:mix_shell, :info, ["==> git_repo"]} end end @@ -507,63 +636,126 @@ defmodule Mix.Tasks.DepsTest do end end - test "converges duplicated deps at the same level" do + test "warns and converges duplicated deps at the same level" do Mix.Project.push DupDeps in_fixture "deps_status", fn -> Mix.Tasks.Deps.run [] - msg = "* ok 0.1.0 (deps/ok)" + assert_received {:mix_shell, :error, + ["warning: the dependency :ok is duplicated at the top level, " <> + "please remove one of them"]} + + msg = "* ok 0.1.0 (deps/ok) (mix)" assert_received {:mix_shell, :info, [^msg]} refute_received {:mix_shell, :info, [^msg]} end end + ## deps.clean + defmodule CleanDepsApp do def project do [ app: :raw_sample, version: "0.1.0", deps: [ - {:git_repo, ">= 0.1.0", git: MixTest.Case.fixture_path("git_repo")} + {:git_repo, ">= 0.1.0", git: MixTest.Case.fixture_path("git_repo")}, + {:ok, ">= 2.0.0", path: "deps/ok"} ] ] end end - test "clean all deps" do + test "cleans dependencies" do Mix.Project.push CleanDepsApp in_fixture "deps_status", fn -> - File.mkdir_p!("deps/git_repo") + File.mkdir_p!("_build/dev/lib/raw_sample") File.mkdir_p!("_build/dev/lib/git_repo") + File.mkdir_p!("_build/test/lib/git_repo") + File.mkdir_p!("_build/dev/lib/ok") + File.mkdir_p!("_build/test/lib/ok") - message = "mix deps.clean expects dependencies as arguments or " <> - "the --all option to clean all dependencies" + message = "\"mix deps.clean\" expects dependencies as arguments or " <> + "a flag indicating which dependencies to clean. 
" <> + "The --all option will clean all dependencies while " <> + "the --unused option cleans unused dependencies" assert_raise Mix.Error, message, fn -> Mix.Tasks.Deps.Clean.run [] end + Mix.Tasks.Deps.Clean.run ["--only", "dev", "--all"] + refute File.exists?("_build/dev/lib/git_repo") + refute File.exists?("_build/dev/lib/ok") + assert File.exists?("_build/test/lib/git_repo") + assert File.exists?("_build/dev/lib/raw_sample") + Mix.Tasks.Deps.Clean.run ["--all"] - refute File.exists?("deps/git_repo") + refute File.exists?("_build/dev/lib/git_repo") + refute File.exists?("_build/test/lib/git_repo") + assert File.exists?("_build/dev/lib/raw_sample") end end - test "cleans dependencies" do + test "cleans unused dependencies" do Mix.Project.push CleanDepsApp in_fixture "deps_status", fn -> + File.mkdir_p!("_build/dev/lib/raw_sample") + File.mkdir_p!("deps/git_repo") File.mkdir_p!("_build/dev/lib/git_repo") - File.mkdir_p!("_build/test/lib/git_repo") + File.mkdir_p!("deps/git_repo_unused") + File.mkdir_p!("_build/dev/lib/git_repo_unused") - Mix.Tasks.Deps.Clean.run ["--only", "dev", "--all"] - refute File.exists?("_build/dev/lib/git_repo") - assert File.exists?("_build/test/lib/git_repo") + Mix.Tasks.Deps.Clean.run ["--unused"] + assert File.exists?("deps/git_repo") + assert File.exists?("_build/dev/lib/git_repo") + refute File.exists?("deps/git_repo_unused") + refute File.exists?("_build/dev/lib/git_repo_unused") + assert File.exists?("_build/dev/lib/raw_sample") + end + end - Mix.Tasks.Deps.Clean.run ["--all"] - refute File.exists?("_build/dev/lib/git_repo") - refute File.exists?("_build/test/lib/git_repo") + test "cleans dependencies build" do + Mix.Project.push CleanDepsApp + + in_fixture "deps_status", fn -> + File.mkdir_p!("deps/raw_sample") + File.mkdir_p!("_build/dev/lib/raw_sample") + + Mix.Tasks.Deps.Clean.run ["raw_sample", "--build"] + assert File.exists?("deps/raw_sample") + refute File.exists?("_build/dev/lib/raw_sample") + end + end + + test "warns on invalid path on clean dependencies" do + Mix.Project.push CleanDepsApp + + in_fixture "deps_status", fn -> + File.mkdir_p!("deps/raw_sample") + File.mkdir_p!("_build/dev/lib/raw_sample") + + Mix.Tasks.Deps.Clean.run ["raw_sample_with_a_typo"] + assert File.exists?("deps/raw_sample") + + msg = "warning: the dependency raw_sample_with_a_typo is not present in the build directory" + assert_received {:mix_shell, :error, [^msg]} + end + end + + test "does not remove dependency source when using :path" do + Mix.Project.push CleanDepsApp + + in_fixture "deps_status", fn -> + assert File.exists?("deps/ok") + + Mix.Tasks.Deps.Clean.run ["raw_sample", "--all"] + refute File.exists?("_build/dev/lib/ok") + refute File.exists?("_build/test/lib/ok") + assert File.exists?("deps/ok") end end end diff --git a/lib/mix/test/mix/tasks/do_test.exs b/lib/mix/test/mix/tasks/do_test.exs index 0c7ef442688..2ad14232451 100644 --- a/lib/mix/test/mix/tasks/do_test.exs +++ b/lib/mix/test/mix/tasks/do_test.exs @@ -3,11 +3,20 @@ Code.require_file "../../test_helper.exs", __DIR__ defmodule Mix.Tasks.DoTest do use MixTest.Case - test "runs given tasks" do - in_fixture "no_mixfile", fn -> + test "runs given tasks", context do + in_tmp context.test, fn -> Mix.Tasks.Do.run ["compile", "--list,", "help"] assert_received {:mix_shell, :info, ["mix help" <> _]} assert_received {:mix_shell, :info, ["mix compile.app" <> _]} end end -end \ No newline at end of file + + test "gather_command returns a list of commands" do + import Mix.Tasks.Do, only: [gather_commands: 1] + 
assert gather_commands(["compile", "--list,", "help"]) == [["compile", "--list"], ["help"]] + assert gather_commands(["help,", "compile", "--list"]) == [["help"], ["compile", "--list"]] + assert gather_commands(["compile,", "run", "-e", "IO.puts :hello"]) == [["compile"], ["run", "-e", "IO.puts :hello"]] + assert gather_commands(["compile,", "run", "-e", "[1, 2]"]) == [["compile"], ["run", "-e", "[1, 2]"]] + assert gather_commands(["test", ",", "help"]) == [["test"], ["help"]] + end +end diff --git a/lib/mix/test/mix/tasks/escript_test.exs b/lib/mix/test/mix/tasks/escript_test.exs new file mode 100644 index 00000000000..a52fa9e6f2e --- /dev/null +++ b/lib/mix/test/mix/tasks/escript_test.exs @@ -0,0 +1,299 @@ +Code.require_file "../../test_helper.exs", __DIR__ + +defmodule Mix.Tasks.EscriptTest do + use MixTest.Case + + defmodule Escript do + def project do + [app: :escript_test, + version: "0.0.1", + escript: [ + main_module: EscriptTest, + name: "escript_test", + embed_elixir: true]] + end + end + + defmodule EscriptWithDebugInfo do + def project do + [app: :escript_test_with_debug_info, + version: "0.0.1", + escript: [ + main_module: EscriptTest, + strip_beam: false]] + end + end + + defmodule EscriptWithPath do + def project do + [app: :escript_test_with_path, + version: "0.0.1", + escript: [ + app: nil, + embed_elixir: true, + main_module: EscriptTest, + path: Path.join("ebin", "escript_test_with_path")]] + end + end + + defmodule EscriptWithDeps do + def project do + [app: :escript_test_with_deps, + version: "0.0.1", + escript: [main_module: EscriptTest], + deps: [{:ok, path: fixture_path("deps_status/deps/ok")}]] + end + end + + defmodule EscriptErlangWithDeps do + def project do + [app: :escript_test_erlang_with_deps, + version: "0.0.1", + language: :erlang, + escript: [main_module: :escript_test], + deps: [{:ok, path: fixture_path("deps_status/deps/ok")}]] + end + + def application do + [applications: [], extra_applications: [:crypto]] + end + end + + defmodule EscriptWithUnknownMainModule do + def project do + [app: :escript_test_with_unknown_main_module, + version: "0.0.1", + escript: [ + main_module: BogusEscriptTest]] + end + end + + defmodule EscriptConsolidated do + def project do + [app: :escript_test_consolidated, + build_embedded: true, + version: "0.0.1", + escript: [main_module: EscriptTest]] + end + end + + test "generate escript" do + Mix.Project.push Escript + + in_fixture "escript_test", fn -> + Mix.Tasks.Escript.Build.run [] + assert_received {:mix_shell, :info, ["Generated escript escript_test with MIX_ENV=dev"]} + assert System.cmd("escript", ["escript_test"]) == {"TEST\n", 0} + assert count_abstract_code("escript_test") == 0 + + Mix.Tasks.Escript.Build.run [] + refute_received {:mix_shell, :info, ["Generated escript escript_test with MIX_ENV=dev"]} + end + end + + test "generate escript with config" do + Mix.Project.push Escript + + in_fixture "escript_test", fn -> + File.mkdir_p! "config" + File.write! 
"config/config.exs", """ + [foobar: [value: "FROM CONFIG", other: %{}]] + """ + Mix.Tasks.Escript.Build.run [] + assert_received {:mix_shell, :info, ["Generated escript escript_test with MIX_ENV=dev"]} + assert System.cmd("escript", ["escript_test"]) == {"FROM CONFIG\n", 0} + assert count_abstract_code("escript_test") == 0 + end + end + + test "generate escript with debug information" do + Mix.Project.push EscriptWithDebugInfo + + in_fixture "escript_test", fn -> + Mix.Tasks.Escript.Build.run [] + assert_received {:mix_shell, :info, ["Generated escript escript_test_with_debug_info with MIX_ENV=dev"]} + assert System.cmd("escript", ["escript_test_with_debug_info"]) == {"TEST\n", 0} + assert count_abstract_code("escript_test_with_debug_info") > 0 + + Mix.Tasks.Escript.Build.run [] + refute_received {:mix_shell, :info, ["Generated escript escript_test_with_debug_info with MIX_ENV=dev"]} + end + end + + defp count_abstract_code(escript_filename) do + escript_filename + |> read_beams() + |> Enum.count(fn {_, beam} -> get_abstract_code(beam) end) + end + + defp read_beams(escript_filename) do + # :zip.unzip/2 cannot unzip an escript unless we remove the escript header + zip_data = remove_escript_header(File.read!(escript_filename)) + {:ok, tuples} = :zip.unzip(zip_data, [:memory]) + for {filename, beam} <- tuples, Path.extname(filename) == ".beam" do + {filename, beam} + end + end + + defp remove_escript_header(escript_data) do + {offset, _length} = :binary.match(escript_data, "\nPK") + zip_start = offset + 1 + binary_part(escript_data, zip_start, byte_size(escript_data) - zip_start) + end + + defp get_abstract_code(beam) do + case :beam_lib.chunks(beam, [:abstract_code]) do + {:ok, {_, [{:abstract_code, {_, abstract_code}}]}} -> abstract_code + _ -> nil + end + end + + test "generate escript with path" do + Mix.Project.push EscriptWithPath + + in_fixture "escript_test", fn -> + Mix.Tasks.Escript.Build.run [] + assert_received {:mix_shell, :info, ["Generated escript ebin/escript_test_with_path with MIX_ENV=dev"]} + assert System.cmd("escript", ["ebin/escript_test_with_path"]) == {"TEST\n", 0} + end + end + + test "generate escript with deps" do + Mix.Project.push EscriptWithDeps + + in_fixture "escript_test", fn -> + Mix.Tasks.Escript.Build.run [] + assert_received {:mix_shell, :info, ["Generated escript escript_test_with_deps with MIX_ENV=dev"]} + assert System.cmd("escript", ["escript_test_with_deps"]) == {"TEST\n", 0} + end + after + purge [Ok.Mixfile] + end + + test "generate escript with Erlang and deps" do + Mix.Project.push EscriptErlangWithDeps + + in_fixture "escript_test", fn -> + Mix.Tasks.Escript.Build.run [] + assert_received {:mix_shell, :info, ["Generated escript escript_test_erlang_with_deps with MIX_ENV=dev"]} + assert System.cmd("escript", ["escript_test_erlang_with_deps"]) == {"Erlang value", 0} + end + after + purge [Ok.Mixfile] + end + + test "generating escript for umbrella projects fails with a nice error" do + message = "Building escripts for umbrella projects is unsupported" + + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project(:umbrella, ".", fn _ -> + assert_raise Mix.Error, message, fn -> + Mix.Tasks.Escript.Build.run [] + end + end) + end + end + + test "generate escript with consolidated protocols" do + Mix.Project.push EscriptConsolidated + + in_fixture "escript_test", fn -> + Mix.Tasks.Escript.Build.run [] + assert_received {:mix_shell, :info, ["Generated escript escript_test_consolidated with MIX_ENV=dev"]} + assert System.cmd("escript", 
["escript_test_consolidated", "Enumerable"]) == {"true\n", 0} + end + end + + test "escript install and uninstall" do + File.rm_rf! tmp_path(".mix/escripts") + Mix.Project.push Escript + + in_fixture "escript_test", fn -> + # build the escript + Mix.Tasks.Escript.Build.run [] + assert_received {:mix_shell, :info, ["Generated escript escript_test with MIX_ENV=dev"]} + + # check that no escripts are installed + Mix.Tasks.Escript.run [] + assert_received {:mix_shell, :info, ["No escripts currently installed."]} + + # install our escript + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Escript.Install.run [] + + # check that it shows in the list + Mix.Tasks.Escript.run [] + assert_received {:mix_shell, :info, ["* escript_test"]} + refute_received {:mix_shell, :info, ["* escript_test.bat"]} + + # check uninstall confirmation + send self(), {:mix_shell_input, :yes?, false} + Mix.Tasks.Escript.Uninstall.run ["escript_test"] + assert File.regular? tmp_path(".mix/escripts/escript_test") + + # uninstall the escript + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Escript.Uninstall.run ["escript_test"] + refute File.regular? tmp_path(".mix/escripts/escript_test") + refute File.regular? tmp_path(".mix/escripts/escript_test.bat") + + # check that no escripts remain + Mix.Tasks.Escript.run [] + assert_received {:mix_shell, :info, ["No escripts currently installed."]} + end + end + + test "escript invalid install" do + # Install our escript + send self(), {:mix_shell_input, :yes?, true} + assert_raise Mix.Error, + "The given path does not point to an escript, installation aborted", fn -> + Mix.Tasks.Escript.Install.run [__ENV__.file] + end + end + + test "escript invalid main module" do + Mix.Project.push EscriptWithUnknownMainModule + + in_fixture "escript_test", fn -> + assert_raise Mix.Error, "Could not generate escript, module Elixir.BogusEscriptTest defined as " <> + ":main_module could not be loaded", fn -> + Mix.Tasks.Escript.Build.run [] + end + end + end + + test "escript.install from Git" do + in_fixture "git_repo", fn -> + File.write! "lib/git_repo.ex", """ + defmodule GitRepo do + def main(_argv) do + IO.puts "TEST" + end + end + """ + + File.write! 
"mix.exs", """ + defmodule GitRepo.Mixfile do + use Mix.Project + + def project do + [app: :git_repo, version: "0.1.0", escript: [main_module: GitRepo]] + end + end + """ + + System.cmd("git", ~w[add .]) + System.cmd("git", ~w[commit -m "ok"]) + + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Escript.Install.run ["git", File.cwd!()] + assert_received {:mix_shell, :info, ["Generated escript git_repo with MIX_ENV=prod"]} + + escript_path = Path.join([tmp_path(".mix"), "escripts", "git_repo"]) + assert System.cmd("escript", [escript_path]) == {"TEST\n", 0} + end + after + purge [GitRepo, GitRepo.Mixfile] + end +end diff --git a/lib/mix/test/mix/tasks/escriptize_test.exs b/lib/mix/test/mix/tasks/escriptize_test.exs deleted file mode 100644 index a5839554132..00000000000 --- a/lib/mix/test/mix/tasks/escriptize_test.exs +++ /dev/null @@ -1,79 +0,0 @@ -Code.require_file "../../test_helper.exs", __DIR__ - -defmodule Mix.Tasks.EscriptizeTest do - use MixTest.Case - - defmodule Escript do - def project do - [ app: :escripttest, - version: "0.0.1", - escript: [ - main_module: Escripttest, - name: "escriptest", - embed_elixir: true - ] - ] - end - end - - defmodule EscriptWithPath do - def project do - [ app: :escripttestwithpath, - version: "0.0.1", - escript: [ - app: nil, - embed_elixir: true, - main_module: Escripttest, - path: Path.join("ebin", "escripttestwithpath") - ] - ] - end - end - - defmodule EscriptWithDeps do - def project do - [ app: :escripttestwithdeps, - version: "0.0.1", - escript: [ - main_module: Escripttest, - path: Path.join("ebin", "escripttestwithdeps"), - ], - deps: [{:ok, path: fixture_path("deps_status/deps/ok")}] ] - end - end - - test "generate simple escript" do - Mix.Project.push Escript - - in_fixture "escripttest", fn -> - Mix.Tasks.Escriptize.run [] - assert_received {:mix_shell, :info, ["Generated escript escriptest"]} - assert System.cmd("escript escriptest") == "TEST\n" - - Mix.Tasks.Escriptize.run [] - refute_received {:mix_shell, :info, ["Generated escript escriptest"]} - end - end - - test "generate simple escript with path" do - Mix.Project.push EscriptWithPath - - in_fixture "escripttest", fn -> - Mix.Tasks.Escriptize.run [] - assert_received {:mix_shell, :info, ["Generated escript ebin/escripttestwithpath"]} - assert System.cmd("escript ebin/escripttestwithpath") == "TEST\n" - end - end - - test "generate escript with deps" do - Mix.Project.push EscriptWithDeps - - in_fixture "escripttest", fn -> - Mix.Tasks.Escriptize.run [] - assert_received {:mix_shell, :info, ["Generated escript ebin/escripttestwithdeps"]} - assert System.cmd("escript ebin/escripttestwithdeps") == "TEST\n" - end - after - purge [Ok.Mixfile] - end -end diff --git a/lib/mix/test/mix/tasks/help_test.exs b/lib/mix/test/mix/tasks/help_test.exs index d11758029c6..9d7d2716b0f 100644 --- a/lib/mix/test/mix/tasks/help_test.exs +++ b/lib/mix/test/mix/tasks/help_test.exs @@ -5,8 +5,8 @@ defmodule Mix.Tasks.HelpTest do import ExUnit.CaptureIO - test "help lists all tasks" do - in_fixture "no_mixfile", fn -> + test "help lists all tasks", context do + in_tmp context.test, fn -> Mix.Tasks.Help.run [] assert_received {:mix_shell, :info, ["mix" <> _]} assert_received {:mix_shell, :info, ["mix help" <> _]} @@ -14,40 +14,84 @@ defmodule Mix.Tasks.HelpTest do end end - test "help list default task" do - in_fixture "no_mixfile", fn -> + test "help list default task", context do + in_tmp context.test, fn -> Mix.Tasks.Help.run [] {_, _, [output]} = assert_received {:mix_shell, :info, [_]} - assert 
output =~ ~r/^mix\s+# Run the default task \(current: mix run\)/m + assert output =~ ~r/^mix\s+# Runs the default task \(current: \"mix run\"\)/m end end - test "help --names" do - in_fixture "no_mixfile", fn -> + defmodule Aliases do + def project do + [aliases: [h: "hello", c: "compile"]] + end + end + + test "help --names", context do + Mix.Project.push Aliases + + in_tmp context.test, fn -> Mix.Tasks.Help.run ["--names"] + assert_received {:mix_shell, :info, ["c"]} assert_received {:mix_shell, :info, ["compile"]} + assert_received {:mix_shell, :info, ["h"]} assert_received {:mix_shell, :info, ["help"]} - assert_received {:mix_shell, :info, ["escriptize"]} + assert_received {:mix_shell, :info, ["escript.build"]} + refute_received {:mix_shell, :info, ["compile.all"]} end end - test "help TASK" do - in_fixture "no_mixfile", fn -> + test "help TASK", context do + in_tmp context.test, fn -> output = - capture_io fn -> + capture_io(fn -> Mix.Tasks.Help.run ["compile"] - end + end) - assert output =~ "# mix compile" + assert output =~ "# mix compile\n" assert output =~ "## Command line options" assert output =~ ~r/^Location:/m + + output = + capture_io(fn -> + Mix.Tasks.Help.run ["compile.all"] + end) + + assert output =~ "# mix compile.all\n" + assert output =~ "There is no documentation for this task" + end + end + + test "help --search PATTERN", context do + in_tmp context.test, fn -> + Mix.Tasks.Help.run ["--search", "deps"] + assert_received {:mix_shell, :info, ["mix deps" <> _]} + assert_received {:mix_shell, :info, ["mix deps.clean" <> _]} + end + end + + test "help --search without pattern" do + assert_raise Mix.Error, "Unexpected arguments, expected \"mix help --search PATTERN\"", fn -> + Mix.Tasks.Help.run ["--search"] + end + end + + test "help --search without results", context do + in_tmp context.test, fn -> + output = + capture_io fn -> + Mix.Tasks.Help.run ["--search", "foo"] + end + + assert output == "" end end test "bad arguments" do - assert_raise Mix.Error, "Unexpected arguments, expected `mix help` or `mix help TASK`", fn -> + assert_raise Mix.Error, "Unexpected arguments, expected \"mix help\" or \"mix help TASK\"", fn -> Mix.Tasks.Help.run ["foo", "bar"] end end diff --git a/lib/mix/test/mix/tasks/iex_test.exs b/lib/mix/test/mix/tasks/iex_test.exs index d63a6c612cf..8ba425b1bb7 100644 --- a/lib/mix/test/mix/tasks/iex_test.exs +++ b/lib/mix/test/mix/tasks/iex_test.exs @@ -3,9 +3,9 @@ Code.require_file "../../test_helper.exs", __DIR__ defmodule Mix.Tasks.IexTest do use MixTest.Case, async: true - test "raises error message about correct usage" do - in_fixture "no_mixfile", fn -> - msg = "To use IEx with Mix, please run: iex -S mix" + test "raises error message about correct usage", context do + in_tmp context.test, fn -> + msg = "To use IEx with Mix, please run \"iex -S mix\"" assert_raise Mix.Error, msg, fn -> Mix.Tasks.Iex.run [] end diff --git a/lib/mix/test/mix/tasks/loadconfig_test.exs b/lib/mix/test/mix/tasks/loadconfig_test.exs index 43186703e98..d6ff73a6539 100644 --- a/lib/mix/test/mix/tasks/loadconfig_test.exs +++ b/lib/mix/test/mix/tasks/loadconfig_test.exs @@ -3,23 +3,11 @@ Code.require_file "../../test_helper.exs", __DIR__ defmodule Mix.Tasks.LoadconfigTest do use MixTest.Case - @apps [:my_app, :other_app] - - setup do - on_exit fn -> - Enum.each @apps, fn app -> - Enum.each Application.get_all_env(app), fn {key, _} -> - Application.delete_env(app, key, persistent: true) - end - end - end - :ok - end - - test "reads and persists project configuration" do + 
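# The removed per-file cleanup above is superseded by the @tag/@moduletag :apps
# convention used across these tests: presumably MixTest.Case now reads the tag and
# clears the listed applications' environment after each test, mirroring the removed
# on_exit block:
#
#     on_exit fn ->
#       for app <- apps, {key, _} <- Application.get_all_env(app) do
#         Application.delete_env(app, key, persistent: true)
#       end
#     end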
@tag apps: [:my_app] + test "reads and persists project configuration", context do Mix.Project.push MixTest.Case.Sample - in_fixture "no_mixfile", fn -> + in_tmp context.test, fn -> write_config """ [my_app: [key: :project]] """ @@ -39,6 +27,26 @@ defmodule Mix.Tasks.LoadconfigTest do end end + @tag apps: [:config_app] + test "reads from custom config_path", context do + Mix.ProjectStack.post_config [config_path: "fresh.config"] + Mix.Project.push MixTest.Case.Sample + + in_tmp context.test, fn -> + write_config "fresh.config", """ + [config_app: [key: :value]] + """ + assert Application.fetch_env(:config_app, :key) == :error + Mix.Task.run "loadconfig", [] + assert Application.fetch_env(:config_app, :key) == {:ok, :value} + + File.rm "fresh.config" + assert_raise Mix.Config.LoadError, ~r"could not load config fresh\.config", fn -> + Mix.Task.run "loadconfig", [] + end + end + end + defp write_config(path \\ "config/config.exs", contents) do File.mkdir_p! Path.dirname(path) File.write! path, contents diff --git a/lib/mix/test/mix/tasks/local.public_keys_test.exs b/lib/mix/test/mix/tasks/local.public_keys_test.exs new file mode 100644 index 00000000000..703f8ff4dad --- /dev/null +++ b/lib/mix/test/mix/tasks/local.public_keys_test.exs @@ -0,0 +1,54 @@ +Code.require_file "../../test_helper.exs", __DIR__ + +defmodule Mix.Tasks.Local.PublicKeysTest do + use MixTest.Case + + @pub """ + -----BEGIN PUBLIC KEY----- + MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmlRL//AgfszL2vkk7G3K + t+vrI2d3aG+fGD4BZ1k1Qd/mS0EoDNfDQCpqtLzdM7oOjtu0BNCBvG3HC0tlAU92 + i3EFqEC7RSHNIGYrQ4g5WwGQMw8+Qp3D32esw7iRZKvtJu8IpS7l8x4KQq1QbfTh + s+3yFOQyytnY6of7w2Rv37WtdA3wKAIP6yPgvl8QzwxHRORX4zNcadsJOuZt3ncv + Bq7pyQN8g2ddF86Ycep/+cU4/onnGW3zHn5pGdvuiHqxGOq+s3+UzljLyLlTCwzz + ncYdESIw29EEIB5xTcT0Q4qRQEuDwcp8/fASTZ7c6lv8AfpRKb69tGWLoXcx/V9P + dQIDAQAB + -----END PUBLIC KEY----- + """ + + setup do + File.rm_rf! tmp_path(".mix/public_keys") + :ok + end + + test "lists public keys" do + Mix.Tasks.Local.PublicKeys.run [] + assert_received {:mix_shell, :info, ["* in-memory public key for Elixir" <> _]} + + Mix.Tasks.Local.PublicKeys.run ["--detailed"] + assert_received {:mix_shell, :info, ["\n-----BEGIN PUBLIC KEY-----\n" <> _]} + end + + test "installs public keys" do + path = tmp_path("sample.pub") + File.write!(path, @pub) + + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Local.PublicKeys.run [path] + assert_received {:mix_shell, :yes?, ["Are you sure you want to install public key" <> _]} + assert_received {:mix_shell, :info, ["* creating " <> _]} + + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Local.PublicKeys.run [path] + assert_received {:mix_shell, :yes?, ["There is already a public key named sample.pub." 
<> _]} + assert_received {:mix_shell, :info, ["* creating " <> _]} + end + + test "raises on bad public keys on install" do + assert_raise Mix.Error, ~r(Could not decode public key:), fn -> + path = tmp_path("bad.pub") + File.write!(path, "oops") + send self(), {:mix_shell_input, :yes?, true} + Mix.Tasks.Local.PublicKeys.run [path] + end + end +end diff --git a/lib/mix/test/mix/tasks/local_test.exs b/lib/mix/test/mix/tasks/local_test.exs index b10d2c4c050..29701ad92a5 100644 --- a/lib/mix/test/mix/tasks/local_test.exs +++ b/lib/mix/test/mix/tasks/local_test.exs @@ -3,69 +3,6 @@ Code.require_file "../../test_helper.exs", __DIR__ defmodule Mix.Tasks.LocalTest do use MixTest.Case - defmodule ArchiveProject do - def project do - [ app: :archive, version: "0.1.0" ] - end - end - - defmodule ArchiveProject2 do - def project do - [ app: :archive, version: "0.2.0" ] - end - end - - test "archive" do - File.rm_rf! tmp_path("userhome") - System.put_env "MIX_HOME", tmp_path("userhome/.mix") - Mix.Project.push(ArchiveProject) - - in_fixture "archive", fn() -> - # Install it! - Mix.Tasks.Archive.run [] - assert File.regular? "archive-0.1.0.ez" - - send self, {:mix_shell_input, :yes?, true} - Mix.Tasks.Local.Install.run [] - assert File.regular? tmp_path("userhome/.mix/archives/archive-0.1.0.ez") - - archive = tmp_path("userhome/.mix/archives/archive-0.1.0.ez/archive-0.1.0/ebin") - assert to_char_list(archive) in :code.get_path - - # List it! - Mix.Local.append_archives - Mix.Tasks.Local.run [] - assert_received {:mix_shell, :info, ["mix local.sample # A local install sample"]} - - # Run it! - Mix.Task.run "local.sample" - assert_received {:mix_shell, :info, ["sample"]} - - # Install new version! - Mix.Project.push(ArchiveProject2) - Mix.Tasks.Archive.run ["--no_compile"] - assert File.regular? "archive-0.2.0.ez" - - send self, {:mix_shell_input, :yes?, true} - Mix.Tasks.Local.Install.run [] - assert File.regular? tmp_path("userhome/.mix/archives/archive-0.2.0.ez") - - # We don't do the assertion below on Windows because - # the archive is open by Erlang code server and the archive - # is not effectively removed until the Erlang process exits. - unless match? {:win32, _}, :os.type do - refute File.regular? tmp_path("userhome/.mix/archives/archive-0.1.0.ez") - end - - Mix.Local.append_archives - - # Remove it! - send self, {:mix_shell_input, :yes?, true} - Mix.Tasks.Local.Uninstall.run ["archive"] - refute File.regular? tmp_path("userhome/.mix/archives/archive-0.2.0.ez") - end - end - test "MIX_PATH" do File.rm_rf! 
tmp_path("mixpath") System.put_env "MIX_PATH", tmp_path("mixpath/ebin") diff --git a/lib/mix/test/mix/tasks/new_test.exs b/lib/mix/test/mix/tasks/new_test.exs index b439dac7800..c8b065304e5 100644 --- a/lib/mix/test/mix/tasks/new_test.exs +++ b/lib/mix/test/mix/tasks/new_test.exs @@ -5,20 +5,23 @@ defmodule Mix.Tasks.NewTest do test "new" do in_tmp "new", fn -> - Mix.Tasks.New.run ["hello_world", "--bare"] + Mix.Tasks.New.run ["hello_world"] assert_file "hello_world/mix.exs", fn(file) -> assert file =~ "app: :hello_world" - assert file =~ "version: \"0.0.1\"" + assert file =~ "version: \"0.1.0\"" end - assert_file "hello_world/README.md", ~r/# HelloWorld/ + assert_file "hello_world/README.md", ~r/# HelloWorld\n/ assert_file "hello_world/.gitignore" assert_file "hello_world/lib/hello_world.ex", ~r/defmodule HelloWorld do/ - assert_file "hello_world/test/test_helper.exs", ~r/HelloWorld.start/ - assert_file "hello_world/test/hello_world_test.exs", ~r/defmodule HelloWorldTest do/ + assert_file "hello_world/test/test_helper.exs", ~r/ExUnit.start()/ + assert_file "hello_world/test/hello_world_test.exs", fn(file) -> + assert file =~ ~r/defmodule HelloWorldTest do/ + assert file =~ "assert HelloWorld.hello() == :world" + end assert_received {:mix_shell, :info, ["* creating mix.exs"]} assert_received {:mix_shell, :info, ["* creating lib/hello_world.ex"]} @@ -31,20 +34,25 @@ defmodule Mix.Tasks.NewTest do assert_file "hello_world/mix.exs", fn(file) -> assert file =~ "app: :hello_world" - assert file =~ "version: \"0.0.1\"" - assert file =~ "mod: {HelloWorld, []}" + assert file =~ "version: \"0.1.0\"" + assert file =~ "mod: {HelloWorld.Application, []}" end - assert_file "hello_world/README.md", ~r/# HelloWorld/ + assert_file "hello_world/README.md", ~r/# HelloWorld\n/ assert_file "hello_world/.gitignore" assert_file "hello_world/lib/hello_world.ex", fn(file) -> assert file =~ "defmodule HelloWorld do" + assert file =~ "def hello do" + end + + assert_file "hello_world/lib/hello_world/application.ex", fn(file) -> + assert file =~ "defmodule HelloWorld.Application do" assert file =~ "use Application" assert file =~ "Supervisor.start_link(children, opts)" end - assert_file "hello_world/test/test_helper.exs", ~r/HelloWorld.start/ + assert_file "hello_world/test/test_helper.exs", ~r/ExUnit.start()/ assert_file "hello_world/test/hello_world_test.exs", ~r/defmodule HelloWorldTest do/ assert_received {:mix_shell, :info, ["* creating mix.exs"]} @@ -52,6 +60,28 @@ defmodule Mix.Tasks.NewTest do end end + test "new with --app" do + in_tmp "new app", fn -> + Mix.Tasks.New.run ["HELLO_WORLD", "--app", "hello_world"] + + assert_file "HELLO_WORLD/mix.exs", fn(file) -> + assert file =~ "app: :hello_world" + assert file =~ "version: \"0.1.0\"" + end + + assert_file "HELLO_WORLD/README.md", ~r/# HelloWorld\n/ + assert_file "HELLO_WORLD/.gitignore" + + assert_file "HELLO_WORLD/lib/hello_world.ex", ~r/defmodule HelloWorld do/ + + assert_file "HELLO_WORLD/test/test_helper.exs", ~r/ExUnit.start()/ + assert_file "HELLO_WORLD/test/hello_world_test.exs", ~r/defmodule HelloWorldTest do/ + + assert_received {:mix_shell, :info, ["* creating mix.exs"]} + assert_received {:mix_shell, :info, ["* creating lib/hello_world.ex"]} + end + end + test "new with --umbrella" do in_tmp "new umbrella", fn -> Mix.Tasks.New.run ["hello_world", "--umbrella"] @@ -60,7 +90,7 @@ defmodule Mix.Tasks.NewTest do assert file =~ "apps_path: \"apps\"" end - assert_file "hello_world/README.md", ~r/# HelloWorld/ + assert_file "hello_world/README.md", ~r/# 
HelloWorld\n/ assert_file "hello_world/.gitignore" assert_received {:mix_shell, :info, ["* creating mix.exs"]} @@ -88,17 +118,63 @@ defmodule Mix.Tasks.NewTest do end test "new with invalid args" do - in_tmp "new with invalid args", fn -> - assert_raise Mix.Error, "Project path must start with a letter and have only lowercase letters, numbers and underscore", fn -> + in_tmp "new with an invalid application name", fn -> + assert_raise Mix.Error, ~r"Application name must start with a letter and ", fn -> Mix.Tasks.New.run ["007invalid"] end - assert_raise Mix.Error, "Expected PATH to be given, please use `mix new PATH`", fn -> + assert_raise Mix.Error, ~r"only lowercase letters, numbers and underscore", fn -> + Mix.Tasks.New.run ["invAlid"] + end + end + + in_tmp "new with an invalid application name from the app option", fn -> + assert_raise Mix.Error, ~r"Application name must start with a letter and ", fn -> + Mix.Tasks.New.run ["valid", "--app", "007invalid"] + end + end + + in_tmp "new with an invalid module name from the module options", fn -> + assert_raise Mix.Error, ~r"Module name must be a valid Elixir alias", fn -> + Mix.Tasks.New.run ["valid", "--module", "not.valid"] + end + end + + in_tmp "new with an already taken application name", fn -> + assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn -> + Mix.Tasks.New.run ["mix"] + end + end + + in_tmp "new with an already taken application name from the app option", fn -> + assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn -> + Mix.Tasks.New.run ["valid", "--app", "mix"] + end + end + + in_tmp "new with an already taken module name from the module options", fn -> + assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn -> + Mix.Tasks.New.run ["valid", "--module", "Mix"] + end + end + + in_tmp "new without a specified path", fn -> + assert_raise Mix.Error, "Expected PATH to be given, please use \"mix new PATH\"", fn -> Mix.Tasks.New.run [] end end end + test "new with existent directory" do + in_tmp "new_with_existent_directory", fn -> + File.mkdir_p!("my_app") + send self(), {:mix_shell_input, :yes?, false} + assert_raise Mix.Error, "Please select another directory for installation", fn -> + Mix.Tasks.New.run ["my_app"] + end + end + end + defp assert_file(file) do assert File.regular?(file), "Expected #{file} to exist, but does not" end @@ -106,7 +182,7 @@ defmodule Mix.Tasks.NewTest do defp assert_file(file, match) do cond do Regex.regex?(match) -> - assert_file file, &(&1 =~ match) + assert_file file, &(assert &1 =~ match) is_function(match, 1) -> assert_file(file) match.(File.read!(file)) diff --git a/lib/mix/test/mix/tasks/profile.cprof_test.exs b/lib/mix/test/mix/tasks/profile.cprof_test.exs new file mode 100644 index 00000000000..ca22e17da07 --- /dev/null +++ b/lib/mix/test/mix/tasks/profile.cprof_test.exs @@ -0,0 +1,109 @@ +Code.require_file "../../test_helper.exs", __DIR__ + +defmodule Mix.Tasks.Profile.CprofTest do + use MixTest.Case + + import ExUnit.CaptureIO + + alias Mix.Tasks.Profile.Cprof + + @moduletag apps: [:sample] + @expr "Enum.each(1..5, &String.Chars.Integer.to_string/1)" + + setup do + Mix.Project.push MixTest.Case.Sample + end + + test "profiles evaluated expression", context do + in_tmp context.test, fn -> + assert capture_io(fn -> + Cprof.run(["-e", @expr]) + end) =~ ~r(String\.Chars\.Integer\.to_string\/1 *\d) + end + end + + test "profiles the script", context do + in_tmp context.test, fn -> + profile_script_name = "profile_script.ex" + + File.write! 
profile_script_name, @expr + + assert capture_io(fn -> + Cprof.run([profile_script_name]) + end) =~ ~r(String\.Chars\.Integer\.to_string\/1 *\d) + end + end + + test "filters based on limit", context do + in_tmp context.test, fn -> + refute capture_io(fn -> + Cprof.run(["--limit", "5", "-e", @expr]) + end) =~ ~r(:erlang\.trace_pattern\/3 *\d) + end + end + + test "filters based on module", context do + in_tmp context.test, fn -> + refute capture_io(fn -> + Cprof.run(["--module", "Enum", "-e", @expr]) + end) =~ ~r(String\.Chars\.Integer\.to_string\/1 *\d) + end + end + + test "Module matching", context do + in_tmp context.test, fn -> + refute capture_io(fn -> + Cprof.run(["--matching", "Enum", "-e", @expr]) + end) =~ ~r(String\.Chars\.Integer\.to_string\/1 *\d) + end + end + + test "Module.function matching", context do + in_tmp context.test, fn -> + refute capture_io(fn -> + Cprof.run(["--matching", "Enum.each", "-e", @expr]) + end) =~ ~r(anonymous fn\/3 in Enum\.each\/2 *\d) + end + end + + test "Module.function/arity matching", context do + in_tmp context.test, fn -> + assert capture_io(fn -> + Cprof.run(["--matching", "Enum.each/8", "-e", @expr]) + end) =~ ~r(Profile done over 0 matching functions) + end + end + + test "errors on missing files", context do + in_tmp context.test, fn -> + assert_raise Mix.Error, "No files matched pattern \"non-existent\" given to --require", fn -> + capture_io(fn -> Cprof.run ["-r", "non-existent"] end) + end + + assert_raise Mix.Error, "No files matched pattern \"non-existent\" given to --require", fn -> + capture_io(fn -> Cprof.run ["-pr", "non-existent"] end) + end + + assert_raise Mix.Error, "No such file: non-existent", fn -> + capture_io(fn -> Cprof.run ["non-existent"] end) + end + + File.mkdir_p!("lib") + assert_raise Mix.Error, "No such file: lib", fn -> + capture_io(fn -> Cprof.run ["lib"] end) + end + end + end + + test "warmup", context do + in_tmp context.test, fn -> + assert capture_io(fn -> + Cprof.run(["-e", @expr]) + end) =~ "Warmup..." + + refute capture_io(fn -> + Cprof.run(["-e", "Enum.each(1..5, fn(_) -> MapSet.new end)", "--no-warmup"]) + end) =~ "Warmup..." + end + end +end diff --git a/lib/mix/test/mix/tasks/profile.fprof_test.exs b/lib/mix/test/mix/tasks/profile.fprof_test.exs new file mode 100644 index 00000000000..c650fc8065d --- /dev/null +++ b/lib/mix/test/mix/tasks/profile.fprof_test.exs @@ -0,0 +1,102 @@ +Code.require_file "../../test_helper.exs", __DIR__ + +defmodule Mix.Tasks.Profile.FprofTest do + use MixTest.Case + + import ExUnit.CaptureIO + + alias Mix.Tasks.Profile.Fprof + + @moduletag apps: [:sample] + + setup do + Mix.Project.push MixTest.Case.Sample + end + + test "profiles evaluated expression", context do + in_tmp context.test, fn -> + assert capture_io(fn -> + Fprof.run(["-e", "Enum.each(1..5, fn(_) -> MapSet.new end)"]) + end) =~ ~r(MapSet\.new/0 *5 *\d+\.\d{3} *\d+\.\d{3}) + end + end + + test "profiles the script", context do + in_tmp context.test, fn -> + profile_script_name = "profile_script.ex" + + File.write! 
profile_script_name, """ + Enum.each(1..5, fn(_) -> MapSet.new end) + """ + + assert capture_io(fn -> + Fprof.run([profile_script_name]) + end) =~ ~r(MapSet\.new/0 *5 *\d+\.\d{3} *\d+\.\d{3}) + end + end + + test "expands callers", context do + in_tmp context.test, fn -> + assert capture_io(fn -> + Fprof.run(["-e", "Enum.each(1..5, fn(_) -> MapSet.new end)", "--callers"]) + end) =~ ~r(MapSet\.new/0 *5 *\d+\.\d{3} *\d+\.\d{3} +<--) + end + end + + test "expands processes", context do + in_tmp context.test, fn -> + output = capture_io(fn -> + Fprof.run(["-e", "spawn(fn -> :ok end); Enum.each(1..5, fn(_) -> MapSet.new end)", "--details"]) + end) + assert output =~ ~r(#{:erlang.pid_to_list(self())} +\d+ +\d+\.\d{3}) + assert output =~ ~r(spawned by #{:erlang.pid_to_list(self())}) + assert output =~ ~r(as :erlang.apply) + assert output =~ ~r(initial calls:) + end + end + + test "sort options", context do + in_tmp context.test, fn -> + assert capture_io(fn -> + Fprof.run(["-e", "Enum.each(1..5, fn(_) -> MapSet.new end)", "--sort", "acc"]) + end) =~ ~r(MapSet\.new/0 *5 *\d+\.\d{3} *\d+\.\d{3}) + + assert capture_io(fn -> + Fprof.run(["-e", "Enum.each(1..5, fn(_) -> MapSet.new end)", "--sort", "own"]) + end) =~ ~r(MapSet\.new/0 *5 *\d+\.\d{3} *\d+\.\d{3}) + end + end + + test "errors on missing files", context do + in_tmp context.test, fn -> + assert_raise Mix.Error, "No files matched pattern \"non-existent\" given to --require", fn -> + capture_io(fn -> Fprof.run ["-r", "non-existent"] end) + end + + assert_raise Mix.Error, "No files matched pattern \"non-existent\" given to --require", fn -> + capture_io(fn -> Fprof.run ["-pr", "non-existent"] end) + end + + assert_raise Mix.Error, "No such file: non-existent", fn -> + capture_io(fn -> Fprof.run ["non-existent"] end) + end + + File.mkdir_p!("lib") + assert_raise Mix.Error, "No such file: lib", fn -> + capture_io(fn -> Fprof.run ["lib"] end) + end + end + end + + test "warmup", context do + in_tmp context.test, fn -> + assert capture_io(fn -> + Fprof.run(["-e", "Enum.each(1..5, fn(_) -> MapSet.new end)"]) + end) =~ "Warmup..." + + refute capture_io(fn -> + Fprof.run(["-e", "Enum.each(1..5, fn(_) -> MapSet.new end)", "--no-warmup"]) + end) =~ "Warmup..." 
+ end + end +end diff --git a/lib/mix/test/mix/tasks/run_test.exs b/lib/mix/test/mix/tasks/run_test.exs index 5425b857159..a5a13cf2f0d 100644 --- a/lib/mix/test/mix/tasks/run_test.exs +++ b/lib/mix/test/mix/tasks/run_test.exs @@ -5,22 +5,14 @@ defmodule Mix.Tasks.RunTest do import ExUnit.CaptureIO - defmodule GetApp do - def project do - [ app: :get_app, - version: "0.1.0", - deps: [ - {:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo")} - ] ] - end - end + @moduletag apps: [:sample] setup do Mix.Project.push MixTest.Case.Sample end - test "loads configuration" do - in_fixture "no_mixfile", fn -> + test "loads configuration", context do + in_tmp context.test, fn -> assert capture_io(fn -> Mix.Task.run "run", ["--config", fixture_path("configs/good_config.exs"), @@ -31,29 +23,27 @@ defmodule Mix.Tasks.RunTest do Application.delete_env(:my_app, :key) end - test "run requires files before evaling commands" do + test "run requires files before evaling commands", context do git_repo = fixture_path("git_repo/lib/git_repo.ex") - in_fixture "no_mixfile", fn -> - Mix.Tasks.Run.run ["-r", git_repo, "-e", "send self, {:hello, GitRepo.hello}"] + in_tmp context.test, fn -> + Mix.Tasks.Run.run ["-r", git_repo, "-e", "send self(), {:hello, GitRepo.hello}"] assert_received {:hello, "World"} - Mix.Tasks.Run.run ["-pr", git_repo, "-e", "send self, {:hello, GitRepo.hello}"] + Mix.Tasks.Run.run ["-pr", git_repo, "-e", "send self(), {:hello, GitRepo.hello}"] assert_received {:hello, "World"} end after purge [GitRepo] end - test "run errors on missing files" do - git_repo = fixture_path("git_repo/lib/git_repo.ex") - - in_fixture "no_mixfile", fn -> + test "run errors on missing files", context do + in_tmp context.test, fn -> assert_raise Mix.Error, "No files matched pattern \"non-existent\" given to --require", fn -> Mix.Tasks.Run.run ["-r", "non-existent"] end - assert_raise Mix.Error, "No files matched pattern \"non-existent\" given to --parallel-require", fn -> + assert_raise Mix.Error, "No files matched pattern \"non-existent\" given to --require", fn -> Mix.Tasks.Run.run ["-pr", "non-existent"] end @@ -61,7 +51,7 @@ defmodule Mix.Tasks.RunTest do Mix.Tasks.Run.run ["non-existent"] end - assert File.dir?("lib") + File.mkdir_p!("lib") assert_raise Mix.Error, "No such file: lib", fn -> Mix.Tasks.Run.run ["lib"] end @@ -70,11 +60,11 @@ defmodule Mix.Tasks.RunTest do purge [GitRepo] end - test "run rewrites System.argv" do - in_fixture "no_mixfile", fn -> + test "run rewrites System.argv", context do + in_tmp context.test, fn -> file = "argv.exs" - File.write! file, "send self, {:argv, System.argv}" + File.write! 
file, "send self(), {:argv, System.argv}" unload_file = fn -> Code.unload_files [Path.expand(file)] end @@ -87,13 +77,13 @@ defmodule Mix.Tasks.RunTest do assert_received {:argv, ["foo", "-e", "bar"]} unload_file.() - Mix.Tasks.Run.run ["-e", "send self, {:argv, System.argv}", file, "foo", "-x", "bar"] + Mix.Tasks.Run.run ["-e", "send self(), {:argv, System.argv}", file, "foo", "-x", "bar"] assert_received {:argv, [^file, "foo", "-x", "bar"]} unload_file.() Mix.Tasks.Run.run [ - "-e", "send self, :evaled", - "-e", "send self, {:argv, System.argv}", + "-e", "send self(), :evaled", + "-e", "send self(), {:argv, System.argv}", "--no-compile", file, "-x", "bar" ] assert_received :evaled diff --git a/lib/mix/test/mix/tasks/test_test.exs b/lib/mix/test/mix/tasks/test_test.exs index 59fa20b0078..ba6f6ec6c1f 100644 --- a/lib/mix/test/mix/tasks/test_test.exs +++ b/lib/mix/test/mix/tasks/test_test.exs @@ -5,12 +5,12 @@ defmodule Mix.Tasks.TestTest do import Mix.Tasks.Test, only: [ex_unit_opts: 1] - test "ex_unit_opts returns ex unit options" do + test "ex_unit_opts/1 returns ex unit options" do assert ex_unit_opts([unknown: "ok", seed: 13]) == [autorun: false, seed: 13] end - test "ex_unit_opts returns includes and excludes" do + test "ex_unit_opts/1 returns includes and excludes" do assert ex_unit_opts([include: "focus", include: "key:val"]) == [autorun: false, include: [:focus, key: "val"]] @@ -18,11 +18,190 @@ defmodule Mix.Tasks.TestTest do [autorun: false, exclude: [:focus, key: "val"]] end - test "ex_unit_opts translates only into includes and excludes" do + test "ex_unit_opts/1 translates :only into includes and excludes" do assert ex_unit_opts([only: "focus"]) == - [autorun: false, exclude: [:test], include: [:focus]] + [autorun: false, include: [:focus], exclude: [:test]] assert ex_unit_opts([only: "focus", include: "special"]) == - [autorun: false, exclude: [:test], include: [:focus, :special]] + [autorun: false, include: [:focus, :special], exclude: [:test]] + end + + test "ex_unit_opts/1 translates :color into list containing an enabled key/value pair" do + assert ex_unit_opts([color: false]) == [autorun: false, colors: [enabled: false]] + assert ex_unit_opts([color: true]) == [autorun: false, colors: [enabled: true]] + end + + test "ex_unit_opts/1 translates :formatter into list of modules" do + assert ex_unit_opts([formatter: "A.B"]) == [autorun: false, formatters: [A.B]] + end + + test "--stale: runs all tests for first run, then none on second" do + in_fixture "test_stale", fn -> + assert_stale_run_output "2 tests, 0 failures" + + assert_stale_run_output """ + No stale tests. 
+ """ + end + end + + test "--stale: runs tests that depend on modified modules" do + in_fixture "test_stale", fn -> + assert_stale_run_output "2 tests, 0 failures" + + set_all_mtimes() + File.touch!("lib/b.ex") + + assert_stale_run_output "1 test, 0 failures" + + set_all_mtimes() + File.touch!("lib/a.ex") + + assert_stale_run_output "2 tests, 0 failures" + end + end + + test "--stale: doesn't write manifest when there are failures" do + in_fixture "test_stale", fn -> + assert_stale_run_output "2 tests, 0 failures" + + set_all_mtimes() + File.write!("lib/b.ex", """ + defmodule B do + def f, do: :error + end + """) + + assert_stale_run_output "1 test, 1 failure" + + assert_stale_run_output "1 test, 1 failure" + end + end + + test "--stale: runs tests that have changed" do + in_fixture "test_stale", fn -> + assert_stale_run_output "2 tests, 0 failures" + + set_all_mtimes() + File.touch!("test/a_test_stale.exs") + + assert_stale_run_output "1 test, 0 failures" + end + end + + test "--stale: runs tests that have changed test_helpers" do + in_fixture "test_stale", fn -> + assert_stale_run_output "2 tests, 0 failures" + + set_all_mtimes() + File.touch!("test/test_helper.exs") + + assert_stale_run_output "2 tests, 0 failures" + end + end + + test "--stale: runs all tests no matter what with --force" do + in_fixture "test_stale", fn -> + assert_stale_run_output "2 tests, 0 failures" + + assert_stale_run_output ~w[--force], "2 tests, 0 failures" + end + end + + test "logs test absence for a project with no test paths" do + in_fixture "test_stale", fn -> + File.rm_rf! "test" + + assert_run_output "There are no tests to run" + end + end + + test "--listen-on-stdin: runs tests after input" do + in_fixture "test_stale", fn -> + port = mix_port(~w[test --stale --listen-on-stdin]) + + assert receive_until_match(port, "seed", []) =~ "2 tests" + + Port.command(port, "\n") + + assert receive_until_match(port, "No stale tests.", []) =~ "Restarting..." 
+ end + end + + test "--listen-on-stdin: does not exit on compilation failure" do + in_fixture "test_stale", fn -> + File.write!("lib/b.ex", """ + defmodule B do + def f, do: error_not_a_var + end + """) + + port = mix_port(~w[test --listen-on-stdin]) + + assert receive_until_match(port, "error", []) =~ "lib/b.ex" + + File.write!("lib/b.ex", """ + defmodule B do + def f, do: A.f + end + """) + + Port.command(port, "\n") + + assert receive_until_match(port, "seed", []) =~ "2 tests" + + File.write!("test/b_test_stale.exs", """ + defmodule BTest do + use ExUnit.Case + + test "f" do + assert B.f() == error_not_a_var + end + end + """) + + Port.command(port, "\n") + + assert receive_until_match(port, "undefined function error_not_a_var", []) =~ "test/b_test_stale.exs" + + File.write!("test/b_test_stale.exs", """ + defmodule BTest do + use ExUnit.Case + + test "f" do + assert B.f() == :ok + end + end + """) + + Port.command(port, "\n") + + assert receive_until_match(port, "seed", []) =~ "2 tests" + end + end + + defp receive_until_match(port, expected, acc) do + receive do + {^port, {:data, output}} -> + acc = [acc | output] + + if output =~ expected do + IO.iodata_to_binary(acc) + else + receive_until_match(port, expected, acc) + end + end + end + + defp set_all_mtimes(time \\ {{2010, 1, 1}, {0, 0, 0}}) do + Enum.each(Path.wildcard("**", match_dot: true), &File.touch!(&1, time)) + end + + defp assert_stale_run_output(opts \\ [], expected) do + assert_run_output(["--stale" | opts], expected) + end + + defp assert_run_output(opts \\ [], expected) do + assert mix(["test" | opts]) =~ expected end end diff --git a/lib/mix/test/mix/tasks/xref_test.exs b/lib/mix/test/mix/tasks/xref_test.exs new file mode 100644 index 00000000000..92e09fcdbd3 --- /dev/null +++ b/lib/mix/test/mix/tasks/xref_test.exs @@ -0,0 +1,662 @@ +Code.require_file "../../test_helper.exs", __DIR__ + +defmodule Mix.Tasks.XrefTest do + use MixTest.Case + + import ExUnit.CaptureIO + + setup_all do + previous = Application.get_env(:elixir, :ansi_enabled, false) + Application.put_env(:elixir, :ansi_enabled, false) + on_exit fn -> Application.put_env(:elixir, :ansi_enabled, previous) end + end + + setup do + Mix.Project.push MixTest.Case.Sample + :ok + end + + ## Warnings + + test "warnings: reports nothing with no references" do + assert_no_warnings "defmodule A do end" + end + + test "warnings: reports missing functions" do + assert_warnings """ + defmodule A do + def a, do: A.no_func + def b, do: A.a() + end + """, """ + warning: function A.no_func/0 is undefined or private + lib/a.ex:2 + + """ + end + + test "warnings: reports missing functions respecting arity" do + assert_warnings """ + defmodule A do + def a, do: :ok + def b, do: A.a(1) + end + """, """ + warning: function A.a/1 is undefined or private. 
Did you mean one of: + + * a/0 + + lib/a.ex:3 + + """ + end + + test "warnings: reports missing modules" do + assert_warnings """ + defmodule A do + def a, do: D.no_module + end + """, """ + warning: function D.no_module/0 is undefined (module D is not available) + lib/a.ex:2 + + """ + end + + test "warnings: reports missing captures" do + assert_warnings """ + defmodule A do + def a, do: &A.no_func/0 + end + """, """ + warning: function A.no_func/0 is undefined or private + lib/a.ex:2 + + """ + end + + test "warnings: doesn't report missing funcs at compile time" do + assert_no_warnings """ + Enum.map([], fn _ -> BadReferencer.no_func4() end) + + if function_exported?(List, :flatten, 1) do + List.flatten([1, 2, 3]) + else + List.old_flatten([1, 2, 3]) + end + """ + end + + test "warnings: protocols are checked, ignoring missing built-in impls" do + assert_warnings """ + defprotocol AProtocol do + def func(arg) + end + + defmodule AImplementation do + defimpl AProtocol do + def func(_), do: B.no_func + end + end + """, """ + warning: function B.no_func/0 is undefined or private + lib/a.ex:7 + + """ + end + + test "warnings: handles Erlang ops" do + assert_no_warnings """ + defmodule A do + def a(a, b), do: a and b + def b(a, b), do: a or b + end + """ + end + + test "warnings: handles Erlang modules" do + assert_warnings """ + defmodule A do + def a, do: :not_a_module.no_module + def b, do: :lists.no_func + end + """, """ + warning: function :not_a_module.no_module/0 is undefined (module :not_a_module is not available) + lib/a.ex:2 + + warning: function :lists.no_func/0 is undefined or private + lib/a.ex:3 + + """ + end + + test "warnings: handles multiple modules in one file" do + assert_warnings """ + defmodule A1 do + def a, do: A2.no_func + def b, do: A2.a + end + + defmodule A2 do + def a, do: A1.no_func + def b, do: A1.b + end + """, """ + warning: function A2.no_func/0 is undefined or private + lib/a.ex:2 + + warning: function A1.no_func/0 is undefined or private + lib/a.ex:7 + + """ + end + + test "warnings: doesn't load unloaded modules" do + assert_warnings """ + defmodule A1 do + @compile {:autoload, false} + @on_load :init + def init do + raise "oops" + end + end + + defmodule A2 do + def a, do: A1.no_func + def b, do: A1.init + end + """, """ + warning: function A1.no_func/0 is undefined or private + lib/a.ex:10 + + """ + end + + test "warnings: groups multiple warnings in one file" do + assert_warnings """ + defmodule A do + def a, do: A.no_func + def b, do: A2.no_func + def c, do: A.no_func + def d, do: A2.no_func + end + """, """ + warning: function A.no_func/0 is undefined or private + Found at 2 locations: + lib/a.ex:2 + lib/a.ex:4 + + warning: function A2.no_func/0 is undefined (module A2 is not available) + Found at 2 locations: + lib/a.ex:3 + lib/a.ex:5 + + """ + end + + test "warnings: handles module body conditionals" do + assert_warnings """ + defmodule A do + if function_exported?(List, :flatten, 1) do + List.flatten([1, 2, 3]) + else + List.old_flatten([1, 2, 3]) + end + + if function_exported?(List, :flatten, 1) do + def flatten(arg), do: List.flatten(arg) + else + def flatten(arg), do: List.old_flatten(arg) + end + + if function_exported?(List, :flatten, 1) do + def flatten2(arg), do: List.old_flatten(arg) + else + def flatten2(arg), do: List.flatten(arg) + end + end + """, """ + warning: function List.old_flatten/1 is undefined or private. 
Did you mean one of: + + * flatten/1 + * flatten/2 + + lib/a.ex:15 + + """ + end + + test "warnings: imports" do + assert_no_warnings """ + defmodule A do + import Record + + def a(a, b), do: extract(a, b) + def b(arg), do: is_record(arg) + end + """ + end + + test "warnings: aliases" do + assert_warnings """ + defmodule A do + alias Enum, as: E + + def a(a, b), do: E.map2(a, b) + def b, do: &E.map2/2 + end + """, """ + warning: function Enum.map2/2 is undefined or private. Did you mean one of: + + * map/2 + + Found at 2 locations: + lib/a.ex:4 + lib/a.ex:5 + + """ + end + + test "warnings: requires" do + assert_no_warnings """ + defmodule A do + require Integer + + def a(a), do: Integer.is_even(a) + end + """ + end + + defp assert_warnings(contents, expected) do + in_fixture "no_mixfile", fn -> + File.write!("lib/a.ex", contents) + + assert capture_io(:stderr, fn -> + assert Mix.Task.run("xref", ["warnings"]) == :error + end) == expected + end + end + + defp assert_no_warnings(contents) do + in_fixture "no_mixfile", fn -> + File.write!("lib/a.ex", contents) + + assert capture_io(:stderr, fn -> + assert Mix.Task.run("xref", ["warnings"]) == :ok + end) == "" + end + end + + ## Unreachable + + test "unreachable: reports missing functions" do + assert_unreachable """ + defmodule A do + def a, do: A.no_func + def b, do: A.a() + end + """, """ + lib/a.ex:2: A.no_func/0 + """ + end + + defp assert_unreachable(contents, expected) do + in_fixture "no_mixfile", fn -> + File.write!("lib/a.ex", contents) + + assert capture_io(fn -> + assert Mix.Task.run("xref", ["unreachable"]) == :error + end) == expected + end + end + + ## Exclude + + test "exclude: excludes specified modules and MFAs" do + defmodule ExcludeSample do + def project do + [app: :sample, + version: "0.1.0", + xref: [exclude: [MissingModule, {MissingModule2, :no_func, 2}]]] + end + end + + Mix.Project.push ExcludeSample + + assert_warnings """ + defmodule A do + def a, do: MissingModule.no_func(1) + def b, do: MissingModule2.no_func(1, 2) + def c, do: MissingModule2.no_func(1) + def d, do: MissingModule3.no_func(1, 2) + end + """, """ + warning: function MissingModule2.no_func/1 is undefined (module MissingModule2 is not available) + lib/a.ex:4 + + warning: function MissingModule3.no_func/2 is undefined (module MissingModule3 is not available) + lib/a.ex:5 + + """ + end + + ## Callers + + test "callers: prints callers of specified Module" do + assert_callers "A", """ + defmodule A do + def a, do: A.a() + def a(arg), do: A.a(arg) + def b, do: A.b() + def c, do: B.a() + end + """, + """ + lib/a.ex:2: A.a/0 + lib/a.ex:3: A.a/1 + lib/a.ex:4: A.b/0 + """ + end + + test "callers: prints callers of specified Module.func" do + assert_callers "A.a", """ + defmodule A do + def a, do: A.a() + def a(arg), do: A.a(arg) + def b, do: A.b() + def c, do: B.a() + end + """, + """ + lib/a.ex:2: A.a/0 + lib/a.ex:3: A.a/1 + """ + end + + test "callers: prints callers of specified Module.func/arity" do + assert_callers "A.a/0", """ + defmodule A do + def a, do: A.a() + def a(arg), do: A.a(arg) + def b, do: A.b() + def c, do: B.a() + end + """, + """ + lib/a.ex:2: A.a/0 + """ + end + + test "callers: lists compile calls and macros" do + assert_callers "A", """ + defmodule A do + defmacro a_macro, do: :ok + def a, do: :ok + end + """, """ + defmodule B do + require A + + A.a_macro() + A.a() + end + """, + """ + lib/b.ex:5: A.a/0 + lib/b.ex:4: A.a_macro/0 + """ + end + + test "callers: handles aliases" do + assert_callers "Enum", """ + defmodule A do + alias 
Enum, as: E + + E.map([], &E.flatten/1) + + def a(a, b), do: E.map(a, b) + end + """, """ + lib/a.ex:4: Enum.flatten/1 + lib/a.ex:4: Enum.map/2 + lib/a.ex:6: Enum.map/2 + """ + end + + test "callers: handles imports" do + assert_callers "Integer", ~S""" + defmodule A do + import Integer + + &is_even/1 + &parse/1 + + _ = is_even(Enum.random([1])) + _ = parse("2") + + def a(a), do: is_even(a) + def b(a), do: parse(a) + _ = is_even(Enum.random([1])); def c(a), do: is_even(a) + end + """, """ + lib/a.ex:4: Integer.is_even/1 + lib/a.ex:7: Integer.is_even/1 + lib/a.ex:10: Integer.is_even/1 + lib/a.ex:12: Integer.is_even/1 + lib/a.ex:5: Integer.parse/1 + lib/a.ex:8: Integer.parse/1 + lib/a.ex:11: Integer.parse/1 + """ + end + + test "callers: no argument gives error" do + in_fixture "no_mixfile", fn -> + message = "xref doesn't support this command. For more information run \"mix help xref\"" + + assert_raise Mix.Error, message, fn -> + assert Mix.Task.run("xref", ["callers"]) == :error + end + end + end + + test "callers: gives nice error for quotable but invalid callers spec" do + in_fixture "no_mixfile", fn -> + message = + "xref callers CALLEE expects Module, Module.function, or Module.function/arity, got: Module.func(arg)" + + assert_raise Mix.Error, message, fn -> + Mix.Task.run("xref", ["callers", "Module.func(arg)"]) + end + end + end + + test "callers: gives nice error for unquotable callers spec" do + in_fixture "no_mixfile", fn -> + message = + "xref callers CALLEE expects Module, Module.function, or Module.function/arity, got: %" + + assert_raise Mix.Error, message, fn -> + Mix.Task.run("xref", ["callers", "%"]) + end + end + end + + defp assert_callers(callee, contents_a, contents_b \\ "", expected) do + in_fixture "no_mixfile", fn -> + File.write!("lib/a.ex", contents_a) + File.write!("lib/b.ex", contents_b) + + assert capture_io(fn -> + assert Mix.Task.run("xref", ["callers", callee]) == :ok + end) == expected + end + end + + ## Graph + + test "graph: basic usage" do + assert_graph """ + lib/a.ex + └── lib/b.ex + └── lib/a.ex + lib/b.ex + lib/c.ex + lib/d.ex + └── lib/a.ex (compile) + """ + end + + test "graph: exclude" do + assert_graph ~w[--exclude lib/c.ex --exclude lib/b.ex], """ + lib/a.ex + lib/d.ex + └── lib/a.ex (compile) + """ + end + + test "graph: exclude 1" do + assert_graph ~w[--exclude lib/d.ex], """ + lib/a.ex + └── lib/b.ex + └── lib/a.ex + lib/b.ex + lib/c.ex + """ + end + + test "graph: dot format" do + assert_graph ~w[--format dot], true, """ + digraph "xref graph" { + "lib/a.ex" + "lib/a.ex" -> "lib/b.ex" + "lib/b.ex" -> "lib/a.ex" + "lib/b.ex" + "lib/c.ex" + "lib/d.ex" + "lib/d.ex" -> "lib/a.ex" [label="(compile)"] + } + """ + end + + test "graph: source" do + assert_graph ~w[--source lib/a.ex], """ + lib/a.ex + └── lib/b.ex + └── lib/a.ex + """ + end + + test "graph: invalid source" do + assert_raise Mix.Error, "Source could not be found: lib/a2.ex", fn -> + assert_graph ~w[--source lib/a2.ex], "" + end + end + + test "graph: sink" do + assert_graph ~w[--sink lib/b.ex], """ + lib/a.ex + └── lib/b.ex + └── lib/a.ex + lib/d.ex + └── lib/a.ex (compile) + """ + end + + test "graph: invalid sink" do + assert_raise Mix.Error, "Sink could not be found: lib/b2.ex", fn -> + assert_graph ~w[--sink lib/b2.ex], "" + end + end + + test "graph: sink and source is error" do + assert_raise Mix.Error, "mix xref graph expects only one of --source and --sink", fn -> + assert_graph ~w[--source lib/a.ex --sink lib/b.ex], "" + end + end + + test "graph: with dynamic module" do + 
in_fixture "no_mixfile", fn -> + File.write! "lib/a.ex", """ + B.define() + """ + + File.write! "lib/b.ex", """ + defmodule B do + def define do + defmodule A do + end + end + end + """ + + assert Mix.Task.run("xref", ["graph"]) == :ok + + assert """ + Compiling 2 files (.ex) + Generated sample app + lib/a.ex + lib/b.ex + """ = receive_until_no_messages([]) + end + end + + defp assert_graph(opts \\ [], dot \\ false, expected) do + in_fixture "no_mixfile", fn -> + File.write! "lib/a.ex", """ + defmodule A do + def a do + B.a + end + + def b, do: :ok + end + """ + + File.write! "lib/b.ex", """ + defmodule B do + def a do + A.a + B.a + end + end + """ + + File.write! "lib/c.ex", """ + defmodule C do + end + """ + + File.write! "lib/d.ex", """ + defmodule :d do + A.b + end + """ + + assert Mix.Task.run("xref", opts ++ ["graph"]) == :ok + + result = + if dot do + File.read!("xref_graph.dot") + else + assert "Compiling 4 files (.ex)\nGenerated sample app\n" <> result = + receive_until_no_messages([]) + + result + end + + assert normalize_graph_output(result) == normalize_graph_output(expected) + end + end + + defp normalize_graph_output(graph) do + String.replace(graph, "└──", "`--") + end + + defp receive_until_no_messages(acc) do + receive do + {:mix_shell, :info, [line]} -> receive_until_no_messages([acc, line | "\n"]) + after + 0 -> IO.iodata_to_binary(acc) + end + end +end diff --git a/lib/mix/test/mix/umbrella_test.exs b/lib/mix/test/mix/umbrella_test.exs index 69ad94d70de..b6efecb7e08 100644 --- a/lib/mix/test/mix/umbrella_test.exs +++ b/lib/mix/test/mix/umbrella_test.exs @@ -3,24 +3,49 @@ Code.require_file "../test_helper.exs", __DIR__ defmodule Mix.UmbrellaTest do use MixTest.Case + @moduletag apps: [:foo, :bar] + + test "apps_paths" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + assert Mix.Project.apps_paths == nil + Mix.Project.in_project(:umbrella, ".", fn _ -> + assert Mix.Project.apps_paths == + %{bar: "apps/bar", foo: "apps/foo"} + assert_received {:mix_shell, :error, ["warning: path \"apps/dont_error_on_missing_mixfile\"" <> _]} + refute_received {:mix_shell, :error, ["warning: path \"apps/dont_error_on_files\"" <> _]} + end) + end + end + + test "apps_paths with selection" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project(:umbrella, ".", [apps: [:foo, :bar]], fn _ -> + File.mkdir_p! "apps/errors" + File.write! 
"apps/errors/mix.exs", "raise :oops" + assert Mix.Project.apps_paths == + %{bar: "apps/bar", foo: "apps/foo"} + end) + end + end + test "compiles umbrella" do in_fixture "umbrella_dep/deps/umbrella", fn -> Mix.Project.in_project(:umbrella, ".", fn _ -> Mix.Task.run "deps" - assert_received {:mix_shell, :info, ["* bar (apps/bar)"]} - assert_received {:mix_shell, :info, ["* foo (apps/foo)"]} + assert_received {:mix_shell, :info, ["* bar (apps/bar) (mix)"]} + assert_received {:mix_shell, :info, ["* foo (apps/foo) (mix)"]} # Ensure we can compile and run checks Mix.Task.run "deps.compile" - Mix.Task.run "deps.check" - Mix.Task.run "compile" + Mix.Task.run "deps.loadpaths" + Mix.Task.run "compile", ["--verbose"] assert_received {:mix_shell, :info, ["==> bar"]} - assert_received {:mix_shell, :info, ["Compiled lib/bar.ex"]} - assert_received {:mix_shell, :info, ["Generated bar.app"]} + assert_received {:mix_shell, :info, ["Generated bar app"]} + assert File.regular?("_build/dev/lib/bar/ebin/Elixir.Bar.beam") assert_received {:mix_shell, :info, ["==> foo"]} - assert_received {:mix_shell, :info, ["Compiled lib/foo.ex"]} - assert_received {:mix_shell, :info, ["Generated foo.app"]} + assert_received {:mix_shell, :info, ["Generated foo app"]} + assert File.regular?("_build/dev/lib/foo/ebin/Elixir.Foo.beam") # Ensure foo was loaded and in the same env as Mix.env assert_received {:mix_shell, :info, [":foo env is dev"]} @@ -32,6 +57,42 @@ defmodule Mix.UmbrellaTest do end end + test "compiles umbrella with protocol consolidation" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project(:umbrella, ".", fn _ -> + Mix.Task.run "compile", ["--verbose"] + assert_received {:mix_shell, :info, ["Generated bar app"]} + assert_received {:mix_shell, :info, ["Generated foo app"]} + assert File.regular? "_build/dev/consolidated/Elixir.Enumerable.beam" + purge [Enumerable] + + assert Mix.Tasks.App.Start.run [] + assert Protocol.consolidated?(Enumerable) + end) + end + end + + test "recursively compiles umbrella with protocol consolidation" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project(:umbrella, ".", fn _ -> + defmodule Elixir.Mix.Tasks.Umbrella.Recur do + use Mix.Task + @recursive true + def run(_), do: Mix.Task.run "compile", ["--verbose"] + end + + Mix.Task.run "umbrella.recur" + assert_received {:mix_shell, :info, ["Generated bar app"]} + assert_received {:mix_shell, :info, ["Generated foo app"]} + assert File.regular? 
"_build/dev/consolidated/Elixir.Enumerable.beam" + purge [Enumerable] + + assert Mix.Tasks.App.Start.run [] + assert Protocol.consolidated?(Enumerable) + end) + end + end + defmodule UmbrellaDeps do def project do [apps_path: "apps", @@ -48,11 +109,115 @@ defmodule Mix.UmbrellaTest do File.mkdir_p!("_build/dev/lib/foo/ebin") File.mkdir_p!("_build/dev/lib/bar/ebin") - Mix.Task.run "deps.loadpaths", ["--no-deps-check"] - Mix.Task.run "loadpaths", ["--no-elixir-version-check"] - assert to_char_list(Path.expand("_build/dev/lib/some_dep/ebin")) in :code.get_path - assert to_char_list(Path.expand("_build/dev/lib/foo/ebin")) in :code.get_path - assert to_char_list(Path.expand("_build/dev/lib/bar/ebin")) in :code.get_path + Mix.Task.run "loadpaths", ["--no-deps-check", "--no-elixir-version-check"] + assert to_charlist(Path.expand("_build/dev/lib/some_dep/ebin")) in :code.get_path + assert to_charlist(Path.expand("_build/dev/lib/foo/ebin")) in :code.get_path + assert to_charlist(Path.expand("_build/dev/lib/bar/ebin")) in :code.get_path + end + end + + test "loads umbrella child dependencies in all environments" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project :umbrella, ".", fn _ -> + File.write! "apps/bar/mix.exs", """ + defmodule Bar.Mixfile do + use Mix.Project + + def project do + [app: :bar, + version: "0.1.0", + deps: [{:git_repo, git: MixTest.Case.fixture_path("git_repo"), only: :other}]] + end + end + """ + + # Does not fetch when filtered + Mix.Tasks.Deps.Get.run ["--only", "dev"] + refute_received {:mix_shell, :info, ["* Getting git_repo" <> _]} + + # But works across all environments + Mix.Tasks.Deps.Get.run [] + assert_received {:mix_shell, :info, ["* Getting git_repo" <> _]} + + # Does not show by default + Mix.Tasks.Deps.run [] + refute_received {:mix_shell, :info, ["* git_repo" <> _]} + + # But shows on proper environment + Mix.env(:other) + Mix.Tasks.Deps.run [] + assert_received {:mix_shell, :info, ["* git_repo " <> _]} + end + end + after + Mix.env(:test) + end + + test "loads umbrella sibling dependencies with :in_umbrella" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project :umbrella, ".", fn _ -> + File.write! "apps/bar/mix.exs", """ + defmodule Bar.Mixfile do + use Mix.Project + + def project do + [app: :bar, + version: "0.1.0", + deps: [{:foo, in_umbrella: true}]] + end + end + """ + + # Running from umbrella should not cause conflicts + Mix.Tasks.Deps.Get.run [] + Mix.Tasks.Run.run [] + end + end + end + + test "finds umbrella sibling dependencies conflicts with :in_umbrella" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project :umbrella, ".", fn _ -> + File.write! 
"apps/bar/mix.exs", """ + defmodule Bar.Mixfile do + use Mix.Project + + def project do + [app: :bar, + version: "0.1.0", + deps: [{:foo, in_umbrella: true, env: :unknown}]] + end + end + """ + + assert_raise Mix.Error, fn -> + Mix.Tasks.Deps.Get.run [] + end + + assert_received {:mix_shell, :error, ["Dependencies have diverged:"]} + assert_received {:mix_shell, :error, [" the dependency foo in mix.exs is overriding a child dependency" <> _]} + end + end + end + + ## Umbrellas as a dependency + + test "list deps for umbrella as dependency" do + in_fixture("umbrella_dep", fn -> + Mix.Project.in_project(:umbrella_dep, ".", fn _ -> + Mix.Task.run "deps" + assert_received {:mix_shell, :info, ["* umbrella (deps/umbrella) (mix)"]} + assert_received {:mix_shell, :info, ["* foo (apps/foo) (mix)"]} + end) + end) + end + + test "compile for umbrella as dependency" do + in_fixture "umbrella_dep", fn -> + Mix.Project.in_project(:umbrella_dep, ".", fn _ -> + Mix.Task.run "deps.compile" + assert Bar.bar == "hello world" + end) end end @@ -78,32 +243,32 @@ defmodule Mix.UmbrellaTest do in_fixture "umbrella_dep/deps/umbrella", fn -> Mix.Project.in_project :umbrella, ".", fn _ -> File.write! "apps/foo/mix.exs", """ - defmodule Foo.Mix do + defmodule Foo.Mixfile do use Mix.Project def project do # Ensure we have the proper environment :dev = Mix.env - [ app: :foo, - version: "0.1.0", - deps: [{:bar, in_umbrella: true}] ] + [app: :foo, + version: "0.1.0", + deps: [{:bar, in_umbrella: true}]] end end """ File.write! "apps/bar/mix.exs", """ - defmodule Bar.Mix do + defmodule Bar.Mixfile do use Mix.Project def project do # Ensure we have the proper environment :dev = Mix.env - [ app: :bar, - version: "0.1.0", - deps: [{:a, path: "deps/a"}, - {:b, path: "deps/b"}] ] + [app: :bar, + version: "0.1.0", + deps: [{:a, path: "deps/a"}, + {:b, path: "deps/b"}]] end end """ @@ -113,65 +278,176 @@ defmodule Mix.UmbrellaTest do end end - test "list deps for umbrella as dependency" do - in_fixture("umbrella_dep", fn -> - Mix.Project.in_project(:umbrella_dep, ".", fn _ -> - Mix.Task.run "deps" - assert_received {:mix_shell, :info, ["* umbrella (deps/umbrella)"]} - assert_received {:mix_shell, :info, ["* foo (apps/foo)"]} - end) - end) - end + test "uses dependency aliases" do + in_fixture "umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project :umbrella, ".", fn _ -> + File.write! 
"apps/bar/mix.exs", """ + defmodule Bar.Mixfile do + use Mix.Project - test "compile for umbrella as dependency" do - in_fixture "umbrella_dep", fn -> - Mix.Project.in_project(:umbrella_dep, ".", fn _ -> - Mix.Task.run "deps.compile" - assert "hello world" == Bar.bar - end) + def project do + [app: :bar, + version: "0.1.0", + aliases: ["compile.all": fn _ -> Mix.shell.info "no compile bar" end]] + end + end + """ + + Mix.Task.run "compile", ["--verbose"] + assert_receive {:mix_shell, :info, ["no compile bar"]} + refute_receive {:mix_shell, :info, ["Compiled lib/bar.ex"]} + end end end test "recompiles after path dependency changes" do in_fixture("umbrella_dep/deps/umbrella/apps", fn -> Mix.Project.in_project(:bar, "bar", fn _ -> - Mix.Task.run "compile" - assert Mix.Tasks.Compile.Elixir.run([]) == :noop - assert_receive {:mix_shell, :info, ["Compiled lib/foo.ex"]} + Mix.Task.run "compile", ["--verbose"] + assert_received {:mix_shell, :info, ["Generated foo app"]} + assert_received {:mix_shell, :info, ["Generated bar app"]} + assert File.regular?("_build/dev/lib/foo/ebin/Elixir.Foo.beam") + assert File.regular?("_build/dev/lib/bar/ebin/Elixir.Bar.beam") + + # Noop by default + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :noop + + # Noop when there is no runtime dependency + ensure_touched("_build/dev/lib/foo/ebin/Elixir.Foo.beam", + File.stat!("_build/dev/lib/bar/.compile.elixir").mtime) + ensure_touched("_build/dev/lib/foo/.compile.elixir", + File.stat!("_build/dev/lib/bar/.compile.elixir").mtime) + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :noop + + # Add runtime dependency + File.write!("lib/bar.ex", """ + defmodule Bar do + def bar, do: Foo.foo + end + """) + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok assert_receive {:mix_shell, :info, ["Compiled lib/bar.ex"]} - purge [Foo, Bar] - Mix.Task.clear - # Ensure we can measure a timestamp difference - ensure_touched("../foo/lib/foo.ex", - File.stat!("_build/dev/lib/bar/.compile.lock").mtime) + # Noop for runtime dependencies + ensure_touched("_build/dev/lib/foo/ebin/Elixir.Foo.beam", + File.stat!("_build/dev/lib/bar/.compile.elixir").mtime) + ensure_touched("_build/dev/lib/foo/.compile.elixir", + File.stat!("_build/dev/lib/bar/.compile.elixir").mtime) + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :noop + + # Add compile time dependency + File.write!("lib/bar.ex", "defmodule Bar, do: Foo.foo") + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok + assert_receive {:mix_shell, :info, ["Compiled lib/bar.ex"]} - Mix.Task.run "compile" - assert Mix.Tasks.Compile.Elixir.run([]) == :noop - assert_receive {:mix_shell, :info, ["Compiled lib/foo.ex"]} + # Recompiles for compile time dependencies + ensure_touched("_build/dev/lib/foo/ebin/Elixir.Foo.beam", + File.stat!("_build/dev/lib/bar/.compile.elixir").mtime) + ensure_touched("_build/dev/lib/foo/.compile.elixir", + File.stat!("_build/dev/lib/bar/.compile.elixir").mtime) + assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == :ok assert_receive {:mix_shell, :info, ["Compiled lib/bar.ex"]} - purge [Foo, Bar] end) end) end - defmodule Selective do - def project do - [apps_path: "apps", - apps: [:foo, :bar]] - end + test "reconsolidates after path dependency changes" do + in_fixture("umbrella_dep/deps/umbrella/apps", fn -> + Mix.Project.in_project(:bar, "bar", fn _ -> + # Add a protocol dependency + File.write!("../foo/lib/foo.ex", """ + defprotocol Foo do + def foo(arg) + end + defimpl Foo, for: List do + def foo(list), do: list + end + """) + 
Mix.Task.run("compile") + assert File.regular?("_build/dev/consolidated/Elixir.Foo.beam") + assert Mix.Tasks.Compile.Protocols.run([]) == :noop + + # Mark protocol as outdated + File.touch!("_build/dev/consolidated/Elixir.Foo.beam", + {{2010, 1, 1}, {0, 0, 0}}) + + ensure_touched("_build/dev/lib/foo/ebin/Elixir.Foo.beam", + File.stat!("_build/dev/consolidated/.compile.protocols").mtime) + assert Mix.Tasks.Compile.Protocols.run([]) == :ok + + # Check new timestamp + assert File.stat!("_build/dev/consolidated/Elixir.Foo.beam").mtime > + {{2010, 1, 1}, {0, 0, 0}} + end) + end) end - test "can select which apps to use" do + test "reconsolidates using umbrella parent information on shared _build" do in_fixture("umbrella_dep/deps/umbrella", fn -> - Mix.Project.push Selective + File.write!("apps/bar/lib/bar.ex", """ + defprotocol Bar do + def bar(arg) + end + defimpl Bar, for: List do + def bar(list), do: list + end + """) + + Mix.Project.in_project(:foo, "apps/foo", [build_path: "../../_build"], fn _ -> + Mix.Task.run("compile.protocols") + refute Code.ensure_loaded?(Bar) + end) - File.mkdir_p! "apps/errors/lib" - File.write! "apps/errors/lib/always_fail.ex", "raise ~s[oops]" + Mix.Project.in_project(:umbrella, ".", fn _ -> + Mix.Task.run("compile.protocols") + Mix.Task.run("app.start") + assert Protocol.consolidated?(Bar) + end) + end) + end - assert Mix.Task.run("compile.elixir") == [:ok, :ok] - assert_received {:mix_shell, :info, ["Compiled lib/bar.ex"]} - assert_received {:mix_shell, :info, ["Compiled lib/foo.ex"]} + test "reconsolidates using umbrella child information on shared _build" do + in_fixture("umbrella_dep/deps/umbrella", fn -> + File.write!("apps/bar/lib/bar.ex", """ + defprotocol Bar do + def foo(arg) + end + defimpl Bar, for: List do + def foo(list), do: list + end + """) + + Mix.Project.in_project(:umbrella, ".", fn _ -> + Mix.Task.run("compile.protocols") + end) + + # Emulate the dependency being removed + Mix.Project.in_project(:foo, "apps/foo", [build_path: "../../_build", deps: []], fn _ -> + File.rm_rf "../../_build/dev/lib/bar" + Mix.Task.run("compile.protocols") + end) + end) + end + + test "apps cannot refer to themselves as a dep" do + in_fixture("umbrella_dep/deps/umbrella", fn -> + Mix.Project.in_project :umbrella, ".", fn _ -> + File.write! 
"apps/bar/mix.exs", """ + defmodule Bar.Mixfile do + use Mix.Project + + def project do + [app: :bar, + version: "0.1.0", + deps: [{:bar, in_umbrella: true}]] + end + end + """ + + assert_raise Mix.Error, "App bar lists itself as a dependency", fn -> + Mix.Task.run("deps.get", ["--verbose"]) == [:ok, :ok] + end + end end) end end diff --git a/lib/mix/test/mix/utils_test.exs b/lib/mix/test/mix/utils_test.exs index d8a3b905da9..f3c6f1fb516 100644 --- a/lib/mix/test/mix/utils_test.exs +++ b/lib/mix/test/mix/utils_test.exs @@ -7,57 +7,38 @@ defmodule Mix.UtilsTest do use MixTest.Case doctest Mix.Utils - test :command_to_module do + test "command to module" do assert Mix.Utils.command_to_module("cheers", Mix.Tasks) == {:module, Mix.Tasks.Cheers} assert Mix.Utils.command_to_module("unknown", Mix.Tasks) == {:error, :nofile} end - test :module_name_to_command do + test "module name to command" do assert Mix.Utils.module_name_to_command(Mix.Tasks.Foo, 2) == "foo" assert Mix.Utils.module_name_to_command("Mix.Tasks.Foo", 2) == "foo" assert Mix.Utils.module_name_to_command("Mix.Tasks.Foo.Bar", 2) == "foo.bar" + assert Mix.Utils.module_name_to_command("Mix.Tasks.FooBar.Bing", 2) == "foo_bar.bing" + assert Mix.Utils.module_name_to_command("Mix.Tasks.FooBar.BingBang", 2) == "foo_bar.bing_bang" end - test :command_to_module_name do + test "command to module name" do assert Mix.Utils.command_to_module_name("foo") == "Foo" assert Mix.Utils.command_to_module_name("foo.bar") == "Foo.Bar" + assert Mix.Utils.command_to_module_name("foo_bar.baz") == "FooBar.Baz" + assert Mix.Utils.command_to_module_name("foo_bar.baz_bing") == "FooBar.BazBing" end - test :underscore do - assert Mix.Utils.underscore("foo") == "foo" - assert Mix.Utils.underscore("foo_bar") == "foo_bar" - assert Mix.Utils.underscore("Foo") == "foo" - assert Mix.Utils.underscore("FooBar") == "foo_bar" - assert Mix.Utils.underscore("FOOBar") == "foo_bar" - assert Mix.Utils.underscore("FooBAR") == "foo_bar" - assert Mix.Utils.underscore("FoBaZa") == "fo_ba_za" - assert Mix.Utils.underscore("Foo.Bar") == "foo/bar" - assert Mix.Utils.underscore(Foo.Bar) == "foo/bar" - assert Mix.Utils.underscore("") == "" - assert Mix.Utils.underscore("..") == ".." - assert Mix.Utils.underscore("/..") == "/.." 
- assert Mix.Utils.underscore("foo/../bar") == "foo/../bar" + test "extract files" do + files = Mix.Utils.extract_files [Path.join(fixture_path(), "archive")], "*.ex" + assert length(files) == 1 + assert Path.basename(hd(files)) == "local.sample.ex" end - test :camelize do - assert Mix.Utils.camelize("Foo") == "Foo" - assert Mix.Utils.camelize("FooBar") == "FooBar" - assert Mix.Utils.camelize("foo") == "Foo" - assert Mix.Utils.camelize("foo_bar") == "FooBar" - assert Mix.Utils.camelize("foo_") == "Foo" - assert Mix.Utils.camelize("_foo") == "Foo" - assert Mix.Utils.camelize("foo__bar") == "FooBar" - assert Mix.Utils.camelize("foo/bar") == "Foo.Bar" - assert Mix.Utils.camelize("") == "" - end - test :extract_files do - files = Mix.Utils.extract_files [Path.join(fixture_path, "archive")], "*.ex" - assert length(files) == 1 - assert Path.basename(hd(files)) == "local.sample.ex" + test "extract files with empty string returns empty list" do + assert Mix.Utils.extract_files([""], ".ex") == [] end - test :extract_stale do + test "extract stale" do time = {{2030, 1, 1}, {0, 0, 0}} assert Mix.Utils.extract_stale([__ENV__.file], [time]) == [] @@ -67,7 +48,7 @@ defmodule Mix.UtilsTest do assert Mix.Utils.extract_stale([__ENV__.file], [__ENV__.file]) == [] end - test :symlink_or_copy do + test "symlink or copy" do in_fixture "archive", fn -> File.mkdir_p!("_build/archive") result = Mix.Utils.symlink_or_copy(Path.expand("ebin"), Path.expand("_build/archive/ebin")) @@ -75,7 +56,7 @@ defmodule Mix.UtilsTest do end end - test :symlink_or_copy_removes_previous_directories do + test "symlink or copy removes previous directories" do in_fixture "archive", fn -> File.mkdir_p!("_build/archive/ebin") result = Mix.Utils.symlink_or_copy(Path.expand("ebin"), Path.expand("_build/archive/ebin")) @@ -83,19 +64,46 @@ defmodule Mix.UtilsTest do end end - test :symlink_or_copy_erases_wrong_symblinks do - in_fixture "archive", fn -> - File.mkdir_p!("_build/archive") - Mix.Utils.symlink_or_copy(Path.expand("priv"), Path.expand("_build/archive/ebin")) - result = Mix.Utils.symlink_or_copy(Path.expand("ebin"), Path.expand("_build/archive/ebin")) - assert_ebin_symlinked_or_copied(result) + @windows? match?({:win32, _}, :os.type) + unless @windows? 
do + test "symlink or copy erases wrong symblinks" do + in_fixture "archive", fn -> + File.mkdir_p!("_build/archive") + Mix.Utils.symlink_or_copy(Path.expand("priv"), Path.expand("_build/archive/ebin")) + result = Mix.Utils.symlink_or_copy(Path.expand("ebin"), Path.expand("_build/archive/ebin")) + assert_ebin_symlinked_or_copied(result) + end end end + test "proxy_config reads from env and returns credentials" do + assert Mix.Utils.proxy_config("http://example.com/") == [] + + System.put_env("http_proxy", "http://nopass@example.com/") + assert Mix.Utils.proxy_config("http://example.com/") == [proxy_auth: {'nopass', ''}] + + System.put_env("HTTP_PROXY", "http://my:proxy@example.com/") + assert Mix.Utils.proxy_config("http://example.com/") == [proxy_auth: {'my', 'proxy'}] + + System.put_env("https_proxy", "https://another:proxy@example.com/") + assert Mix.Utils.proxy_config("https://example.com/") == [proxy_auth: {'another', 'proxy'}] + + System.put_env("HTTPS_PROXY", "https://example.com/") + assert Mix.Utils.proxy_config("https://example.com/") == [] + end + defp assert_ebin_symlinked_or_copied(result) do case result do {:ok, paths} -> assert Path.expand("_build/archive/ebin") in paths - :ok -> assert :file.read_link("_build/archive/ebin") == {:ok, '../../ebin'} + :ok -> + expected_link = + case :os.type do + # relative symlink on Windows are broken, see symlink_or_copy/2 + {:win32, _} -> "ebin" |> Path.expand() |> String.to_charlist() + _ -> '../../ebin' + end + {:ok, actual_link} = :file.read_link("_build/archive/ebin") + assert actual_link == expected_link _ -> flunk "expected symlink_or_copy to return :ok or {:ok, list_of_paths}, got: #{inspect result}" end end diff --git a/lib/mix/test/mix_test.exs b/lib/mix/test/mix_test.exs index 97e492f5d33..9b7c00ff157 100644 --- a/lib/mix/test/mix_test.exs +++ b/lib/mix/test/mix_test.exs @@ -3,13 +3,20 @@ Code.require_file "test_helper.exs", __DIR__ defmodule MixTest do use MixTest.Case - test :shell do + test "shell" do assert Mix.shell == Mix.Shell.Process end - test :env do + test "env" do assert Mix.env == :dev Mix.env(:prod) assert Mix.env == :prod end -end \ No newline at end of file + + test "debug" do + refute Mix.debug? + Mix.debug(true) + assert Mix.debug? + Mix.debug(false) + end +end diff --git a/lib/mix/test/test_helper.exs b/lib/mix/test/test_helper.exs index 6fc3219aa4f..4cf089fbf05 100644 --- a/lib/mix/test/test_helper.exs +++ b/lib/mix/test/test_helper.exs @@ -1,15 +1,22 @@ Mix.start() Mix.shell(Mix.Shell.Process) - +Application.put_env(:mix, :colors, [enabled: false]) ExUnit.start [trace: "--trace" in System.argv] + +unless {1, 7, 4} <= Mix.SCM.Git.git_version do + IO.puts :stderr, "Skipping tests with git sparse checkouts..." 
+ ExUnit.configure(exclude: :git_sparse) +end + defmodule MixTest.Case do use ExUnit.CaseTemplate defmodule Sample do def project do - [ app: :sample, - version: "0.1.0" ] + [app: :sample, + version: "0.1.0", + aliases: [sample: "compile"]] end end @@ -19,30 +26,38 @@ defmodule MixTest.Case do end end - setup do + setup config do + if apps = config[:apps] do + Logger.remove_backend(:console) + end + on_exit fn -> + Application.start(:logger) Mix.env(:dev) Mix.Task.clear Mix.Shell.Process.flush Mix.ProjectStack.clear_cache Mix.ProjectStack.clear_stack - System.put_env("MIX_HOME", tmp_path(".mix")) - delete_tmp_paths + delete_tmp_paths() + + if apps do + for app <- apps do + Application.stop(app) + Application.unload(app) + end + Logger.add_backend(:console, flush: true) + end end :ok end - def elixir_root do - Path.expand("../../..", __DIR__) - end - def fixture_path do Path.expand("fixtures", __DIR__) end def fixture_path(extension) do - Path.join fixture_path, extension + Path.join fixture_path(), extension end def tmp_path do @@ -50,13 +65,13 @@ defmodule MixTest.Case do end def tmp_path(extension) do - Path.join tmp_path, extension + Path.join tmp_path(), to_string(extension) end def purge(modules) do Enum.each modules, fn(m) -> - :code.delete(m) :code.purge(m) + :code.delete(m) end end @@ -79,8 +94,8 @@ defmodule MixTest.Case do def in_fixture(which, tmp, function) do src = fixture_path(which) - dest = tmp_path(tmp) - flag = tmp_path |> String.to_char_list + dest = tmp_path(String.replace(tmp, ":", "_")) + flag = String.to_charlist(tmp_path()) File.rm_rf!(dest) File.mkdir_p!(dest) @@ -93,10 +108,11 @@ defmodule MixTest.Case do File.cd! dest, function after :code.set_path(get_path) - Enum.each (:code.all_loaded -- previous), fn {mod, file} -> - if is_list(file) and :lists.prefix(flag, file) do - purge [mod] - end + + for {mod, file} <- :code.all_loaded -- previous, + file == :in_memory or + (is_list(file) and :lists.prefix(flag, file)) do + purge [mod] end end end @@ -119,21 +135,62 @@ defmodule MixTest.Case do end end + def mix(args, envs \\ []) when is_list(args) do + System.cmd(elixir_executable(), + ["-r", mix_executable(), "--" | args], + stderr_to_stdout: true, + env: envs) |> elem(0) + end + + def mix_port(args, envs \\ []) when is_list(args) do + Port.open({:spawn_executable, elixir_executable()}, [ + {:args, ["-r", mix_executable(), "--" | args]}, + {:env, envs}, + :binary, + :use_stdio, + :stderr_to_stdout + ]) + end + + defp mix_executable do + Path.expand("../../../bin/mix", __DIR__) + end + + defp elixir_executable do + Path.expand("../../../bin/elixir", __DIR__) + end + defp delete_tmp_paths do - tmp = tmp_path |> String.to_char_list - to_remove = Enum.filter :code.get_path, fn(path) -> :string.str(path, tmp) != 0 end - Enum.map to_remove, &(:code.del_path(&1)) + tmp = tmp_path() |> String.to_charlist + for path <- :code.get_path, + :string.str(path, tmp) != 0, + do: :code.del_path(path) end end +## Set up Mix home with Rebar + +home = MixTest.Case.tmp_path(".mix") +File.mkdir_p!(home) +System.put_env("MIX_HOME", home) + +rebar = System.get_env("REBAR") || Path.expand("../../../rebar", __DIR__) +File.cp!(rebar, Path.join(home, "rebar")) +rebar = System.get_env("REBAR3") || Path.expand("../../../rebar3", __DIR__) +File.cp!(rebar, Path.join(home, "rebar3")) + ## Copy fixtures to tmp -source = MixTest.Case.fixture_path("rebar_dep") -dest = MixTest.Case.tmp_path("rebar_dep") -File.mkdir_p!(dest) -File.cp_r!(source, dest) +fixtures = ~w(rebar_dep rebar_override) -## Generate 
git repo fixtures +Enum.each(fixtures, fn fixture -> + source = MixTest.Case.fixture_path(fixture) + dest = MixTest.Case.tmp_path(fixture) + File.mkdir_p!(dest) + File.cp_r!(source, dest) +end) + +## Generate Git repo fixtures # Git repo target = Path.expand("fixtures/git_repo", __DIR__) @@ -147,16 +204,16 @@ unless File.dir?(target) do """ File.cd! target, fn -> - System.cmd("git init") - System.cmd("git config user.email \"mix@example.com\"") - System.cmd("git config user.name \"Mix Repo\"") - System.cmd("git add .") - System.cmd("git commit -m \"bad\"") + System.cmd("git", ~w[init]) + System.cmd("git", ~w[config user.email "mix@example.com"]) + System.cmd("git", ~w[config user.name "mix-repo"]) + System.cmd("git", ~w[add .]) + System.cmd("git", ~w[commit -m "bad"]) end File.write! Path.join(target, "mix.exs"), """ ## Auto-generated fixture - defmodule GitRepo.Mix do + defmodule GitRepo.Mixfile do use Mix.Project def project do @@ -166,8 +223,9 @@ unless File.dir?(target) do """ File.cd! target, fn -> - System.cmd("git add .") - System.cmd("git commit -m \"ok\"") + System.cmd("git", ~w[add .]) + System.cmd("git", ~w[commit -m "ok"]) + System.cmd("git", ~w[tag without_module]) end File.write! Path.join(target, "lib/git_repo.ex"), """ @@ -179,13 +237,38 @@ unless File.dir?(target) do end """ + ## Sparse + subdir = Path.join(target, "sparse_dir") + File.mkdir_p!(Path.join(subdir, "lib")) + + File.write! Path.join(subdir, "mix.exs"), """ + ## Auto-generated fixture + defmodule GitSparseRepo.Mixfile do + use Mix.Project + + def project do + [app: :git_sparse_repo, version: "0.1.0"] + end + end + """ + + File.write! Path.join(subdir, "lib/git_sparse_repo.ex"), """ + ## Auto-generated fixture + defmodule GitSparseRepo do + def hello do + "World" + end + end + """ + File.cd! target, fn -> - System.cmd("git add .") - System.cmd("git commit -m \"lib\"") + System.cmd("git", ~w[add .]) + System.cmd("git", ~w[commit -m "lib"]) + System.cmd("git", ~w[tag with_module]) end end -# Deps on git repo +# Deps on Git repo target = Path.expand("fixtures/deps_on_git_repo", __DIR__) unless File.dir?(target) do @@ -193,27 +276,32 @@ unless File.dir?(target) do File.write! Path.join(target, "mix.exs"), """ ## Auto-generated fixture - defmodule DepsOnGitRepo.Mix do + defmodule DepsOnGitRepo.Mixfile do use Mix.Project def project do - [ app: :deps_on_git_repo, - version: "0.2.0", - deps: [{:git_repo, git: MixTest.Case.fixture_path("git_repo")}] ] + [app: :deps_on_git_repo, + version: "0.2.0", + deps: [{:git_repo, git: MixTest.Case.fixture_path("git_repo")}]] end end """ + File.write! Path.join(target, "lib/deps_on_git_repo.ex"), """ + ## Auto-generated fixture + GitRepo.hello + """ + File.cd! target, fn -> - System.cmd("git init") - System.cmd("git config user.email \"mix@example.com\"") - System.cmd("git config user.name \"Mix Repo\"") - System.cmd("git add .") - System.cmd("git commit -m \"ok\"") + System.cmd("git", ~w[init]) + System.cmd("git", ~w[config user.email "mix@example.com"]) + System.cmd("git", ~w[config user.name "mix-repo"]) + System.cmd("git", ~w[add .]) + System.cmd("git", ~w[commit -m "ok"]) end end -# Git rebar +# Git Rebar target = Path.expand("fixtures/git_rebar", __DIR__) unless File.dir?(target) do @@ -236,14 +324,55 @@ unless File.dir?(target) do """ File.cd! 
target, fn -> - System.cmd("git init") - System.cmd("git config user.email \"mix@example.com\"") - System.cmd("git config user.name \"Mix Repo\"") - System.cmd("git add .") - System.cmd("git commit -m \"ok\"") + System.cmd("git", ~w[init]) + System.cmd("git", ~w[config user.email "mix@example.com"]) + System.cmd("git", ~w[config user.name "mix-repo"]) + System.cmd("git", ~w[add .]) + System.cmd("git", ~w[commit -m "ok"]) end end -Enum.each [:invalidapp, :invalidvsn, :noappfile, :ok], fn(dep) -> +Enum.each [:invalidapp, :invalidvsn, :noappfile, :nosemver, :ok], fn(dep) -> File.mkdir_p! Path.expand("fixtures/deps_status/deps/#{dep}/.git", __DIR__) end + +## Generate helper modules + +path = MixTest.Case.tmp_path("beams") +File.rm_rf!(path) +File.mkdir_p!(path) + +write_beam = fn {:module, name, bin, _} -> + path + |> Path.join(Atom.to_string(name) <> ".beam") + |> File.write!(bin) +end + +defmodule Mix.Tasks.Hello do + use Mix.Task + @shortdoc "This is short documentation, see" + + @moduledoc """ + A test task. + """ + + def run([]) do + "Hello, World!" + end + + def run(["--parser" | args]) do + OptionParser.parse!(args, strict: [int: :integer]) + end + + def run(args) do + "Hello, #{Enum.join(args, " ")}!" + end +end |> write_beam.() + +defmodule Mix.Tasks.Invalid do +end |> write_beam.() + +defmodule Mix.Tasks.Acronym.HTTP do + use Mix.Task + def run(_), do: "An HTTP Task" +end |> write_beam.() diff --git a/man/common b/man/common new file mode 100644 index 00000000000..3a6c79cb412 --- /dev/null +++ b/man/common @@ -0,0 +1,63 @@ +.It Fl e Ar expression +Evaluates the specified expression. +.It Fl r Ar file +Requires the specified file. In other words, the file is checked for existence at the start of +.Nm . +.It Fl S Ar script +Runs the specified script. +.It Fl pa Ar directory +Adds the specified directory to the beginning of the code path. If the directory already exists, it will be removed from its old position and put to the beginning. +.Pp +See also the function +.Sy Code.prepend_path/1 . +.It Fl pr Ar file +Does the same thing as +.Fl r +.Pq see above +but in parallel. +.It Fl pz Ar directory +Adds the specified directory to the end of the code path. If the directory already exists, it will be neither removed from its old position nor put to the end. +.Pp +See also the function +.Sy Code.append_path/1 . +.It Fl -app Ar application +Starts the specified application and all its dependencies. +.It Fl -erl Ar parameters +Serves the same purpose as ELIXIR_ERL_OPTIONS +.Pq see the Sy ENVIRONMENT No section +.It Fl -cookie Ar value +Specifies the magic cookie value. If the value isn't specified via the option when the node starts, it will be taken from the file +.Pa ~/.erlang.cookie +.Pq see the Sy FILES No section . +Distributed nodes can interact with each other only when their magic cookies are equal. +.Pp +See also the function +.Sy Node.set_cookie/2 . +.It Fl -detached +Runs the Erlang runtime system detached from the controlling terminal. +.It Fl -hidden +Starts a hidden node. +.Pp +Connections between nodes are transitive. For example, if node A is connected to node B, and node B is connected to node C, then node A is connected to node C. The option +.Fl -hidden +allows creating a node which can be connected to another node, escaping redundant connections. +.Pp +The function +.Sy Node.list/0 +allows getting the list of nodes connected to the target node, however the list won't include hidden nodes. 
Depending on the input parameter, the function +.Sy Node.list/1 +allows getting the list which contains only hidden nodes +.Pq the parameter Ar :hidden +or both hidden and not hidden nodes +.Pq the parameter Ar :connected . +.It Fl -sname Ar name +Gives a node a short name and starts it. Short names take the form of +.Ar name Ns +@host, where host is the name of the target host +.Pq Xr hostname 1 +which runs the node. The nodes with short names can interact with each other only in the same local network. +.It Fl -name Ar name +Gives a node a long name and starts it. Long names take the form of +.Ar name Ns +@host, where host is the IP address of the host which runs the node. In contrast to the nodes with short names, the nodes with long names aren't limited by boundaries of a local network +.Pq see above . diff --git a/man/elixir.1.in b/man/elixir.1.in new file mode 100644 index 00000000000..e552fc066ec --- /dev/null +++ b/man/elixir.1.in @@ -0,0 +1,51 @@ +.Dd April 10, 2015 +.Dt ELIXIR 1 +.Os +.Sh NAME +.Nm elixir +.Nd The Elixir script runner +.Sh SYNOPSIS +.Nm +.Op Ar OPTIONS +.Ar +.Sh DESCRIPTION +The program starts the runtime system typically for the execution of one or more scripts. It is similar to +.Xr iex 1 , +but +.Nm +exits when the executed script does. +.Sh OPTIONS +Note that many of the options mentioned here were borrowed from the Erlang shell, therefore +.Xr erl 1 +can be used as an additional source of information on the options. +.Bl -tag -width Ds +{COMMON} +.It Fl -no-halt +Does not halt the Erlang VM after execution. +.It Fl - +Separates the options passed to the compiler from the options passed to the executed code. +.El +.Sh ENVIRONMENT +.Bl -tag -width Ds +.It Ev ELIXIR_ERL_OPTIONS +Allows passing parameters to the Erlang runtime. +.El +.Sh FILES +.Bl -tag -width Ds +.It Pa ~/.erlang.cookie +Stores the magic cookie value which is used only when it wasn't specified via the option +.Fl -cookie +.Pq see above . +If the file doesn't exist when a node starts, it will be created. +.El +.Sh SEE ALSO +.Xr elixirc 1 , +.Xr iex 1 , +.Xr mix 1 +.Sh AUTHOR +This manual page contributed by Evgeny Golyshev. +.Sh INTERNET RESOURCES +.Bl -tag -width Ds +.It Main website: http://elixir-lang.org +.It Documentation: http://elixir-lang.org/docs.html +.El diff --git a/man/elixirc.1 b/man/elixirc.1 new file mode 100644 index 00000000000..abef97a9411 --- /dev/null +++ b/man/elixirc.1 @@ -0,0 +1,52 @@ +.Dd April 10, 2015 +.Dt ELIXIRC 1 +.Os +.Sh NAME +.Nm elixirc +.Nd The Elixir compiler +.Sh SYNOPSIS +.Nm +.Op Ar OPTIONS +.Ar +.Sh DESCRIPTION +The compiler is intended for compilation one or more files containing the Elixir source code. The files should have the extension +.Em .ex . +.Sh OPTIONS +.Bl -tag -width Ds +.It Fl o Ar directory +Places the output file in the specified directory. If the directory is not specified via the option, the current working directory will be used for the purpose. +.It Fl -erl Ar parameters +Serves the same purpose as ELIXIR_ERL_OPTIONS +.Pq see the Sy ENVIRONMENT No section . +.It Fl -ignore-module-conflict +Disables warnings when a module was previously defined. +.It Fl -no-debug-info +Disables producing debugging information. +.It Fl -no-docs +Disables generating documentation. +.It Fl -warnings-as-errors +Makes all warnings into errors. +.It Fl -verbose +Activates verbose mode. +.It Fl - +Separates the options passed to the compiler from the options passed to the executed code. 
+.El +.Sh ENVIRONMENT +.Bl -tag -width Ds +.It Ev ELIXIR_ERL_OPTIONS +Allows passing parameters to the Erlang runtime. +.It Ev ERL_COMPILER_OPTIONS +Allows passing parameters to the Erlang compiler +.Pq see Xr erlc 1 . +.El +.Sh SEE ALSO +.Xr elixir 1 , +.Xr iex 1 , +.Xr mix 1 +.Sh AUTHOR +This manual page contributed by Evgeny Golyshev. +.Sh INTERNET RESOURCES +.Bl -tag -width Ds +.It Main website: http://elixir-lang.org +.It Documentation: http://elixir-lang.org/docs.html +.El diff --git a/man/iex.1.in b/man/iex.1.in new file mode 100644 index 00000000000..b387e689d08 --- /dev/null +++ b/man/iex.1.in @@ -0,0 +1,61 @@ +.Dd April 10, 2015 +.Dt IEX 1 +.Os +.Sh NAME +.Nm iex +.Nd The Elixir shell +.Sh SYNOPSIS +.Nm +.Op Ar OPTIONS +.Sh DESCRIPTION +The interactive shell is used for evaluation, debugging and introspection of the Elixir runtime system. It is also possible to use the program for testing the work of small pieces of code escaping the stage of saving the code in a file. +.Sh OPTIONS +Note that many of the options mentioned here were borrowed from the Erlang shell, therefore +.Xr erl 1 +can be used as an additional source of information on the options. +.Bl -tag -width Ds +{COMMON} +.It Fl -dot-iex Ar file +Loads the specified file instead of +.Pa .iex.exs +.Pq see the Sy FILES No section . +.It Fl -remsh Ar node +Connects to the specified node which was started with the +.Fl -sname +or +.Fl -name +options +.Pq see above . +.It Fl - +Separates the options passed to the compiler from the options passed to the executed code. +.El +.Sh ENVIRONMENT +.Bl -tag -width Ds +.It Ev ELIXIR_ERL_OPTIONS +Allows passing parameters to the Erlang runtime. +.El +.Sh FILES +.Bl -tag -width Ds +.It Pa ~/.erlang.cookie +Stores the magic cookie value which is used only when it wasn't specified via the option +.Fl -cookie +.Pq see above . +If the file doesn't exist when a node starts, it will be created. +.It Pa .iex.exs +After +.Nm +starts, it seeks the file +.Pa .iex.exs +and, in a case of success, executes the code from the file in the context of the shell. At first the search starts in the current working directory, then, if necessary, it continues in the home directory. +.El +.Sh SEE ALSO +.Xr elixir 1 , +.Xr elixirc 1 , +.Xr mix 1 +.Sh AUTHOR +This manual page contributed by Evgeny Golyshev. +.Sh INTERNET RESOURCES +.Bl -tag -width Ds +.It Main website: http://elixir-lang.org +.It Documentation: http://elixir-lang.org/docs.html +.El diff --git a/man/mix.1 b/man/mix.1 new file mode 100644 index 00000000000..f5d3b57bb11 --- /dev/null +++ b/man/mix.1 @@ -0,0 +1,147 @@ +.Dd May 27, 2015 +.Dt MIX 1 +.Os +.Sh NAME +.Nm mix +.Nd The software project management tool +.Sh SYNOPSIS +.Nm +.Op Ar TASK +.Op Ar project_name +.Nm elixir +.Op Ar OPTIONS +.Fl S Nm +.Op Ar TASK +.Op Ar project_name +.Sh DESCRIPTION +.Nm +is intended for both organizing code into projects and their maintenance. For the latter the tool offers some advanced features like dependency management, packaging, preparing documentation, testing and so on. +.Pp +Have a look at the +.Sy SYNOPSIS +section and the second way of running +.Nm +it offers. The point is that the tool is none other than the Elixir script, therefore it can be invoked via +.Xr elixir 1 +in the same way as any other script. It's useful when you want to run +.Nm +with particular options. +.Sh DEFINITIONS +All the +.Nm +functionality is represented by a set of tasks. A +.Em task +is a piece of code written in Elixir and intended for solving a particular problem. 
Like programs, many tasks accept input parameters and/or support options which slightly modify their behaviour, but others do not. There are two types of tasks: those that are available after installation this or that archive +.Pq local tasks +and those that are offered by +.Nm +.Pq built-in tasks . +The +.Sy run +task will be executed by default if none other has been specified. +.Pp +In spite of the fact that the greater part of +.Nm +is tasks, the man page doesn't contain the help information related to each of them because +.Nm +is self-descriptive. Thus, using the +.Sy help +task, you can get both the full list of local/built-in tasks and the information related to a particular task. +.Pp +An +.Em archive , +in terms of Erlang, is the ZIP file with the +.Em .ez +extension which contains a precompiled Erlang application with all its dependencies[1]. +.Pp +An +.Em application +is an entity that helps to combine sets of components into a single unit to simplify their reusing in other systems[2]. +.Sh ENVIRONMENT +.Bl -tag -width Ds +.It Ev MIX_ARCHIVES +Allows specifying the directory into which the archives should be installed +.Pq see Sy mix help archive.install . +The +.Em ~/.mix/archives +directory is used for this purpose by default. +.It Ev MIX_DEBUG +When set, outputs debug information about each task before running it. +.It Ev MIX_ENV +Allows specifying which environment should be used. The +.Em dev +environment is used by default if none other has been specified. +.Pp +Sometimes you have to use a particular set of configuration parameter values or perform particular steps when you compile or run a project +.Pq or in some other cases . +The +.Nm +environments allow grouping values of configuration parameters and steps to switch between them by specifying the necessary environment via MIX_ENV. +.It Ev MIX_EXS +Allows changing the full path to the +.Em mix.exs +file +.Pq see Sy FILES No section . +The most obvious use case is to have more than one copy of +.Em mix.exs +in a project, but it's worth noting that MIX_EXS should be used only if the +.Nm +environments +.Pq see above +are not enough to solve the problem. +.It Ev MIX_HOME +Stores configuration files and scripts shared by multiple +.Nm +implementations. +.Pp +See the +.Sy Mix.Utils.mix_home/0 +function. +.It Ev MIX_PATH +Allows expanding the code path. If the MIX_PATH environment variable has a value which consists of multiple paths, they must be colon-separated +.Pq for Unix-like operating systems +or semicolon-separated +.Pq for Windows . +.Pp +As has already been mentioned above, there are two types of tasks: local and built-in. These tasks are always visible for +.Nm +because the directories, in which they are located, are a part of code path. If a task belongs to neither the one type nor the other, MIX_PATH helps you say to +.Nm +where it should search the task. +.Pp +Use the +.Sy :code.get_path/0 +function to get the list of paths which are a part of the code path by default and the +.Sy Mix.Utils.mix_paths/0 +function to get the list of paths specified in the MIX_PATH value. +.It Ev MIX_QUIET +When set, does not print information messages to the terminal. +.El +.Sh FILES +.Bl -tag -width Ds +.It mix.exs +Contains the most significant information related to the project, such as its name, version, list of dependencies and so on. 
As a rule, the file is named +.Em mix.exs +and located at the top of the project's source tree, but you can change the full path to it using the MIX_EXS environment variable +.Pq see the Sy ENVIRONMENT No section . +.It mix.lock +Allows locking down the project dependencies with a proper version range before performing any updates. It is useful when you know that your project is incompatible with newer versions of certain dependencies. The file is located at the top of the project's source tree as well as +.Em mix.exs +.Pq see above . +.El +.Sh REFERENCES +.Bl -tag -width Ds +.It [1] http://www.erlang.org/doc/man/code.html#id103620 +.It [2] http://www.erlang.org/doc/design_principles/applications.html +.El +.Sh SEE ALSO +.Xr elixir 1 , +.Xr elixirc 1 , +.Xr iex 1 +.Sh AUTHOR +This manual page contributed by Evgeny Golyshev. +.Sh INTERNET RESOURCES +.Bl -tag -width Ds +.It Main website: http://elixir-lang.org +.It Documentation: http://elixir-lang.org/docs.html +.El diff --git a/rebar.config b/rebar.config index 444ebef8ce9..9154464fa23 100644 --- a/rebar.config +++ b/rebar.config @@ -1,12 +1,12 @@ -%% Using elixir as a rebar dependency +%% Using Elixir as a Rebar dependency %% This configuration file only exists so Elixir can be used -%% as a rebar dependency, the same happens for the file +%% as a Rebar dependency, the same happens for the file %% src/elixir.app.src. %% In practice, Elixir is structured as OTP where many applications %% are placed in the lib directory. Since this structure is not -%% supported by default by rebar, after adding Elixir as a dependency +%% supported by default by Rebar, after adding Elixir as a dependency %% you need to explicitly add it to lib_dirs: %% %% {lib_dirs, [ @@ -14,8 +14,8 @@ %% ]}. %% -%% Run make as the proper compilation step -{post_hooks, [{compile,"make compile"}]}. +%% Run "make" as the proper compilation step +{post_hooks, [{compile, "make compile"}]}. %% This prevents rebar_elixir_plugin from recompiling Elixir {ex_opts, [{src_dirs, [".PHONY"]}]}. diff --git a/rebar3 b/rebar3 new file mode 100755 index 00000000000..a31d4c46e40 Binary files /dev/null and b/rebar3 differ diff --git a/src/elixir.app.src b/src/elixir.app.src index 2006fe1de53..859b5136ab4 100644 --- a/src/elixir.app.src +++ b/src/elixir.app.src @@ -1,11 +1,11 @@ {application, elixir, [{description, "elixir"}, - {vsn, "0.14.3-dev"}, + {vsn, "1.5.0-dev"}, {modules, [ elixir ]}, - {registered, [elixir_code_server]}, - {applications, [kernel,stdlib]}, + {registered, [elixir_config, elixir_code_server]}, + {applications, [kernel,stdlib,compiler]}, {mod, {elixir,[]}}, - {env, [{uri, #{}}]} + {env, [{ansi_enabled, false}]} ]}.
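The xref tests above exercise the `:xref` project option, which lets a project silence undefined-call warnings for modules it knows are optional. A minimal sketch of such a configuration, modelled on the `ExcludeSample` project in the "exclude" test (the `:my_app` name and the excluded modules are illustrative, not taken from the diff):

```elixir
# Hypothetical mix.exs; mirrors the shape used by ExcludeSample above.
# Exclude entries may be a bare module or a {module, function, arity}
# tuple; matching warnings are dropped by `mix xref warnings`.
defmodule MyApp.Mixfile do
  use Mix.Project

  def project do
    [app: :my_app,
     version: "0.1.0",
     xref: [exclude: [OptionalBackend, {OptionalBackend.Native, :encode, 1}]]]
  end
end
```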
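Several umbrella tests above write an `apps/bar/mix.exs` whose dependency on a sibling app is declared with `in_umbrella: true`. A sketch of that child mixfile, following the shape used in the fixtures (the `:foo` and `:bar` apps are the test fixtures themselves):

```elixir
# Umbrella child at apps/bar/mix.exs depending on the sibling app :foo.
# Mix resolves the sibling from the umbrella's apps directory, so no :path
# option is needed; diverging options (e.g. `env: :unknown` in the conflict
# test above) make `mix deps.get` fail with "Dependencies have diverged".
defmodule Bar.Mixfile do
  use Mix.Project

  def project do
    [app: :bar,
     version: "0.1.0",
     deps: [{:foo, in_umbrella: true}]]
  end
end
```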
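The proxy test above drives `Mix.Utils.proxy_config/1` through the conventional `http_proxy`/`HTTPS_PROXY` variables. A small usage sketch under the same assumptions (host, port and credentials are made up):

```elixir
# proxy_config/1 picks the proxy variable matching the URL scheme and
# returns the credentials as :httpc-style charlists; with no userinfo
# in the proxy URL, or no proxy set at all, it returns [].
System.put_env("HTTP_PROXY", "http://user:secret@proxy.internal:3128/")
Mix.Utils.proxy_config("http://example.com/")
#=> [proxy_auth: {'user', 'secret'}]

System.delete_env("HTTP_PROXY")
Mix.Utils.proxy_config("http://example.com/")
#=> []
```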
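The `--sname`, `--cookie` and `--hidden` options described in `man/common` above correspond to functions in the `Node` module. A brief IEx-flavoured sketch (node names and the cookie value are placeholders):

```elixir
# Assumes this node was started with: iex --sname a
Node.set_cookie(node(), :my_secret)  # same effect as starting with --cookie my_secret
Node.connect(:"b@myhost")            # the peer must share the cookie (placeholder name)

Node.list()            # visible nodes only; hidden nodes are omitted
Node.list(:hidden)     # only hidden nodes
Node.list(:connected)  # both hidden and not hidden nodes
```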