Mirror of https://github.com/UzixLS/migresia.git (synced 2025-07-18 14:51:33 +03:00)

Commit: Refactor to use precompiled migrations.

README.md
@@ -11,9 +11,8 @@ When migrating the database:
 
 1. List all available migrations.
 2. Check which migrations haven't yet been applied.
-3. Compile all unapplied migrations.
-4. If the compilation went OK, load all compiled migrations.
-5. For each loaded migration:
+3. Load required migrations.
+4. For each loaded migration:
 
  * Execute the `up` / `down`\* function as required to bring the database to the desired version.
  * Mark the migration as applied if it has been executed successfully.
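For illustration (editorial note, not part of the diff), the workflow above is typically driven from an Erlang shell roughly like this; `my_app` is a placeholder application name:

    %% Sketch only: `my_app` is a placeholder; see the API section below.
    1> migresia:check(my_app).    %% list the unapplied migrations that would run
    2> migresia:migrate(my_app).  %% load them and execute their up/0 functions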
@@ -24,17 +23,13 @@ Migresia stores all applied migrations in table `schema_migrations` which it cre
 
 ## Migrations
 
-Each migration is an Erlang source `*.erl` file stored in folder `priv/migrations/`, under a given application, or in a folder configured with application environment option: `rel_relative_dir`.
-
-Migresia compiles the migrations automatically when it applies them, so they should be always distributed as source files. This is mainly to allow keeping applied migrations under a version control but not have to compile them every time when building the application or creating a release.
-
-When developing a new migration just use `migresia:check/1` to let Migresia try to compile them and report any problems.
+Each migration is an Erlang `*.beam` module stored in the `ebin/` folder, under a given application, or in a custom folder which you can pass as an argument to the Migresia API.
 
 #### Migration names
 
-The name of each migration starts with a timestamp, which is treated as its version, and ends with a short information of the purpose of that migration. For example:
+The name of each migration starts with a `db_` prefix followed by a timestamp, which is treated as its version, and ends with a short description of the purpose of that migration. For example:
 
-    20130731163300_convert_permissions.erl
+    db_20130731163300_convert_permissions.beam
 
 The timestamp is always 14 digits, but otherwise can contain any value. Migresia doesn't interpret it in any way, but it uses it to sort the migrations according to the value of that timestamp, assuming timestamps with lower values are older.
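As an editorial aside (not part of the diff), the naming rule above can be illustrated with a small helper that splits a precompiled migration's module name into its version timestamp; the module and function names here are hypothetical:

    %% Hypothetical helper, shown only to illustrate the naming convention.
    -module(migration_name_demo).
    -export([version_of/1]).
    
    %% version_of('db_20130731163300_convert_permissions') -> 20130731163300
    version_of(Module) when is_atom(Module) ->
        "db_" ++ Rest = atom_to_list(Module),
        {Timestamp, "_" ++ _Description} = lists:split(14, Rest),
        list_to_integer(Timestamp).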
@@ -48,38 +43,22 @@ Migresia doesn't provide any special support for transactions. If any operations
 
 Migresia provides a behaviour which all migrations should implement: `db_migration`. It expects two functions: `up/0` and `down/0`. Please note that at this moment migrations can't be applied backward, so the `down/0` function is unused. However, both functions are present for completeness, as it is expected that in the future Migresia will support migrating databases in both directions.
 
-Very often migrations need to know the record definitions of Mnesia tables to which the migrations will be applied. The preferred way of doing this is by creating in the Migresia `include` folder a symbolic link to the include file that contains the required record definition. By default the Migresia repository ignores all include files in the Migresia `include` directory. The linked file should then be included using `include_lib`, for example:
-
-    -module('20130731163300_convert_permissions').
-    -behaviour(db_migration).
-
-    -export([up/0, down/0]).
-
-    -include_lib("migresia/include/tables.hrl").
-
-    up() ->
-        Permissions = #permissions{superuser = true},
-        mnesia:dirty_write(#user_account{name = <<"root">>, value = Permissions}).
-
-    down() ->
-        throw(<<"Irreversible migration">>).
-
 ## API Calls
 
-Migresia exports 5 functions. All functions can take an optional first parameter of an application name (as an atom) if you want your migrations to be under your application rather than Migresia:
+Migresia exports 3 functions. All functions take a first parameter of an application name (as an atom), in whose scope migrations will be searched.
 
 ##### `migresia:check/1`
 
 This function does two things:
 
-* Compile and loads all unapplied migrations.
+* Loads all unapplied migrations.
 * Returns a list of all unapplied migrations, or a reason why it was unable to get this list.
 
-This is the method that should be used to check migrations for compilation errors as well as to verify which migrations will be applied if `migresia:migrate/1` is executed to migrate the database.
+This is the method that should be used to verify which migrations will be applied if `migresia:migrate/1` is executed to migrate the database.
 
 ##### `migresia:migrate/1`
 
-Works almost exactly like `migresia:check/1` but in the end, instead of printing information which migrations are going to be applied, just applies them by executing the required `up` or `down` functions.
+Works almost exactly like `migresia:check/1` but in the end, instead of returning information about which migrations are going to be applied, it just applies them by executing the required `up` or `down` functions.
 
 ##### `migresia:list_disc_copy_nodes/0`
 
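For reference, a minimal sketch of the `db_migration` behaviour described above (an editorial illustration; the actual module in the repository may differ, and the callback return types are assumptions):

    %% Editorial sketch of the db_migration behaviour; return types assumed.
    -module(db_migration).
    
    -callback up() -> any().
    -callback down() -> any().

A precompiled migration then declares `-behaviour(db_migration).`, exports `up/0` and `down/0` just as the removed source example did, and ships as a `db_<timestamp>_<description>.beam` file in `ebin/`.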
migresia.erl

@@ -25,16 +25,13 @@
 -module(migresia).
 
 -export([start_all_mnesia/0,
-         list_nodes/0,
-         list_migrations/0,
-         ensure_started/1,
-         check/1,
-         migrate/0,
-         migrate/1,
-         rollback/1,
-         rollback/2,
-         rollback_last/0,
-         rollback_last/1]).
+         list_nodes/0,
+         list_migrations/0,
+         ensure_started/1,
+         check/1,
+         migrate/1,
+         rollback/2,
+         rollback_last/1]).
 
 %%------------------------------------------------------------------------------
 
@@ -49,7 +46,7 @@ start_all_mnesia() ->
             Err
     end.
 
 list_nodes() ->
     mnesia:table_info(schema, disc_copies).
 
 list_migrations() ->
@@ -68,7 +65,7 @@ handle_err(Results, Bad) ->
     if Bad /= [] -> io:format(" => Error, bad nodes: ~p~n", [Bad]) end,
     {error, mnesia_not_started}.
 
 -spec ensure_started(atom()) -> ok | {error, any()}.
 ensure_started(App) ->
     case application:start(App) of
         ok -> ok;
@@ -88,14 +85,10 @@ check(App) ->
 
 %%------------------------------------------------------------------------------
 
--type migration_dir() :: default | file:filename().
+-type migration_dir() :: file:filename().
 -type migration_source() :: atom() | {rel_relative_dir, migration_dir()}.
 -type migration_sources() :: migration_source(). %% | [migration_source()].
 
--spec migrate() -> ok | {error, any()}.
-migrate() ->
-    migrate({rel_relative_dir, default}).
-
 -spec migrate(migration_sources()) -> ok | {error, any()}.
 migrate(Srcs) ->
     try
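To illustrate the two `migration_source()` forms accepted after this change (an editorial sketch; `my_app` and the directory name are placeholders, not part of the commit):

    %% Sketch only: placeholders for illustration.
    migresia:migrate(my_app),                            %% search my_app's ebin/ directory
    migresia:migrate({rel_relative_dir, "migrations"}).  %% a directory resolved via get_release_dir/1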
@@ -121,10 +114,6 @@ apply_ups(Srcs, Loaded) ->
 
 %%------------------------------------------------------------------------------
 
--spec rollback(integer()) -> ok | {error, any()}.
-rollback(Time) ->
-    rollback({rel_relative_dir, default}, Time).
-
 -spec rollback(migration_sources(), integer()) -> ok | {error, any()}.
 rollback(Srcs, Time) ->
     try
@@ -148,10 +137,5 @@ apply_downs(Srcs, Loaded, Time) ->
         rpc:multicall(nodes(), migresia_migrations, list_applied_ups, [Srcs, Time]),
     lists:foreach(fun migresia_migrations:execute_down/1, Loaded).
 
-%%------------------------------------------------------------------------------
-
--spec rollback_last() -> ok | {error, any()}.
-rollback_last() -> rollback(migresia_migrations:get_ts_before_last()).
-
 -spec rollback_last(migration_sources()) -> ok | {error, any()}.
 rollback_last(Srcs) -> rollback(Srcs, migresia_migrations:get_ts_before_last()).
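Similarly, the remaining rollback API might be used like this (editorial sketch; the application name and timestamp are placeholders):

    %% Sketch only: placeholders, not part of the commit.
    migresia:rollback(my_app, 20130731163300),  %% roll back migrations applied after this version
    migresia:rollback_last(my_app).             %% roll back only the most recently applied migration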
migresia_migrations.erl

@@ -25,16 +25,14 @@
 -module(migresia_migrations).
 
 -export([init_migrations/0,
-         list_migrations/0,
-         list_unapplied_ups/1,
-         list_applied_ups/2,
-         get_default_dir/0,
-         get_priv_dir/1,
-         get_ts_before_last/0,
-         execute_up/1,
-         execute_down/1]).
+         list_migrations/0,
+         list_unapplied_ups/1,
+         list_applied_ups/2,
+         get_ts_before_last/0,
+         execute_up/1,
+         execute_down/1]).
 
--define(DIR, <<"migrations">>).
+-define(FILEPREFIX, "db_").
 -define(TABLE, schema_migrations).
 %% Surely no migrations before the first commit in migresia
 -define(FIRST_TS, 20130404041545).
@@ -63,27 +61,17 @@ list_migrations() ->
 %%------------------------------------------------------------------------------
 
 -spec list_unapplied_ups(migresia:migration_sources()) -> mod_bin_list().
-list_unapplied_ups({rel_relative_dir, default}) ->
-    list_unapplied_ups({rel_relative_dir, get_default_dir()});
 list_unapplied_ups({rel_relative_dir, DirName}) ->
     get_unapplied(get_release_dir(DirName));
 list_unapplied_ups(App) when is_atom(App) ->
-    get_unapplied(get_priv_dir(App)).
+    get_unapplied(get_lib_dir(App)).
 
 -spec list_applied_ups(migresia:migration_sources(), integer()) ->
           mod_bin_list().
-list_applied_ups({rel_relative_dir, default}, Time) ->
-    list_applied_ups({rel_relative_dir, get_default_dir()}, Time);
 list_applied_ups({rel_relative_dir, DirName}, Time) ->
     get_applied(get_release_dir(DirName), Time);
 list_applied_ups(App, Time) when is_atom(App) ->
-    get_applied(get_priv_dir(App), Time).
-
-get_default_dir() ->
-    case application:get_env(migresia, rel_relative_dir) of
-        {ok, Val} -> Val;
-        undefined -> ?DIR
-    end.
+    get_applied(get_lib_dir(App), Time).
 
 get_release_dir(DirName) ->
     case filelib:is_dir(DirName) of
@@ -103,16 +91,16 @@ try_to_cwd(DirName) ->
         false -> throw({error, badcwd})
     end.
 
--spec get_priv_dir(atom()) -> string() | binary().
-get_priv_dir(App) ->
+-spec get_lib_dir(atom()) -> string() | binary().
+get_lib_dir(App) ->
     case application:load(App) of
-        ok -> check_priv_dir(App);
-        {error, {already_loaded, App}} -> check_priv_dir(App);
+        ok -> check_lib_dir(App);
+        {error, {already_loaded, App}} -> check_lib_dir(App);
         {error, _} = Err -> throw(Err)
     end.
 
-check_priv_dir(App) ->
-    Dir = filename:join(code:priv_dir(App), ?DIR),
+check_lib_dir(App) ->
+    Dir = code:lib_dir(App, ebin),
     case filelib:is_dir(Dir) of
         true -> Dir;
         false -> throw({error, enoent})
@@ -128,37 +116,40 @@ get_applied(Dir, Time) ->
 
 list_unapplied(FromDir, FromDB) ->
     Unapplied = [X || {Ts, _} = X <- FromDir,
                       length(FromDB) =:= 0 orelse Ts > lists:max(FromDB)],
     lists:keysort(1, Unapplied).
 
 list_applied(FromDir, FromDB, Time) ->
     Applied = [X || {Ts, _} = X <- FromDir,
                     lists:member(Ts, FromDB), Ts > Time],
     lists:reverse(lists:keysort(1, Applied)).
 
 load_migrations(Dir, FilterFun) ->
     Migrations = check_dir(file:list_dir(Dir)),
     case check_table() of
-        {error, _} = Err -> throw(Err);
-        Applied -> compile_and_load(Dir, FilterFun, Migrations, Applied)
+        {error, _} = Err ->
+            throw(Err);
+        Applied ->
+            ToLoad = FilterFun(Migrations, Applied),
+            lists:map(fun({_, X}) ->
+                load_migration(Dir, binary_to_list(erlang:binary_part(X, 0, size(X) - 5)))
+            end, ToLoad)
     end.
 
 check_dir({error, _} = Err) -> throw(Err);
 check_dir({ok, Filenames}) -> normalize_names(Filenames, []).
 
-normalize_names([<<Short:14/bytes, ".erl">>|T], Acc) ->
+normalize_names([<<?FILEPREFIX, Short:14/bytes, ".beam">> = Name|T], Acc) ->
     Int = list_to_integer(binary_to_list(Short)),
-    normalize_names(T, [{Int, Short}|Acc]);
-normalize_names([<<Short:14/bytes, $_, R/binary>> = Name|T], Acc)
-  when size(R) >= 4
-       andalso erlang:binary_part(R, size(R) - 4, 4) == <<".erl">> ->
-    Base = erlang:binary_part(Name, 0, size(Name) - 4),
+    normalize_names(T, [{Int, Name}|Acc]);
+normalize_names([<<?FILEPREFIX, Short:14/bytes, $_, R/binary>> = Name|T], Acc)
+  when size(R) >= 5
+       andalso erlang:binary_part(R, size(R) - 5, 5) == <<".beam">> ->
     Int = list_to_integer(binary_to_list(Short)),
-    normalize_names(T, [{Int, Base}|Acc]);
+    normalize_names(T, [{Int, Name}|Acc]);
 normalize_names([Name|T], Acc) when is_list(Name) ->
     normalize_names([list_to_binary(Name)|T], Acc);
-normalize_names([Name|T], Acc) ->
-    io:format("Ignoring: ~p~n", [Name]),
+normalize_names([_Name|T], Acc) ->
     normalize_names(T, Acc);
 normalize_names([], Acc) ->
     lists:sort(Acc).
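As an editorial illustration of the new filename filter above (not part of the commit): only `db_`-prefixed `.beam` names carrying a 14-digit timestamp are kept, each paired with its integer version, while everything else is silently skipped by the `[_Name|T]` clause.

    %% Sketch: for a hypothetical ebin/ listing
    %%   ["db_20130731163300_convert_permissions.beam", "migresia_migrations.beam"]
    %% check_dir/1 would return
    %%   [{20130731163300, <<"db_20130731163300_convert_permissions.beam">>}]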
@@ -180,39 +171,13 @@ check_table() ->
         end
     end.
 
-compile_and_load(Dir, FilterFun, ToApply, Applied) ->
-    ToLoad = FilterFun(ToApply, Applied),
-    lists:map(fun(X) -> compile_and_load1(Dir, X) end, ToLoad).
-
-compile_and_load1(Dir, {Short, Name}) ->
-    {Module, Short, Binary} = compile_file(Dir, Short, Name),
-    load_migration(Module, Short, Binary).
-
-%%------------------------------------------------------------------------------
-
-compile_file(Dir, Short, Name) ->
-    File = filename:join(Dir, Name),
-    io:format("Compiling: ~s~n", [File]),
-    case compile:file(binary_to_list(File), [verbose, binary, report]) of
-        {ok, Module, Binary} ->
-            {Module, Short, Binary};
-        {ok, Module, Binary, Warnings} ->
-            io:format("Warnings: ~p~n", [Warnings]),
-            {Module, Short, Binary};
-        {error, Err, Warn} ->
-            io:format("Errors: ~p~nWarnings: ~p~nAborting...~n", [Err, Warn]),
-            throw({error, compile_error});
-        error ->
-            io:format("Errors encountered, Aborting...~n", []),
-            throw({error, compile_error})
-    end.
-
-load_migration(Module, Short, Binary) ->
-    case code:load_binary(Module, Module, Binary) of
+load_migration(Dir, Filename) ->
+    Filepath = filename:join(Dir, Filename),
+    case code:load_abs(Filepath) of
         {module, Module} ->
-            {Module, Short};
+            {Module, code:get_object_code(Module)};
         {error, _} = Err ->
-            io:format("Error when loading module '~p'.~n", [Module]),
+            io:format("Error when loading module ~p.~n", [Filepath]),
             throw(Err)
     end.
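Editorial note: `code:load_abs/1` expects the path to the object file without the `.beam` extension and appends it itself, which is why `load_migrations/2` above strips the last five bytes (`".beam"`) from each file name before calling `load_migration/2`.

    %% Sketch with a hypothetical path:
    %%   load_migration("/opt/rel/lib/my_app/ebin", "db_20130731163300_convert_permissions")
    %% ends up calling
    %%   code:load_abs("/opt/rel/lib/my_app/ebin/db_20130731163300_convert_permissions")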