%% ic_pp.erl
%% (Source retrieved from a web code viewer; original file is part of
%%  the Erlang/OTP "ic" application, ~1740 lines total — this is page 1/5.)
%% ``The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved via the world wide web at http://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
%% AB. All Rights Reserved.''
%%
%% $Id$
%%
-module(ic_pp).

-export([run/2]).

%% Character-class test macros used by the tokeniser.  The multi-expression
%% bodies (e.g. ?is_number) expand to comma-separated guard tests.
-define(is_number(X), X >= $0, X =< $9).
-define(is_upper(X), X >= $A, X =< $Z).
-define(is_lower(X), X >= $a, X =< $z).
-define(is_underline(X), X == $_).
-define(is_tab(X), X == 9).
-define(is_space(X), X == 32).
-define(tab, 9).
-define(space, 32).

%%======================================================================================
%%======================================================================================
%%======================================================================================
%% Preprocessor
%%
%% This preprocessor is equivalent to the gcc-preprocessor. It takes a file name and
%% a list of preprocessor flags as an input and returns a processed text file.
%%
%% The processing is done in two phases.
%% In the first phase the input file is tokenised into a list where all comments are
%% replaced by a space and all "backslash-newline" sequences are removed.
%%
%% In the second phase all macros are expanded.
%%
%%
%% NOTE: #if, #else, and #elif are not yet implemented.
%% Only '#if 0' is implemented to be possible to keep old code as a comment for
%% future reference by putting '#if 0' before it and '#endif' after it.
%%
%%======================================================================================
%%======================================================================================
%%======================================================================================
%%======================================================================================
%% Variables which are used throughout the program:
%% ------------------------------------------------
%%
%% Command          A preprocessor command
%% Current          Temporary variable used when tokenising the file
%% Defs             The currently valid macro definitions
%% Err              The current list of errors = [{file, line number, error text}, ...]
%% File             The tokenised file (or what remains of it when expanding the macros)
%% Flags            The preprocessor flags
%% FN or FileName   The name of the current file
%% IfCou            Used for ifdef/ifndef/endif values: check_all | {endif, Endif, IfLine}
%%                    Endif = number of matching endif's yet to be found
%%                    Ifline = the line number for the first found ifdef/ifndef
%% IncDir           Directories to be searched for included files
%% IncFile          Stack of included files
%% IncLine          The line number of an include
%% L                The current line number
%% Name             Name of a macro
%% Nl               Number of encountered newlines
%% No_of_para       Number of parameters of the currently expanded macro
%% Out              The result of the second step
%% Parameters       The parameters of the currently expanded macro
%% PrevFile         The name of the "parent" file which includes the currently expanded file
%% Rem              Remaining of the file currently being expanded
%% Removed          The tokens removed, used when removing tokens to the end of a line
%% Result           The current result of something
%% SelfRef          List of variables which should not be expanded at the rescan to avoid
%%                    endless loops due to self referencing
%% Str              Temporary string
%% Text             A variable used for string handling, e.g. at error handling
%% Tokens           Temporary list when tokenising
%% War              The current list of warnings = [{file, line number, warning text}, ...]
%% X                Temporary
%%                  variable used when the value is not important
%% Y                Temporary variable used when the value is not important
%%
%%======================================================================================
%%======================================================================================
%%======================================================================================
%%======================================================================================
%% The main entry for the preprocessor
%%
%%
%% Output   {ok, Out, War} | {error, Err}
%%======================================================================================
%%======================================================================================
%%======================================================================================

%% run/2 accepts the file name either as an atom or as a string; an atom is
%% normalised to a string and the call re-dispatched.
%%
%% Fixes vs. original:
%%   * old-style guard test atom/1 (removed from the language) replaced by is_atom/1
%%   * the superfluous old-style 'catch' around file:read_file/1 removed —
%%     file:read_file/1 returns {ok, Bin} | {error, Reason} and does not throw
%%     for a missing file, and the original had no clause matching {'EXIT', _}
%%     anyway.
run(FileName, Flags) when is_atom(FileName) ->
    run(atom_to_list(FileName), Flags);
run(FileName, Flags) ->
    IncDir = include_dir(Flags),
    case file:read_file(FileName) of
        {ok, Bin} ->
            FileList = binary_to_list(Bin),
            run(FileList, FileName, IncDir, Flags);
        {error, _} ->
            Text = "No such file or directory",
            {error, [FileName ++ ": " ++ Text]}
    end.
%% run/4: drive both preprocessing phases over an already-read file and
%% collect the final output, errors and warnings.
run(FileList, FileName, IncDir, Flags) ->
    %%----------------------------------------------------------
    %% Run the first phase, i.e. tokenise the file
    %%----------------------------------------------------------
    File = tokenise(FileList, FileName),

    %%----------------------------------------------------------
    %% Run the second phase, i.e. expand macros
    %%----------------------------------------------------------
    {Out, Err, War, _Defs, IfCou} = expand(File, FileName, IncDir, Flags),

    %%----------------------------------------------------------
    %% Check if all #if #ifdef #ifndef have a matching #endif
    %%----------------------------------------------------------
    %% IfCou carries {endif, StillOpenCount, LineOfFirstIf}; a positive
    %% count means at least one conditional was never closed.
    IfError = case IfCou of
		  {endif, Endif, IfLine} when Endif > 0 ->
		      [{FileName, IfLine, "unterminated `#if' conditional"}];
		  _ ->
		      []
	      end,
    Err2 = Err++IfError,

    case Err2 of
	[] ->
	    %% Out/War were accumulated in reverse order during expansion.
	    {ok, lists:flatten(lists:reverse(Out)), lists:reverse(War)};
	_ ->
	    {error, lists:reverse(Err2)}
    end.

%%======================================================================================
%% The entry for all included files
%%
%%
%% Output   {Out, Defs, Err, War}
%%======================================================================================
%% Preprocess one included file.  IncFile is the stack of including files
%% (head = the direct parent); IncLine is the line of the #include directive.
run_include(FileName, FileList, _Out, Defs, Err, War, IncLine, IncFile, IncDir) ->
    %%----------------------------------------------------------
    %% Run the first phase, i.e. tokenise the file
    %%----------------------------------------------------------
    [PrevFile | _T] = IncFile,
    {File, FileInfoStart, FileInfoEnd} = tokenise(FileList, FileName, IncLine, PrevFile),

    %%----------------------------------------------------------
    %% Run the second phase, i.e. expand macros
    %%----------------------------------------------------------
    %% Try first pass without file info start/end.
    {OutT, ErrT, WarT, DefsT, IfCouT} =
	expand(File, Defs, Err, War, [FileName|IncFile], IncDir),

    %% If the first pass produced nothing but newlines the file was
    %% include-guarded and already processed (presumably — TODO confirm
    %% against expand/6); emit a single newline.  Otherwise rerun the
    %% expansion with the file_info markers wrapped around the tokens.
    {Out2, Err2, War2, Defs2, IfCou2} =
	case only_nls(OutT) of
	    true ->
		%% The file is defined before
		{["\n"], ErrT, WarT, DefsT, IfCouT};
	    false ->
		%% The file is not defined before, try second pass
		expand([FileInfoStart|File]++FileInfoEnd, Defs, Err, War, [FileName|IncFile], IncDir)
	end,

    %%----------------------------------------------------------
    %% Check if all #if #ifdef #ifndef have a matching #endif
    %%----------------------------------------------------------
    IfError = case IfCou2 of
		  {endif, Endif, IfLine} when Endif > 0 ->
		      [{FileName, IfLine, "unterminated `#if' conditional"}];
		  _ ->
		      []
	      end,

    {Out2, Defs2, Err2++IfError, War2}.

%% Return true if there is no data
%% other than new lines.  The expansion output is a list of strings, so a
%% CRLF appears as the two consecutive elements "\r", "\n".
only_nls([]) ->
    true;
only_nls(["\n"|Rem]) ->
    only_nls(Rem);
only_nls(["\r","\n"|Rem]) ->
    only_nls(Rem);
only_nls([_|_Rem]) ->
    false.

%%===================================================================================
%%===================================================================================
%%===================================================================================
%% Tokenise the file
%%
%%
%% Output: File
%%
%% Description:
%% The input file is tokenised into a list where all comments are replaced
%% by a space and all "backslash-newline" sequences are removed.
%%
%% A file information is added at start and end of an included file to set the
%% current file name and line number.
%%
%%
%% A token consists of:
%% --------------------
%%
%% {char, Char}           special characters like ()[]{},!%& etc
%% {command,Command}      a macro command
%% {expanded,Str}         an expanded variable, used to prevent infinite loops
%%                          at self reference
%% {file_info,FI}         start and end information of a file
%%                          FI is a string of the following format:
%%                          "# Line FileName Int" were Int is
%%                          1 if start of an included file,
%%                          2 when returning to "parent" file
%% {nl, L}                newline
%% {number,Num}           variable, a string starting with a number
%% {self_ref,Var}         to allow reference to a variable again, used when expanding
%%                          self refering macros
%% space                  a space
%% space_exp              a space, special notation to prevent not wanted concatenation
%% {string, Str}          a (tail of a) string constant
%%
%% {string_part, Str}     a head of a string constant defined on several consecutive lines
%% {sys_head, Str}        (tail of) the file name of included system file
%% {sys_head_part , Str}  the file name of included system file
%% {var,Var}              variable, a string starting with minuscular or capital letter or
%%                          an underline
%%
%% Note, comments are not removed within a character or string constant
%% or inside an include-definition where the file name is delimited with < >
%%===================================================================================
%%===================================================================================
%%===================================================================================
%% tokenise/2: tokenise a top-level file.  Prepends a file_info token
%% ("# 1 FileName") so the expander knows the current file and line.
tokenise(File, FileName) ->
    {Result, _L} = token(File, 2, [], not_set, 0),
    FI_start = lists:reverse(lists:flatten(io_lib:format("# 1 ~p~n",[FileName]))),
    FileInfoStart = {file_info, FI_start},
    [FileInfoStart | Result].

%% tokenise/4: tokenise an included file.  Builds both the entry marker
%% ("# 1 FileName 1") and the return-to-parent marker
%% ("# IncLine-1 PrevFile 2"); the caller decides whether to wrap them
%% around the token list (see run_include/9).
tokenise(File, FileName, IncLine, PrevFile) ->
    {Result, _L} = token(File, 2, [], not_set, 0),
    FI_start = lists:reverse(lists:flatten(io_lib:format("# 1 ~p 1~n",[FileName]))),
    FileInfoStart = {file_info, FI_start},
    FI_end = lists:reverse(lists:flatten(io_lib:format("# ~p ~p 2~n~n",[IncLine-1,PrevFile]))),
    FileInfoEnd = [{file_info, FI_end}],
    {Result, FileInfoStart, FileInfoEnd}.
% [FileInfoStart | Result] ++ FileInfoEnd.

%%===================================================================================
%% token(InputFile, L, Result, Gen)
%% Gen    information of the first token on the line, default = not_set
%%
%% Output: File
%%===================================================================================

%%==================================================================
%% Normal line
%%==================================================================

%%---------------------------------------
%% All file tokenised
%%---------------------------------------
%% End of input directly after a newline: don't add another {nl,_}.
token([], L, [{nl,NL}|Result], _Gen, _BsNl) when L == NL+1->
    {lists:reverse([{nl,NL}|Result]), L};
%% End of input mid-line: close the token list with a final newline token.
token([], L, Result, _Gen, _BsNl) ->
    {lists:reverse([{nl,L-1}|Result]), L};

%%---------------------------------------
%% String
%%---------------------------------------
%% Gen == string: we are continuing a string constant that started on a
%% previous line; token_string/2 signals 'nl' when the string runs on.
token(File, L, Result, string, BsNl) ->
    case token_string(File, []) of
	{Rem, Str, nl} ->
	    Result1 = [{nl, L}, {string,Str} | Result],
	    token(Rem, L+1, Result1, string, BsNl);
	{Rem, Str} ->
	    token(Rem, L, [{string,Str}|Result], not_set, BsNl)
    end;
%% An opening double quote starts a string constant; a string that spans
%% the line boundary is recorded as {string_part, _}.
token([$"|File], L, Result, Gen, BsNl) ->
    case token_string(File, []) of
	{Rem, Str, nl} ->
	    Result1 = [{nl, L}, {string_part,Str} | Result],
	    token(Rem, L+1, Result1, string, BsNl);
	{Rem, Str} ->
	    token(Rem, L, [{string,Str}|Result], Gen, BsNl)
    end;

%%---------------------------------------
%% Include with < >
%%---------------------------------------
%% Gen == include: continuing a <...> file name from the previous line.
token(File, L, Result, include, BsNl) ->
    case token_include(File, []) of
	{Rem, Str, nl} ->
	    Result1 = [{nl, L}, {sys_head,Str} | Result],
	    token(Rem, L+1, Result1, include, BsNl);
	{Rem, Str} ->
	    token(Rem, L, [{sys_head,Str}|Result], not_set, BsNl)
    end;
%% '<' directly after "#include " (with a space token in between) opens a
%% system-header file name.
token([$<|File], L, [space,{command,"include"}|Result], Gen, BsNl) ->
    case token_include(File, []) of
	{Rem, Str, nl} ->
	    Result1 = [{nl, L}, {sys_head_part,Str}, space, {command,"include"} |Result],
	    token(Rem, L+1,Result1, include, BsNl);
	{Rem, Str} ->
	    Result1 = [{sys_head,Str}, space, {command,"include"} |Result],
	    token(Rem, L, Result1, Gen, BsNl)
    end;
%% Same as above but without an intervening space ("#include<...>").
token([$<|File], L, [{command,"include"}|Result], Gen, BsNl) ->
    case token_include(File, []) of
	{Rem, Str, nl} ->
	    Result1 = [{nl, L}, {sys_head_part,Str}, space, {command,"include"} |Result],
	    token(Rem, L+1,Result1, include, BsNl);
	{Rem, Str} ->
	    Result1 = [{sys_head,Str}, space, {command,"include"} |Result],
	    token(Rem, L, Result1, Gen, BsNl)
    end;
%% (Trailing web-viewer UI text removed — keyboard-shortcut help from the
%%  code-hosting page, not part of the source file.)