diff --git a/verilog/CST/expression_test.cc b/verilog/CST/expression_test.cc
index e4d254bf0b..e683c1b2c9 100644
--- a/verilog/CST/expression_test.cc
+++ b/verilog/CST/expression_test.cc
@@ -32,7 +32,7 @@ namespace verilog {
 namespace {
 
-static constexpr VerilogPreprocess::Config kDefaultPreprocess;
+static VerilogPreprocess::Config kDefaultPreprocess;
 
 using verible::SyntaxTreeSearchTestCase;
 using verible::TextStructureView;
diff --git a/verilog/analysis/BUILD b/verilog/analysis/BUILD
index f628ae249b..9a816a8250 100644
--- a/verilog/analysis/BUILD
+++ b/verilog/analysis/BUILD
@@ -79,10 +79,13 @@ cc_library(
     hdrs = ["flow_tree.h"],
     deps = [
         "//common/text:token-stream-view",
+        "//common/util:interval-set",
         "//common/util:logging",
         "//common/util:status-macros",
         "//verilog/parser:verilog-token-enum",
+        "@com_google_absl//absl/container:flat_hash_set",
         "@com_google_absl//absl/status",
+        "@com_google_absl//absl/status:statusor",
         "@com_google_absl//absl/strings",
     ],
 )
diff --git a/verilog/analysis/extractors_test.cc b/verilog/analysis/extractors_test.cc
index 7df644efa0..ded7c77a08 100644
--- a/verilog/analysis/extractors_test.cc
+++ b/verilog/analysis/extractors_test.cc
@@ -26,7 +26,7 @@ namespace verilog {
 namespace analysis {
 namespace {
 
-static constexpr VerilogPreprocess::Config kDefaultPreprocess;
+static VerilogPreprocess::Config kDefaultPreprocess;
 
 TEST(CollectInterfaceNamesTest, NonModuleTests) {
   const std::pair> kTestCases[] = {
diff --git a/verilog/analysis/flow_tree.cc b/verilog/analysis/flow_tree.cc
index 68bdf0dd55..df45cfce94 100644
--- a/verilog/analysis/flow_tree.cc
+++ b/verilog/analysis/flow_tree.cc
@@ -1,4 +1,4 @@
-// Copyright 2017-2022 The Verible Authors.
+// Copyright 2017-2023 The Verible Authors.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -126,6 +126,10 @@ absl::Status FlowTree::MacroFollows(
     return absl::InvalidArgumentError("Error macro name can't be extracted.");
   }
   auto macro_iterator = conditional_iterator + 1;
+  if (macro_iterator->token_enum() == TK_SPACE) {
+    // FIXME: It's not always there?
+    macro_iterator++;
+  }
   if (macro_iterator->token_enum() != PP_Identifier) {
     return absl::InvalidArgumentError("Expected identifier for macro name.");
   }
@@ -142,6 +146,10 @@ absl::Status FlowTree::AddMacroOfConditional(
         "Error no macro follows the conditional directive.");
   }
   auto macro_iterator = conditional_iterator + 1;
+  if (macro_iterator->token_enum() == TK_SPACE) {
+    // FIXME: It's not always there?
+    macro_iterator++;
+  }
   auto macro_identifier = macro_iterator->text();
   if (conditional_macro_id_.find(macro_identifier) ==
       conditional_macro_id_.end()) {
@@ -162,6 +170,10 @@ int FlowTree::GetMacroIDOfConditional(
     return -1;
   }
   auto macro_iterator = conditional_iterator + 1;
+  if (macro_iterator->token_enum() == TK_SPACE) {
+    // FIXME: It's not always there?
+    macro_iterator++;
+  }
   auto macro_identifier = macro_iterator->text();
   // It is always assumed that the macro already exists in the map.
   return conditional_macro_id_[macro_identifier];
@@ -176,6 +188,83 @@ absl::Status FlowTree::GenerateVariants(const VariantReceiver &receiver) {
   return DepthFirstSearch(receiver, source_sequence_.begin());
 }
 
+absl::StatusOr<FlowTree::DefineVariants> FlowTree::MinCoverDefineVariants() {
+  auto status = GenerateControlFlowTree();
+  if (!status.ok()) return status;
+  verible::IntervalSet<int64_t> covered;  // Tokens covered by
+                                          // MinCoverDefineVariants.
+  verible::IntervalSet<int64_t> last_covered;  // Tokens covered
+                                               // by the previous iterations.
+  DefineVariants define_variants;  // The result – all define variants that
+                                   // should cover the entire source
+  DefineSet visited;  // Visited defines are ones that are assumed to be defined
+                      // or undefined (decided in a previous iteration)
+  const int64_t tok_count = static_cast<int64_t>(source_sequence_.size());
+  while (!covered.Contains({0, tok_count})) {
+    DefineSet defines;  // Define sets are moved into the define variants list,
+                        // so we make a new one each iteration
+    visited.clear();  // We keep the visited set to avoid unnecessary
+                      // allocations, but clear it each iteration
+    TokenSequenceConstIterator tok_it = source_sequence_.begin();
+    while (tok_it < source_sequence_.end()) {
+      covered.Add(std::distance(source_sequence_.begin(), tok_it));
+      if (tok_it->token_enum() == PP_ifdef ||
+          tok_it->token_enum() == PP_ifndef ||
+          tok_it->token_enum() == PP_elsif) {
+        const auto macro_id_it = tok_it + 2;
+        auto macro_text = macro_id_it->text();
+        bool negated = tok_it->token_enum() == PP_ifndef;
+        // If this macro was already visited (either defined/undefined), we
+        // need to stick to the same branch. TODO: handle `defines
+        if (visited.contains(macro_text)) {
+          bool assume_condition_is_true =
+              (negated ^ defines.contains(macro_text));
+          tok_it = edges_[tok_it][assume_condition_is_true ? 0 : 1];
+        } else {
+          // First time we see this macro; mark as visited
+          visited.insert(macro_text);
+          const auto if_it = edges_[tok_it][0];
+          const auto if_idx = std::distance(source_sequence_.begin(), if_it);
+          const auto else_it = edges_[tok_it][1];
+          const auto else_idx =
+              std::distance(source_sequence_.begin(), else_it);
+          if (!covered.Contains({if_idx, else_idx})) {
+            // If the `ifdef is not covered, we assume the condition is true
+            if (!negated) defines.insert(macro_text);
+            tok_it = if_it;
+          } else {
+            // Else we assume the condition is false
+            if (negated) defines.insert(macro_text);
+            tok_it = else_it;
+          }
+        }
+      } else {
+        const auto it = edges_.find(tok_it);
+        if (it == edges_.end() || it->second.empty()) {
+          // If there's no outgoing edge, just move to the next token.
+          tok_it++;
+        } else {
+          // Else jump
+          tok_it = edges_[tok_it][0];
+        }
+      }
+    }
+    define_variants.push_back(std::move(defines));
+    // To prevent an infinite loop, if nothing new was covered, break.
+    if (last_covered == covered) {
+      // TODO: If there are nested `ifdefs that contradict each other early in
+      // the source, this will prevent us from traversing the rest of the flow
+      // tree. It would be better to detect this case, assume that the
+      // contradicting part is covered, and continue the analysis.
+      VLOG(4) << "Giving up on finding all define variants";
+      break;  // FIXME: Perhaps we should error?
+    }
+    last_covered = covered;
+  }
+  VLOG(4) << "Done generating define variants. Coverage: " << covered;
+  return define_variants;
+}
+
 // Constructs the control flow tree, which determines the edge from each node
 // (token index) to the next possible childs, And save edge_from_iterator in
 // edges_.
diff --git a/verilog/analysis/flow_tree.h b/verilog/analysis/flow_tree.h
index c5ed13a8e3..906813f012 100644
--- a/verilog/analysis/flow_tree.h
+++ b/verilog/analysis/flow_tree.h
@@ -1,4 +1,4 @@
-// Copyright 2017-2022 The Verible Authors.
+// Copyright 2017-2023 The Verible Authors.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -20,8 +20,11 @@
 #include
 #include
 
+#include "absl/container/flat_hash_set.h"
 #include "absl/status/status.h"
+#include "absl/status/statusor.h"
 #include "common/text/token_stream_view.h"
+#include "common/util/interval_set.h"
 
 namespace verilog {
@@ -80,6 +83,17 @@ class FlowTree {
   // Generates all possible variants.
   absl::Status GenerateVariants(const VariantReceiver &receiver);
 
+  // Set of macro name defines.
+  using DefineSet = absl::flat_hash_set<absl::string_view>;
+
+  // A list of macro name sets; each set represents a variant of the source;
+  // together they should cover the entire source.
+  using DefineVariants = std::vector<DefineSet>;
+
+  // Returns the minimum set of defines needed to generate token stream
+  // variants that cover the entire source.
+  absl::StatusOr<DefineVariants> MinCoverDefineVariants();
+
   // Returns all the used macros in conditionals, ordered with the same ID as
   // used in BitSets.
   const std::vector &GetUsedMacros() {
diff --git a/verilog/analysis/verilog_analyzer_test.cc b/verilog/analysis/verilog_analyzer_test.cc
index a56f5ec982..1da34b33c0 100644
--- a/verilog/analysis/verilog_analyzer_test.cc
+++ b/verilog/analysis/verilog_analyzer_test.cc
@@ -62,7 +62,7 @@ using verible::SyntaxTreeLeaf;
 using verible::TokenInfo;
 using verible::TokenInfoTestData;
 
-static constexpr verilog::VerilogPreprocess::Config kDefaultPreprocess;
+static verilog::VerilogPreprocess::Config kDefaultPreprocess;
 
 bool TreeContainsToken(const ConcreteSyntaxTree& tree, const TokenInfo& token) {
   const auto* matching_leaf =
@@ -509,10 +509,10 @@ TEST(AnalyzeVerilogAutomaticMode, InferredModuleBodyMode) {
 }
 
 TEST(AnalyzeVerilogAutomaticMode, AutomaticWithFallback) {
-  static constexpr verilog::VerilogPreprocess::Config kNoBranchFilter{
+  static verilog::VerilogPreprocess::Config kNoBranchFilter{
       .filter_branches = false,
   };
-  static constexpr verilog::VerilogPreprocess::Config kWithBranchFilter{
+  static verilog::VerilogPreprocess::Config kWithBranchFilter{
       .filter_branches = true,
   };
diff --git a/verilog/analysis/verilog_project.cc b/verilog/analysis/verilog_project.cc
index 4a041d73ad..ca9d103d94 100644
--- a/verilog/analysis/verilog_project.cc
+++ b/verilog/analysis/verilog_project.cc
@@ -36,7 +36,7 @@ namespace verilog {
 // All files we process with the verilog project, essentially applications that
 // build a symbol table (project-tool, kythe-indexer) only benefit from
 // processing the same sequence of tokens a synthesis tool sees.
-static constexpr verilog::VerilogPreprocess::Config kPreprocessConfig{
+static verilog::VerilogPreprocess::Config kPreprocessConfig{
     .filter_branches = true,
 };
diff --git a/verilog/formatting/BUILD b/verilog/formatting/BUILD
index b105c9e9ab..b9050fb2b8 100644
--- a/verilog/formatting/BUILD
+++ b/verilog/formatting/BUILD
@@ -163,6 +163,7 @@ cc_library(
         "//common/util:vector-tree-iterators",
         "//verilog/CST:declaration",
         "//verilog/CST:module",
+        "//verilog/analysis:flow-tree",
         "//verilog/analysis:verilog-analyzer",
         "//verilog/analysis:verilog-equivalence",
         "//verilog/parser:verilog-token-enum",
diff --git a/verilog/formatting/formatter.cc b/verilog/formatting/formatter.cc
index dafe89a799..715761181b 100644
--- a/verilog/formatting/formatter.cc
+++ b/verilog/formatting/formatter.cc
@@ -20,6 +20,7 @@
 #include
 #include
 
+#include "absl/algorithm/container.h"
 #include "absl/status/status.h"
 #include "absl/status/statusor.h"
 #include "common/formatting/format_token.h"
@@ -46,6 +47,7 @@
 #include "common/util/vector_tree_iterators.h"
 #include "verilog/CST/declaration.h"
 #include "verilog/CST/module.h"
+#include "verilog/analysis/flow_tree.h"
 #include "verilog/analysis/verilog_analyzer.h"
 #include "verilog/analysis/verilog_equivalence.h"
 #include "verilog/formatting/align.h"
@@ -53,6 +55,7 @@
 #include "verilog/formatting/format_style.h"
 #include "verilog/formatting/token_annotator.h"
 #include "verilog/formatting/tree_unwrapper.h"
+#include "verilog/parser/verilog_lexer.h"
 #include "verilog/parser/verilog_token_enum.h"
 #include "verilog/preprocessor/verilog_preprocess.h"
 
@@ -116,19 +119,29 @@ Status VerifyFormatting(const verible::TextStructureView& text_structure,
   // Note: We cannot just Tokenize() and compare because Analyze()
   // performs additional transformations like expanding MacroArgs to
   // expression subtrees.
-  const auto reanalyzer = VerilogAnalyzer::AnalyzeAutomaticMode(
-      formatted_output, filename, verilog::VerilogPreprocess::Config());
-  const auto relex_status = ABSL_DIE_IF_NULL(reanalyzer)->LexStatus();
-  const auto reparse_status = reanalyzer->ParseStatus();
+  verilog::FlowTree control_flow_tree(text_structure.TokenStream());
+  const auto define_variants = control_flow_tree.MinCoverDefineVariants();
+  if (!define_variants.ok()) return define_variants.status();
+  for (const FlowTree::DefineSet& defines : *define_variants) {
+    VerilogPreprocess::Config config{.filter_branches = true};
+    for (auto define : defines) {
+      config.macro_definitions.emplace(define, std::nullopt);
+    }
 
-  if (!relex_status.ok() || !reparse_status.ok()) {
-    const auto& token_errors = reanalyzer->TokenErrorMessages();
-    // Only print the first error.
-    if (!token_errors.empty()) {
-      return absl::DataLossError(
-          absl::StrCat("Error lex/parsing-ing formatted output. "
-                       "Please file a bug.\nFirst error: ",
-                       token_errors.front()));
+    const auto reanalyzer = VerilogAnalyzer::AnalyzeAutomaticMode(
+        formatted_output, filename, config);
+    const auto relex_status = ABSL_DIE_IF_NULL(reanalyzer)->LexStatus();
+    const auto reparse_status = reanalyzer->ParseStatus();
+
+    if (!relex_status.ok() || !reparse_status.ok()) {
+      const auto& token_errors = reanalyzer->TokenErrorMessages();
+      // Only print the first error.
+      if (!token_errors.empty()) {
+        return absl::DataLossError(
+            absl::StrCat("Error lexing/parsing formatted output. "
+                         "Please file a bug.\nFirst error: ",
+                         token_errors.front()));
+      }
     }
   }
 
@@ -199,10 +212,10 @@ static Status ReformatVerilog(absl::string_view original_text,
 }
 
 static absl::StatusOr<std::unique_ptr<VerilogAnalyzer>> ParseWithStatus(
-    absl::string_view text, absl::string_view filename) {
+    absl::string_view text, absl::string_view filename,
+    const verilog::VerilogPreprocess::Config& preprocess_config = {}) {
   std::unique_ptr<VerilogAnalyzer> analyzer =
-      VerilogAnalyzer::AnalyzeAutomaticMode(
-          text, filename, verilog::VerilogPreprocess::Config());
+      VerilogAnalyzer::AnalyzeAutomaticMode(text, filename, preprocess_config);
   {  // Lex and parse code. Exit on failure.
     const auto lex_status = ABSL_DIE_IF_NULL(analyzer)->LexStatus();
@@ -265,13 +278,36 @@ Status FormatVerilog(absl::string_view text, absl::string_view filename,
                      const FormatStyle& style, std::ostream& formatted_stream,
                      const LineNumberSet& lines, const ExecutionControl& control) {
-  const auto analyzer = ParseWithStatus(text, filename);
-  if (!analyzer.ok()) return analyzer.status();
+  // Prepare define variants
+  VerilogAnalyzer analyzer(text, filename);
+  if (Status tokenize_status = analyzer.Tokenize(); !tokenize_status.ok()) {
+    return tokenize_status;
+  }
+  FlowTree control_flow_tree(analyzer.Data().TokenStream());
+  const auto define_variants = control_flow_tree.MinCoverDefineVariants();
+  if (!define_variants.ok()) return define_variants.status();
+  // Proceed with formatting for each variant
+  std::string text_to_format{text.begin(), text.end()};
+  Status format_status;
+  for (const FlowTree::DefineSet& defines : *define_variants) {
+    // Set up preprocess config
+    VerilogPreprocess::Config config{.filter_branches = true};
+    for (auto define : defines) {
+      config.macro_definitions.emplace(define, std::nullopt);
+    }
+
+    const auto analyzer = ParseWithStatus(text_to_format, filename, config);
+    if (!analyzer.ok()) return analyzer.status();
+
+    const verible::TextStructureView& text_structure = analyzer->get()->Data();
+    std::string formatted_text;
+    format_status = FormatVerilog(text_structure, filename, style,
+                                  &formatted_text, lines, control);
+    if (!format_status.ok()) break;
+    text_to_format = formatted_text;
+  }
 
-  const verible::TextStructureView& text_structure = analyzer->get()->Data();
-  std::string formatted_text;
-  Status format_status = FormatVerilog(text_structure, filename, style,
-                                       &formatted_text, lines, control);
+  const absl::string_view formatted_text = text_to_format;
 
   // Commit formatted text to the output stream independent of status.
   formatted_stream << formatted_text;
   if (!format_status.ok()) return format_status;
diff --git a/verilog/formatting/formatter_test.cc b/verilog/formatting/formatter_test.cc
index 6f7d28c22f..2cb1206aa0 100644
--- a/verilog/formatting/formatter_test.cc
+++ b/verilog/formatting/formatter_test.cc
@@ -54,7 +54,7 @@ absl::Status VerifyFormatting(const verible::TextStructureView& text_structure,
 
 namespace {
 
-static constexpr VerilogPreprocess::Config kDefaultPreprocess;
+static VerilogPreprocess::Config kDefaultPreprocess;
 
 using absl::StatusCode;
 using testing::HasSubstr;
diff --git a/verilog/formatting/token_annotator.cc b/verilog/formatting/token_annotator.cc
index 37e12ba74d..ad63eeb1b9 100644
--- a/verilog/formatting/token_annotator.cc
+++ b/verilog/formatting/token_annotator.cc
@@ -746,6 +746,12 @@ static WithReason<SpacingOptions> BreakDecisionBetween(
     const FormatStyle& style, const PreFormatToken& left,
     const PreFormatToken& right, const SyntaxTreeContext& left_context,
     const SyntaxTreeContext& right_context) {
+  if (IsPreprocessorControlFlow(verilog_tokentype(right.TokenEnum()))) {
+    return {SpacingOptions::kPreserve,
+            "Use original spacing before preprocessor control (FIXME: for some "
+            "reason works better than kMustWrap)."};
+  }
+
   // For now, leave everything inside [dimensions] alone.
   if (InDeclaredDimensions(right_context)) {
     // ... except for the spacing immediately around '[' and ']',
diff --git a/verilog/formatting/tree_unwrapper.cc b/verilog/formatting/tree_unwrapper.cc
index fe1bfc5958..334e621ec2 100644
--- a/verilog/formatting/tree_unwrapper.cc
+++ b/verilog/formatting/tree_unwrapper.cc
@@ -146,6 +146,9 @@ enum TokenScannerState {
   // While encountering any string of consecutive newlines
   kHaveNewline,
 
+  // While encountering tokens related to preprocessor control flow
+  kPreprocessorControlFlow,
+
   // Transition from newline to non-newline
   kNewPartition,
 
@@ -163,6 +166,8 @@ TokenScannerStateStrings() {
       {"kStart", TokenScannerState::kStart},
      {"kHaveNewline", TokenScannerState::kHaveNewline},
       {"kNewPartition", TokenScannerState::kNewPartition},
+      {"kPreprocessorControlFlow",
+       TokenScannerState::kPreprocessorControlFlow},
       {"kEndWithNewline", TokenScannerState::kEndWithNewline},
       {"kEndNoNewline", TokenScannerState::kEndNoNewline},
   });
@@ -198,12 +203,15 @@ class TreeUnwrapper::TokenScanner {
   // Calls TransitionState using current_state_ and the tranition token_Type
   void UpdateState(verilog_tokentype token_type) {
     current_state_ = TransitionState(current_state_, token_type);
+    on_pp_identifier = token_type == PP_Identifier;
     seen_any_nonspace_ |= IsComment(token_type);
   }
 
   // Returns true if this is a state that should start a new token partition.
   bool ShouldStartNewPartition() const {
     return current_state_ == State::kNewPartition ||
+           (current_state_ == State::kPreprocessorControlFlow &&
+            !on_pp_identifier) ||
           (current_state_ == State::kEndWithNewline && seen_any_nonspace_);
   }
 
@@ -212,6 +220,7 @@ class TreeUnwrapper::TokenScanner {
   // The current state of the TokenScanner.
   State current_state_ = kStart;
+  bool on_pp_identifier = false;
   bool seen_any_nonspace_ = false;
 
   // Transitions the TokenScanner given a TokenState and a verilog_tokentype
@@ -241,8 +250,15 @@ TokenScannerState TreeUnwrapper::TokenScanner::TransitionState(
     const State& old_state, verilog_tokentype token_type) {
   VLOG(4) << "state transition on: " << old_state
           << ", token: " << verilog_symbol_name(token_type);
+  if (IsPreprocessorControlToken(token_type)) return kPreprocessorControlFlow;
   State new_state = old_state;
   switch (old_state) {
+    case kPreprocessorControlFlow: {
+      if (token_type != PP_Identifier) {
+        new_state = kNewPartition;
+      }
+      break;
+    }
     case kStart: {
       if (IsNewlineOrEOF(token_type)) {
         new_state = kHaveNewline;
@@ -1326,6 +1342,11 @@ void TreeUnwrapper::SetIndentationsAndCreatePartitions(
     }
     case NodeEnum::kPortActualList:  // covers named and positional ports
    {
+#ifdef __clang__
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wunreachable-code"
+#endif
+      break;  // FIXME
      const int indent =
          suppress_indentation ? 0 : style_.NamedPortIndentation();
      const auto policy = Context().IsInside(NodeEnum::kDataDeclaration) ||
                                  Context().IsInside(
                                      NodeEnum::kGateInstanceRegisterVariableList)
                              ? PartitionPolicyEnum::kTabularAlignment
                              : PartitionPolicyEnum::kFitOnLineElseExpand;
      VisitIndentedSection(node, indent, policy);
      break;
+#ifdef __clang__
+#pragma clang diagnostic pop
+#endif
    }
     case NodeEnum::kPortDeclarationList: {
@@ -2995,6 +3019,11 @@ void TreeUnwrapper::ReshapeTokenPartitions(
     // TODO(fangism): always,initial,final should be handled the same way
     case NodeEnum::kInitialStatement:
     case NodeEnum::kFinalStatement: {
+#ifdef __clang__
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wunreachable-code"
+#endif
+      break;  // FIXME
       // Check if 'initial' is separated from 'begin' by EOL_COMMENT. If so,
       // adjust indentation and do not merge leaf into previous leaf.
       const verible::UnwrappedLine& unwrapped_line = partition.Value();
@@ -3007,6 +3036,9 @@ void TreeUnwrapper::ReshapeTokenPartitions(
           &partition.Children()[(partition.Children().size() - 1)],
           style.indentation_spaces);
       break;
+#ifdef __clang__
+#pragma clang diagnostic pop
+#endif
     }
 
     // In these cases, merge the 'begin' partition of the statement block
diff --git a/verilog/preprocessor/verilog_preprocess.cc b/verilog/preprocessor/verilog_preprocess.cc
index b8e2b21f40..6326a0836f 100644
--- a/verilog/preprocessor/verilog_preprocess.cc
+++ b/verilog/preprocessor/verilog_preprocess.cc
@@ -44,10 +44,13 @@ namespace verilog {
 using verible::TokenGenerator;
 using verible::TokenStreamView;
 using verible::container::FindOrNull;
+using verible::container::FindWithDefault;
 using verible::container::InsertOrUpdate;
 
 VerilogPreprocess::VerilogPreprocess(const Config& config)
-    : VerilogPreprocess(config, nullptr) {}
+    : VerilogPreprocess(config, nullptr) {
+  preprocess_data_.macro_definitions = config_.macro_definitions;
+}
 
 VerilogPreprocess::VerilogPreprocess(const Config& config, FileOpener opener)
     : config_(config), file_opener_(std::move(opener)) {
@@ -55,6 +58,7 @@ VerilogPreprocess::VerilogPreprocess(const Config& config, FileOpener opener)
     : config_(config), file_opener_(std::move(opener)) {
   // place a toplevel 'conditional' that is always selected.
   // Thus we only need to test in `else and `endif to see if we underrun due
   // to unbalanced statements.
+  preprocess_data_.macro_definitions = config_.macro_definitions;
   conditional_block_.push(
       BranchBlock(true, true, verible::TokenInfo::EOFToken()));
 }
@@ -288,8 +292,8 @@ absl::Status VerilogPreprocess::HandleMacroIdentifier(
 
   // Finding the macro definition.
   const absl::string_view sv = (*iter)->text();
-  const auto* found =
-      FindOrNull(preprocess_data_.macro_definitions, sv.substr(1));
+  const auto found = FindWithDefault(preprocess_data_.macro_definitions,
+                                     sv.substr(1), std::nullopt);
   if (!found) {
     preprocess_data_.errors.emplace_back(
         **iter,
@@ -302,7 +306,7 @@ absl::Status VerilogPreprocess::HandleMacroIdentifier(
     verible::MacroCall macro_call;
     RETURN_IF_ERROR(
         ConsumeAndParseMacroCall(iter, generator, &macro_call, *found));
-    RETURN_IF_ERROR(ExpandMacro(macro_call, found));
+    RETURN_IF_ERROR(ExpandMacro(macro_call, *found));
   }
   auto& lexed = preprocess_data_.lexed_macros_backup.back();
   if (!forward) return absl::OkStatus();
@@ -378,16 +382,16 @@ absl::Status VerilogPreprocess::ExpandText(
 // `MACRO([param1],[param2],...)
 absl::Status VerilogPreprocess::ExpandMacro(
     const verible::MacroCall& macro_call,
-    const verible::MacroDefinition* macro_definition) {
+    const verible::MacroDefinition& macro_definition) {
   const auto& actual_parameters = macro_call.positional_arguments;
 
   std::map subs_map;
-  if (macro_definition->IsCallable()) {
-    RETURN_IF_ERROR(macro_definition->PopulateSubstitutionMap(actual_parameters,
-                                                              &subs_map));
+  if (macro_definition.IsCallable()) {
+    RETURN_IF_ERROR(
+        macro_definition.PopulateSubstitutionMap(actual_parameters, &subs_map));
   }
 
-  VerilogLexer lexer(macro_definition->DefinitionText().text());
+  VerilogLexer lexer(macro_definition.DefinitionText().text());
   verible::TokenSequence lexed_sequence;
   verible::TokenSequence expanded_lexed_sequence;
   // Populating the lexed token sequence.
@@ -420,7 +424,7 @@ absl::Status VerilogPreprocess::ExpandMacro(
       for (auto& u : expanded_child) expanded_lexed_sequence.push_back(u);
       continue;
     }
-    if (macro_definition->IsCallable()) {
+    if (macro_definition.IsCallable()) {
       // Check if the last token is a formal parameter
       const auto* replacement = FindOrNull(subs_map, last_token.text());
       if (replacement) {
diff --git a/verilog/preprocessor/verilog_preprocess.h b/verilog/preprocessor/verilog_preprocess.h
index 69d9759835..e9c5ce13ca 100644
--- a/verilog/preprocessor/verilog_preprocess.h
+++ b/verilog/preprocessor/verilog_preprocess.h
@@ -69,7 +69,8 @@ struct VerilogPreprocessError {
 // Information that results from preprocessing.
 struct VerilogPreprocessData {
   using MacroDefinition = verible::MacroDefinition;
-  using MacroDefinitionRegistry = std::map<absl::string_view, MacroDefinition>;
+  using MacroDefinitionRegistry =
+      std::map<absl::string_view, std::optional<MacroDefinition>>;
   using TokenSequence = std::vector;
 
   // Resulting token stream after preprocessing
@@ -94,6 +95,8 @@ struct VerilogPreprocessData {
 class VerilogPreprocess {
   using TokenStreamView = verible::TokenStreamView;
   using MacroDefinition = verible::MacroDefinition;
+  using MacroDefinitionRegistry =
+      std::map<absl::string_view, std::optional<MacroDefinition>>;
   using MacroParameterInfo = verible::MacroParameterInfo;
 
  public:
@@ -115,7 +118,9 @@ class VerilogPreprocess {
     // Expand macro definition bodies, this will relexes the macro body.
     bool expand_macros = false;
-    // TODO(hzeller): Provide a map of command-line provided +define+'s
+
+    MacroDefinitionRegistry macro_definitions;
+    // TODO(hzeller): Provide a way to +define+ from the command line.
   };
 
   explicit VerilogPreprocess(const Config& config);
@@ -182,7 +187,7 @@ class VerilogPreprocess {
   void RegisterMacroDefinition(const MacroDefinition&);
   absl::Status ExpandText(const absl::string_view&);
   absl::Status ExpandMacro(const verible::MacroCall&,
-                           const verible::MacroDefinition*);
+                           const verible::MacroDefinition&);
   absl::Status HandleInclude(TokenStreamView::const_iterator,
                              const StreamIteratorGenerator&);
 
diff --git a/verilog/preprocessor/verilog_preprocess_test.cc b/verilog/preprocessor/verilog_preprocess_test.cc
index cb72019ca2..af609c442c 100644
--- a/verilog/preprocessor/verilog_preprocess_test.cc
+++ b/verilog/preprocessor/verilog_preprocess_test.cc
@@ -37,7 +37,7 @@ namespace {
 using testing::ElementsAre;
 using testing::Pair;
 using testing::StartsWith;
-using verible::container::FindOrNull;
+using verible::container::FindWithDefault;
 using verible::file::CreateDir;
 using verible::file::JoinPath;
 using verible::file::testing::ScopedTestFile;
@@ -171,8 +171,8 @@ TEST(VerilogPreprocessTest, OneMacroDefinitionNoParamsNoValue) {
 
   const auto& definitions = tester.PreprocessorData().macro_definitions;
   EXPECT_THAT(definitions, ElementsAre(Pair("FOOOO", testing::_)));
-  auto macro = FindOrNull(definitions, "FOOOO");
-  ASSERT_NE(macro, nullptr);
+  auto macro = FindWithDefault(definitions, "FOOOO", std::nullopt);
+  ASSERT_TRUE(macro.has_value());
   EXPECT_EQ(macro->DefinitionText().text(), "");
   EXPECT_FALSE(macro->IsCallable());
   EXPECT_TRUE(macro->Parameters().empty());
@@ -187,8 +187,8 @@ TEST(VerilogPreprocessTest, OneMacroDefinitionNoParamsSimpleValue) {
 
   const auto& definitions = tester.PreprocessorData().macro_definitions;
   EXPECT_THAT(definitions, ElementsAre(Pair("FOOOO", testing::_)));
-  auto macro = FindOrNull(definitions, "FOOOO");
-  ASSERT_NE(macro, nullptr);
+  auto macro = FindWithDefault(definitions, "FOOOO", std::nullopt);
+  ASSERT_TRUE(macro.has_value());
   EXPECT_EQ(macro->DefinitionText().text(), "\"bar\"");
   EXPECT_FALSE(macro->IsCallable());
   EXPECT_TRUE(macro->Parameters().empty());
@@ -202,8 +202,8 @@ TEST(VerilogPreprocessTest, OneMacroDefinitionOneParamWithValue) {
 
   const auto& definitions = tester.PreprocessorData().macro_definitions;
   EXPECT_THAT(definitions, ElementsAre(Pair("FOOOO", testing::_)));
-  auto macro = FindOrNull(definitions, "FOOOO");
-  ASSERT_NE(macro, nullptr);
+  auto macro = FindWithDefault(definitions, "FOOOO", std::nullopt);
+  ASSERT_TRUE(macro.has_value());
   EXPECT_EQ(macro->DefinitionText().text(), "(x+1)");
   EXPECT_TRUE(macro->IsCallable());
   const auto& params = macro->Parameters();
@@ -221,8 +221,8 @@ TEST(VerilogPreprocessTest, OneMacroDefinitionOneParamDefaultWithValue) {
 
   const auto& definitions = tester.PreprocessorData().macro_definitions;
   EXPECT_THAT(definitions, ElementsAre(Pair("FOOOO", testing::_)));
-  auto macro = FindOrNull(definitions, "FOOOO");
-  ASSERT_NE(macro, nullptr);
+  auto macro = FindWithDefault(definitions, "FOOOO", std::nullopt);
+  ASSERT_TRUE(macro.has_value());
   EXPECT_EQ(macro->DefinitionText().text(), "(x+3)");
   EXPECT_TRUE(macro->IsCallable());
   const auto& params = macro->Parameters();
@@ -243,15 +243,15 @@ TEST(VerilogPreprocessTest, TwoMacroDefinitions) {
   EXPECT_THAT(definitions, ElementsAre(Pair("BAAAAR", testing::_),
                                        Pair("FOOOO", testing::_)));
   {
-    auto macro = FindOrNull(definitions, "BAAAAR");
-    ASSERT_NE(macro, nullptr);
+    auto macro = FindWithDefault(definitions, "BAAAAR", std::nullopt);
+    ASSERT_TRUE(macro.has_value());
     EXPECT_TRUE(macro->IsCallable());
     const auto& params = macro->Parameters();
     EXPECT_EQ(params.size(), 2);
   }
   {
-    auto macro = FindOrNull(definitions, "FOOOO");
-    ASSERT_NE(macro, nullptr);
+    auto macro = FindWithDefault(definitions, "FOOOO", std::nullopt);
+    ASSERT_TRUE(macro.has_value());
     EXPECT_TRUE(macro->IsCallable());
     const auto& params = macro->Parameters();
     EXPECT_EQ(params.size(), 1);
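
Taken together, the pieces in this patch compose as follows: FlowTree::MinCoverDefineVariants() derives a small list of `define sets from the un-preprocessed token stream, and each set is fed back into the preprocessor through the new VerilogPreprocess::Config::macro_definitions field, so that every conditional branch is eventually lexed, parsed, and formatted. The sketch below illustrates that call pattern; the helper name AnalyzeAllVariants and the exact system includes are illustrative assumptions, not part of this patch.

// Illustrative sketch only (not part of this patch): analyze every
// preprocessor variant of a source file using the APIs changed above.
#include <iostream>
#include <optional>

#include "absl/status/status.h"
#include "absl/strings/string_view.h"
#include "verilog/analysis/flow_tree.h"
#include "verilog/analysis/verilog_analyzer.h"
#include "verilog/preprocessor/verilog_preprocess.h"

absl::Status AnalyzeAllVariants(absl::string_view text,
                                absl::string_view filename) {
  // Tokenize once to build the preprocessor control-flow tree.
  verilog::VerilogAnalyzer analyzer(text, filename);
  if (auto status = analyzer.Tokenize(); !status.ok()) return status;

  verilog::FlowTree flow_tree(analyzer.Data().TokenStream());
  const auto define_variants = flow_tree.MinCoverDefineVariants();
  if (!define_variants.ok()) return define_variants.status();

  for (const verilog::FlowTree::DefineSet& defines : *define_variants) {
    // Pre-define this variant's macros; filter_branches drops the
    // conditional branches that the defines de-select.
    verilog::VerilogPreprocess::Config config{.filter_branches = true};
    for (auto define : defines) {
      config.macro_definitions.emplace(define, std::nullopt);
    }
    const auto variant_analyzer =
        verilog::VerilogAnalyzer::AnalyzeAutomaticMode(text, filename, config);
    if (auto status = variant_analyzer->ParseStatus(); !status.ok()) {
      return status;  // Report the first variant that fails to parse.
    }
  }
  std::cout << "Analyzed " << define_variants->size() << " variant(s)\n";
  return absl::OkStatus();
}

The FormatVerilog() change above follows the same loop, except that each iteration formats the text with that variant's defines applied and feeds the formatted output into the next iteration, so every reachable branch ends up formatted in the final output.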