resolve_negative_extract.h
#ifndef BACKENDS_TOFINO_BF_P4C_PARDE_RESOLVE_NEGATIVE_EXTRACT_H_
#define BACKENDS_TOFINO_BF_P4C_PARDE_RESOLVE_NEGATIVE_EXTRACT_H_

#include <map>
#include <sstream>
#include <utility>

#include "bf-p4c/common/utils.h"
#include "bf-p4c/device.h"
#include "bf-p4c/parde/dump_parser.h"
#include "bf-p4c/parde/parde_visitor.h"
#include "bf-p4c/parde/parser_dominator_builder.h"
#include "bf-p4c/parde/parser_info.h"
#include "ir/ir-generated.h"
#include "lib/indent.h"
#include "lib/ordered_map.h"

/**
 * @brief For extracts with negative source, i.e. the source is in an earlier state, adjust the
 * states' shift amounts so that the referenced data is still available in the input buffer
 * when it is needed.
 */
struct ResolveNegativeExtract : public PassManager {
    /**
     * Collect parser states that need data lying farther back than the parser input buffer can
     * hold, i.e. data that would already have been shifted out, and compute how much of the
     * preceding state's shift has to be delayed to keep that data available.
     */
    struct CollectNegativeExtractOutOfBufferStates : public ParserInspector {
        const CollectParserInfo &parserInfo;

        /// In-buffer offsets of states.
        std::map<cstring, unsigned> state_to_shift;

        explicit CollectNegativeExtractOutOfBufferStates(const CollectParserInfo &pi)
            : parserInfo(pi) {}

        unsigned max_buff_size = 0;

        bool preorder(const IR::BFN::PacketRVal *rval) override {
            auto extract = findContext<IR::BFN::Extract>();
            if (extract && rval->range.lo < 0) {
                auto state = findContext<IR::BFN::ParserState>();
                unsigned shift = (-rval->range.lo + 7) / 8;
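                // For example, an extract whose source starts 20 bits before the current
                // parsing position (range.lo == -20) needs (20 + 7) / 8 == 3 B of history.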
                if (shift > max_buff_size) {
                    LOG1("State " << state->name << " requires " << shift << " B shift");
                    historic_states[state] = std::max(historic_states[state], shift);
                    parsers[state] = findContext<IR::BFN::Parser>();
                }
            }

            return false;
        }

        profile_t init_apply(const IR::Node *node) override {
            auto rv = ParserInspector::init_apply(node);
            // Initialize all structures
            max_buff_size = Device::pardeSpec().byteInputBufferSize();
            state_to_shift.clear();
            historic_states.clear();
            parsers.clear();
            return rv;
        }

        void end_apply() override {
            // Process each state that requires historic data and update all affected states
            for (auto kv : historic_states) {
                // 1] Distribute the required history value and adjust transitions
                auto state = kv.first;
                auto max_idx_value = kv.second;

                unsigned delay_shift =
                    delay_shift_from_predecessor(state, parsers[state], max_idx_value);
                BUG_CHECK(delay_shift,
                          "In parse state %s: a value that is %d B backwards from the current "
                          "parsing position is being accessed/used. Unable to identify an "
                          "amount to delay the previous shift by to allow access to this data. "
                          "As a possible workaround try moving around the extracts (possibly by "
                          "using methods advance and lookahead or splitting some headers).",
                          state->name, max_idx_value);

                LOG3("State " << state->name << " needs a value " << max_idx_value
                              << "B back and generates a delay shift of " << delay_shift << "B");
            }

            LOG3("CollectNegativeExtractOutOfBufferStates has finished.");
        }

     private:
        /// States that access data preceding the current parsing position, mapped to the
        /// number of bytes of history they require.
        std::map<const IR::BFN::ParserState *, unsigned> historic_states;

        /// The parser that contains each state in historic_states.
        std::map<const IR::BFN::ParserState *, const IR::BFN::Parser *> parsers;

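        /**
         * Determine how many bytes of the predecessor's shift have to be delayed so that
         * @p state still has @p required_history bytes of already seen data in its input
         * buffer. The delayed amount is recorded in state_to_shift for the predecessor.
         * Returns the delay in bytes, or 0 if no suitable split point could be found.
         */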
        unsigned delay_shift_from_predecessor(const IR::BFN::ParserState *state,
                                              const IR::BFN::Parser *parser,
                                              int required_history) {
            BUG_CHECK(state, "Parser state cannot be null!");

            auto graph = parserInfo.graph(parser);
            auto preds = graph.predecessors().at(state);
            if (preds.size() > 1) {
                error(
                    "Cannot resolve negative extract because of multiple paths to "
                    "the node %1%",
                    state->name);
            }
            const auto *pred = *preds.begin();

            unsigned curr_shift = 0;
            std::for_each(pred->transitions.begin(), pred->transitions.end(),
                          [&curr_shift](const auto *tr) {
                              curr_shift = curr_shift > tr->shift ? curr_shift : tr->shift;
                          });
            unsigned min_shift = required_history;
            unsigned max_keep = curr_shift - min_shift;
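            // Example: if the predecessor currently shifts curr_shift == 6 B and this state
            // needs required_history == 2 B of history, then max_keep == 4 B: the predecessor
            // may consume at most 4 B itself and must delay the remaining 2 B so that they are
            // still in the buffer when this state parses.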

            // For extracts, identify the earliest start byte for each end byte
            std::map<int, int> end_to_earliest_start;
            for (const auto *stmt : pred->statements) {
                if (const auto *extract = stmt->to<IR::BFN::Extract>()) {
                    if (const auto *rval = extract->source->to<IR::BFN::InputBufferRVal>()) {
                        auto range = rval->range;
                        auto hi_byte = range.hiByte();
                        auto lo_byte = range.loByte();
                        if (end_to_earliest_start.count(hi_byte))
                            end_to_earliest_start[hi_byte] =
                                std::min(lo_byte, end_to_earliest_start[hi_byte]);
                        else
                            end_to_earliest_start[hi_byte] = lo_byte;
                    }
                }
            }

            // If we don't do any extracts, or if the extracts end before the max_keep value,
            // then just return the requested min_shift value.
            if (end_to_earliest_start.size() == 0 ||
                end_to_earliest_start.rbegin()->first < static_cast<int>(max_keep)) {
                state_to_shift[pred->name] = min_shift;
                return min_shift;
            }

            // Create a vector of end position -> earliest start position where overlapping ranges
            // are merged. Unused byte positions map to themselves.
            //
            // For example, if the map created above is:
            //   [ (2, 1), (3, 2) ]
            // then the resulting vector is:
            //
            //   [ 0, 1, 1, 1 ]
            int max_end = end_to_earliest_start.rbegin()->first;
            std::vector<int> end_to_earliest_start_merged(max_end + 1);
            for (int i = 0; i <= max_end; i++) {
                end_to_earliest_start_merged[i] = i;
                int min_idx = i;
                if (end_to_earliest_start.count(i))
                    // Merge with the block that already covers this range's earliest start byte,
                    // so that overlapping ranges collapse into a single unsplittable block.
                    min_idx = std::min(
                        end_to_earliest_start[i],
                        end_to_earliest_start_merged[std::max(0, end_to_earliest_start[i])]);
                for (int j = std::max(0, min_idx); j <= i; j++)
                    end_to_earliest_start_merged[j] = min_idx;
            }

            // Identify the byte location at/before the max_keep position where we can split the
            // state
            if (end_to_earliest_start_merged[max_keep] > 0) {
                unsigned keep = static_cast<unsigned>(end_to_earliest_start_merged[max_keep]);
                unsigned delay_shift = curr_shift - keep;
                if (state_to_shift.count(pred->name))
                    state_to_shift[pred->name] = std::min(delay_shift, state_to_shift[pred->name]);
                else
                    state_to_shift[pred->name] = delay_shift;
                return delay_shift;
            }
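            // Continuing the example above (curr_shift == 6 B, max_keep == 4 B): if the merged
            // vector maps byte 4 back to byte 3, the predecessor keeps 3 B of its shift and the
            // remaining delay_shift == 6 - 3 == 3 B is delayed to its successors.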

            return 0;
        }
    };

    /// Delay extracts that fall outside of the input buffer, together with the corresponding
    /// amount of shift, into successor states.
    struct AdjustShiftOutOfBuffer : public ParserModifier {
        const CollectNegativeExtractOutOfBufferStates &collectNegative;

        explicit AdjustShiftOutOfBuffer(const CollectNegativeExtractOutOfBufferStates &cg)
            : collectNegative(cg) {}

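        // Bookkeeping for delaying statements (filled in preorder(IR::BFN::ParserState)):
        //  - delayed_statements: extracts that a state hands over to its successor states,
        //  - state_delay: number of bytes of shift a state inherits from its predecessor,
        //  - state_pred: the predecessor from which a state inherits the delayed statements.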
        std::map<cstring, std::vector<const IR::BFN::Extract *>> delayed_statements;
        std::map<cstring, unsigned> state_delay;
        std::map<cstring, cstring> state_pred;

        unsigned max_buff_size = 0;

        profile_t init_apply(const IR::Node *node) override {
            auto rv = ParserModifier::init_apply(node);
            max_buff_size = Device::pardeSpec().byteInputBufferSize();
            delayed_statements.clear();
            state_delay.clear();
            state_pred.clear();
            return rv;
        }

        bool preorder(IR::BFN::ParserState *state) override {
            // Handle shift to be delayed from current state
            if (collectNegative.state_to_shift.count(state->name)) {
                // Shift from the current state to delay until child states
                unsigned delay_shift = collectNegative.state_to_shift.at(state->name);

                // Current shift amount for the state
                unsigned curr_shift = 0;
                std::for_each(state->transitions.begin(), state->transitions.end(),
                              [this, state, &curr_shift, delay_shift](const auto *tr) {
                                  curr_shift = curr_shift > tr->shift ? curr_shift : tr->shift;
                                  if (tr->next) {
                                      this->state_delay[tr->next->name] = delay_shift;
                                      this->state_pred[tr->next->name] = state->name;
                                  }
                              });

                // Shift to be added to the current state
                unsigned pending_shift =
                    state_delay.count(state->name) ? state_delay.at(state->name) : 0;
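                // pending_shift is non-zero when this state itself received a delayed shift
                // from its own predecessor, i.e. when out-of-buffer extracts are chained
                // across several states.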

                // Split the statements into statements to delay to child states and statements to
                // keep in current state
                IR::Vector<IR::BFN::ParserPrimitive> new_statements;
                for (const auto *stmt : state->statements) {
                    bool keep = true;
                    if (const auto *extract = stmt->to<IR::BFN::Extract>()) {
                        if (const auto *rval = extract->source->to<IR::BFN::InputBufferRVal>()) {
                            if (rval->range.hiByte() >= static_cast<int>(max_buff_size)) {
                                auto *rval_clone = rval->clone();
                                rval_clone->range.hi -= (curr_shift - pending_shift) * 8;
                                rval_clone->range.lo -= (curr_shift - pending_shift) * 8;
                                auto *extract_clone = extract->clone();
                                extract_clone->source = rval_clone;
                                delayed_statements[state->name].push_back(extract_clone);
                                keep = false;
                            }
                        }
                    }
                    if (keep) new_statements.push_back(stmt);
                }
                state->statements = new_statements;
            }

            // Add statements delayed from parent state
            if (state_delay.count(state->name)) {
                cstring pred = state_pred[state->name];
                IR::Vector<IR::BFN::ParserPrimitive> new_statements;
                // Clone the delayed statements -- need a unique copy for each state
                // in case the statements are adjusted (e.g., made into CLOTs).
                for (const auto *stmt : delayed_statements[pred])
                    new_statements.push_back(stmt->clone());
                new_statements.insert(new_statements.end(), state->statements.begin(),
                                      state->statements.end());
                state->statements = new_statements;
            }

            return true;
        }

        bool preorder(IR::BFN::Transition *transition) override {
            auto state = findContext<IR::BFN::ParserState>();
            BUG_CHECK(state, "State cannot be null!");

            if (collectNegative.state_to_shift.count(state->name)) {
                transition->shift -= collectNegative.state_to_shift.at(state->name);
                LOG3("Adjusting transition from " << state->name << ", match { "
                                                  << transition->value
                                                  << " } to shift value = " << transition->shift);
            }
            if (state_delay.count(state->name)) {
                transition->shift += state_delay.at(state->name);
                LOG3("Adjusting transition from " << state->name << ", match { "
                                                  << transition->value
                                                  << " } to shift value = " << transition->shift);
            }

            return true;
        }

        bool preorder(IR::BFN::PacketRVal *rval) override {
            auto state = findContext<IR::BFN::ParserState>();
            auto extract = findContext<IR::BFN::Extract>();

            if (state_delay.count(state->name)) {
                unsigned shift = state_delay.at(state->name) * 8;
                rval->range.lo += shift;
                rval->range.hi += shift;
                if (extract) {
                    LOG3("Adjusting field " << extract->dest->field->toString() << " to "
                                            << shift / 8 << " byte offset (lo = " << rval->range.lo
                                            << ", hi = " << rval->range.hi << ")");
                }
            }

            return false;
        }
    };

    /// Collect parser states that use data preceding the current parsing position (still within
    /// the input buffer) and compute the shift adjustments needed to keep that data available.
    struct CollectNegativeExtractStates : public ParserInspector {
        const CollectParserInfo &parserInfo;

        /// In-buffer offsets of states.
        std::map<cstring, unsigned> state_to_shift;

        /// Output shift values for given transitions - key is the source state and value is a
        /// map from transition (match value) to shift value.
        std::map<cstring, std::map<const IR::BFN::ParserMatchValue *, unsigned>> transition_shift;

        /// Transitions exiting the parser with unconsumed bytes in the packet buffer.
        std::map<const IR::BFN::Transition *, unsigned> remainder_before_exit;

        /// Duplicate the given node and set the shift value.
        std::map<const IR::BFN::Transition *, std::pair<const IR::BFN::ParserState *, int>>
            state_to_duplicate;

        explicit CollectNegativeExtractStates(const CollectParserInfo &pi) : parserInfo(pi) {}

        bool preorder(const IR::BFN::PacketRVal *rval) override {
            auto extract = findContext<IR::BFN::Extract>();
            if (extract && rval->range.lo < 0) {
                auto state = findContext<IR::BFN::ParserState>();
                unsigned shift = (-rval->range.lo + 7) / 8;
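                // Same rounding as above: the negative bit offset is converted to the number of
                // bytes of already parsed data that must still be available, e.g.
                // range.lo == -9 requires (9 + 7) / 8 == 2 B of history.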
                // state_to_shift[state->name] = std::max(state_to_shift[state->name], shift);
                LOG1("State " << state->name << " requires " << shift << " B shift");
                historic_states[state] = std::max(historic_states[state], shift);
                parsers[state] = findContext<IR::BFN::Parser>();
            }

            return false;
        }

        profile_t init_apply(const IR::Node *node) override {
            auto rv = ParserInspector::init_apply(node);
            // Initialize all structures
            transition_shift.clear();
            state_to_shift.clear();
            historic_states.clear();
            state_to_duplicate.clear();
            parsers.clear();
            return rv;
        }

        void end_apply() override {
            // Process each state that requires historic data and update all affected states
            const unsigned max_buff_size = Device::pardeSpec().byteInputBufferSize();
            for (auto kv : historic_states) {
                // 1] Distribute the required history value and adjust transitions
                auto state = kv.first;
                auto max_idx_value = kv.second;
                BUG_CHECK(max_idx_value <= max_buff_size,
                          "In parse state %s: a value that is %d B backwards from the current "
                          "parsing position is being accessed/used. It is only possible to "
                          "access %d B backwards from the current parsing position. As a "
                          "possible workaround try moving around the extracts (possibly "
                          "by using methods advance and lookahead or splitting some headers).",
                          state->name, max_idx_value, max_buff_size);

                distribute_shift_to_node(state, nullptr, parsers[state], max_idx_value);

                // 2] Add the fixed amount of shift (it should be the same value for all
                //    transitions of the state)
                //
                // It is not a problem if the shift value does not cover the whole state because
                // a later pass will split the state to get more data to parse.
                for (auto trans : state->transitions) {
                    LOG1(" state has transition " << trans);
                    unsigned shift_value = trans->shift + max_idx_value;
                    transition_shift[state->name][trans->value] = shift_value;
                }
            }

            for (auto kv : state_to_shift)
                LOG3(kv.first << " needs " << kv.second << " bytes of shift");

            for (auto kv : transition_shift) {
                for (auto tr_config : kv.second) {
                    std::stringstream ss;
                    ss << "Transition with match { " << tr_config.first << " } from state "
                       << kv.first << " will be set with the shift value " << tr_config.second;
                    LOG3(ss.str());
                }
            }

            LOG3("ResolveNegativeExtract has finished.");
        }

     private:
        /// States that access data preceding the current parsing position, mapped to the
        /// number of bytes of history they require.
        std::map<const IR::BFN::ParserState *, unsigned> historic_states;

        /// The parser that contains each state in historic_states.
        std::map<const IR::BFN::ParserState *, const IR::BFN::Parser *> parsers;

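        /**
         * Get the shift of transition @p tr out of state @p src: the adjusted value recorded
         * in transition_shift if one exists, otherwise the transition's original shift.
         */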
        unsigned get_transition_shift(const IR::BFN::ParserState *src,
                                      const IR::BFN::Transition *tr) {
            BUG_CHECK(tr != nullptr, "Transition node cannot be null!");
            if (transition_shift.count(src->name) &&
                transition_shift.at(src->name).count(tr->value)) {
                return transition_shift[src->name][tr->value];
            }

            return tr->shift;
        }

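        /**
         * Set the shift of all transitions out of @p state to @p tr_shift and distribute
         * @p state_shift bytes to each successor: as the successor's own in-buffer offset,
         * as an increase of the successor's outgoing transition shifts, as a duplicated
         * successor state (when the successor has multiple predecessors), or as a remainder
         * to be consumed in an auxiliary state before exiting the parser.
         */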
        void adjust_shift_buffer(const IR::BFN::ParserState *state,
                                 const IR::BFN::ParserState *state_child,
                                 const IR::BFN::Parser *parser, unsigned tr_shift,
                                 unsigned state_shift) {
            auto graph = parserInfo.graph(parser);
            for (auto state_trans : state->transitions) {
                auto state_succ = state_trans->next;
                transition_shift[state->name][state_trans->value] = tr_shift;
                LOG4("Adding transition { " << state_trans->value << " } shift value " << tr_shift
                                            << " B from state " << state->name << " to state "
                                            << (state_succ != nullptr ? state_succ->name : "EXIT"));

                if (!state_succ) {
                    // This transition exits the parser, but we still need to shift `state_shift`
                    // bytes from the packet. Remember this transition; AdjustShift will add an
                    // auxiliary state which is used to consume the remaining bytes.
                    remainder_before_exit[state_trans] = state_shift;
                    continue;
                }

                if (graph.predecessors().at(state_succ).size() <= 1) {
                    state_to_shift[state_succ->name] = state_shift;
                    LOG4("Setting shift value " << state_shift << " B for state "
                                                << state_succ->name);
                }

                // Don't process the subtree we arrived from because it will be analyzed later
                if (state_succ == state_child) {
                    LOG4("Skipping transition adjustment for " << state_succ->name
                                                               << " (will be set later).");
                    continue;
                }

                // If the successor has multiple predecessors, adjusting its outgoing transitions
                // would violate the invariant that all shift values on the paths from the
                // successor to the dominator stay the same. Instead, duplicate the successor
                // state and adjust the shift values of the transitions to the duplicated state.
                if (graph.predecessors().at(state_succ).size() > 1) {
                    state_to_duplicate.emplace(state_trans,
                                               std::make_pair(state_succ, state_shift));
                    continue;
                }

                for (auto succ_tr : state_succ->transitions) {
                    if (transition_shift[state_succ->name].count(succ_tr->value) > 0) continue;

                    unsigned new_shift = get_transition_shift(state_succ, succ_tr) + state_shift;
                    transition_shift[state_succ->name][succ_tr->value] = new_shift;
                    LOG4("Adding transition { "
                         << succ_tr->value << " } shift value " << new_shift << " B from state "
                         << state_succ->name << " to state "
                         << (succ_tr->next != nullptr ? succ_tr->next->name : "EXIT"));
                }
            }
        }

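        /**
         * Walk backwards from @p state towards the parser start and distribute
         * @p required_history bytes of shift onto earlier transitions so that the data needed
         * by the historic extracts is still in the input buffer when it is used.
         * @p succ is the successor we arrived from (nullptr for the state with the historic
         * extract itself).
         */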
        void distribute_shift_to_node(const IR::BFN::ParserState *state,
                                      const IR::BFN::ParserState *succ,
                                      const IR::BFN::Parser *parser, int required_history) {
            // 1] Identify the deficit absorbed by this step of the recursion and check whether
            //    we already analyzed a path through this node
            BUG_CHECK(state, "Parser state cannot be null!");
            if (state_to_shift.count(state->name) > 0) {
                error(
                    "Current path with historic data usage has an intersection with"
                    " a previously analyzed historic data path at node %1%!",
                    state->name);
            }

            int deficit = required_history;
            auto graph = parserInfo.graph(parser);
            auto transitions = graph.transitions(state, succ);

            if (succ != nullptr)
                LOG5("Transitions size from state " << state->name << " to state " << succ->name
                                                    << " is " << transitions.size());

            if (transitions.size() > 0 && required_history > 0) {
                // All transitions should have the same shift value - we will take the first
                // one
                unsigned shift_value = get_transition_shift(state, *transitions.begin());
                deficit = required_history - shift_value;
            }

            LOG4("Shift distribution for node " << state->name
                                                << ", to distribute = " << required_history
                                                << " (deficit = " << deficit << " B)");

            // 2] Call recursively to all predecessors to distribute the remaining history
            //    shift - we only need to recurse if a deficit remains.
            //
            // At this stage, there should be only one path to the state with historic data
            if (deficit > 0) {
                auto preds = graph.predecessors().at(state);
                if (preds.size() > 1) {
                    error(
                        "Cannot resolve negative extract because of multiple paths to "
                        "the node %1%",
                        state->name);
                }
                distribute_shift_to_node(*preds.begin(), state, parser, deficit);
            }

            // Check if we reached the starting node - stop if true
            if (transitions.size() == 0 && !succ) {
                LOG4("Initial node " << state->name << " has been reached.");
                return;
            }

            // 3] The following code assumes that all transitions from this state require the
            //    same transition shift value
            //
            // Initial values assume that we need to borrow the whole transition shift AND
            // the new transition shift is 0
            const int old_tr_shift = get_transition_shift(state, *transitions.begin());
            // The deficit can be negative --> the difference is the successor's shift value;
            // if it is positive, the successor takes over the current historic value plus the
            // shift value
            int new_state_shift = old_tr_shift + deficit;
            int new_tr_shift = 0;
            if (deficit <= 0) {
                // Deficit is negative --> historic data which is not needed inside the buffer
                // (we can shift it out)
                new_tr_shift = -deficit;
            }
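            // Example: the transition into the historic state currently shifts 4 B while only
            // 2 B of history are needed: deficit == -2, so the transition keeps shifting
            // new_tr_shift == 2 B and the remaining new_state_shift == 2 B stay in the buffer
            // and are shifted by the successor's transitions instead.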
            Log::TempIndent indent;
            LOG4("Adjusting shift for state "
                 << state->name << " and transition to state " << succ->name
                 << " (new transition shift = " << new_tr_shift << " B)" << indent);
            adjust_shift_buffer(state, succ, parser, new_tr_shift, new_state_shift);
        }
    };

    struct AdjustShift : public ParserModifier {
        const CollectNegativeExtractStates &collectNegative;

        explicit AdjustShift(const CollectNegativeExtractStates &cg) : collectNegative(cg) {}

        std::map<const IR::BFN::ParserState *, std::pair<IR::BFN::ParserState *, int>>
            duplicated_states;

        profile_t init_apply(const IR::Node *node) override {
            auto rv = ParserModifier::init_apply(node);
            duplicated_states.clear();
            return rv;
        }

        bool preorder(IR::BFN::Transition *transition) override {
            auto state = findContext<IR::BFN::ParserState>();
            auto orig_transition = getOriginal()->to<IR::BFN::Transition>();
            BUG_CHECK(state, "State cannot be null!");
            BUG_CHECK(orig_transition, "Original IR::BFN::Transition cannot be null!");

            if (collectNegative.transition_shift.count(state->name) &&
                collectNegative.transition_shift.at(state->name).count(orig_transition->value)) {
                const auto &tr_map = collectNegative.transition_shift.at(state->name);
                transition->shift = tr_map.at(orig_transition->value);
                LOG3("Adjusting transition from " << state->name << ", match { "
                                                  << orig_transition->value
                                                  << " } to shift value = " << transition->shift);
            }

            if (collectNegative.remainder_before_exit.count(orig_transition)) {
                // The transition exits the parser but needs to push a shift to the target
                // state (which is empty in this case). We generate a new auxiliary state
                // for this purpose.
                unsigned state_shift = collectNegative.remainder_before_exit.at(orig_transition);

                auto remainder_state = new IR::BFN::ParserState(
                    state->p4States, state->name + "$final_shift", state->gress);
                transition->next = remainder_state;
                auto end_transition = new IR::BFN::Transition(match_t(), state_shift);
                remainder_state->transitions.push_back(end_transition);
                LOG5("Transition from state "
                     << state->name << " with match value " << orig_transition->value
                     << " leads to exit, adding new state " << remainder_state->name
                     << " to consume " << state_shift << " bytes.");
            }

            if (collectNegative.state_to_duplicate.count(orig_transition)) {
                auto [orig_state, state_shift] =
                    collectNegative.state_to_duplicate.at(orig_transition);
                LOG5("Duplicating transition from state " << state->name << " with match value "
                                                          << orig_transition->value << " to state "
                                                          << orig_state->name);

                IR::BFN::ParserState *duplicated_state = nullptr;
                if (duplicated_states.count(orig_state)) {
                    int prev_state_shift;
                    std::tie(duplicated_state, prev_state_shift) = duplicated_states[orig_state];
                    BUG_CHECK(state_shift == prev_state_shift,
                              "New state shift %1% does not match previous "
                              "value %2% for duplicated state %3%",
                              state_shift, prev_state_shift, state->name);
                } else {
                    duplicated_state = new IR::BFN::ParserState(
                        orig_state->p4States, orig_state->name + "$dup", orig_state->gress);
                    for (auto tr : orig_state->transitions) {
                        auto new_trans = new IR::BFN::Transition(tr->srcInfo, tr->value,
                                                                 tr->shift + state_shift, tr->next);
                        duplicated_state->transitions.push_back(new_trans);
                    }
                    for (const auto stmt : orig_state->statements) {
                        duplicated_state->statements.push_back(stmt->clone());
                    }
                    duplicated_states.emplace(orig_state,
                                              std::make_pair(duplicated_state, state_shift));
                }

                transition->next = duplicated_state;
            }

            return true;
        }

        bool preorder(IR::BFN::PacketRVal *rval) override {
            auto state = findContext<IR::BFN::ParserState>();
            auto extract = findContext<IR::BFN::Extract>();

            if (collectNegative.state_to_shift.count(state->name)) {
                unsigned shift = collectNegative.state_to_shift.at(state->name) * 8;
                rval->range.lo += shift;
                rval->range.hi += shift;
                if (extract) {
                    LOG3("Adjusting field " << extract->dest->field->toString() << " to "
                                            << shift / 8 << " byte offset (lo = " << rval->range.lo
                                            << ", hi = " << rval->range.hi << ")");
                }
            }

            return false;
        }
    };

 public:
    ResolveNegativeExtract() {
        auto *parserInfo = new CollectParserInfo;
        auto *collectNegative = new CollectNegativeExtractStates(*parserInfo);
        auto *collectNegativeOutOfBuffer = new CollectNegativeExtractOutOfBufferStates(*parserInfo);

        addPasses({LOGGING(4) ? new DumpParser("before_resolve_negative_extract") : nullptr,
                   // Step 1: Handle negative extracts that exceed the parser buffer size.
                   // Need to adjust shift and delay extracts until subsequent states.
                   parserInfo, collectNegativeOutOfBuffer,
                   new AdjustShiftOutOfBuffer(*collectNegativeOutOfBuffer),
                   // Step 2: Handle all other negative extracts.
                   // Adjusting shift amounts but not delaying extracts.
                   parserInfo, collectNegative, new AdjustShift(*collectNegative),
                   LOGGING(4) ? new DumpParser("after_resolve_negative_extract") : nullptr});
    }
};

#endif /* BACKENDS_TOFINO_BF_P4C_PARDE_RESOLVE_NEGATIVE_EXTRACT_H_ */