P4C
The P4 Compiler
Loading...
Searching...
No Matches
resolve_negative_extract.h
1
18
19#ifndef BACKENDS_TOFINO_BF_P4C_PARDE_RESOLVE_NEGATIVE_EXTRACT_H_
20#define BACKENDS_TOFINO_BF_P4C_PARDE_RESOLVE_NEGATIVE_EXTRACT_H_
21
#include <algorithm>
#include <map>
#include <sstream>
#include <utility>
#include <vector>

#include "backends/tofino/bf-p4c/common/utils.h"
#include "backends/tofino/bf-p4c/device.h"
#include "backends/tofino/bf-p4c/parde/dump_parser.h"
#include "backends/tofino/bf-p4c/parde/parde_visitor.h"
#include "backends/tofino/bf-p4c/parde/parser_dominator_builder.h"
#include "backends/tofino/bf-p4c/parde/parser_info.h"
#include "lib/indent.h"
#include "lib/ordered_map.h"
34
41struct ResolveNegativeExtract : public PassManager {
49 struct CollectNegativeExtractOutOfBufferStates : public ParserInspector {
50 const CollectParserInfo &parserInfo;
51
55 std::map<cstring, unsigned> state_to_shift;
56
57 explicit CollectNegativeExtractOutOfBufferStates(const CollectParserInfo &pi)
58 : parserInfo(pi) {}
59
60 unsigned max_buff_size = 0;
61
62 bool preorder(const IR::BFN::PacketRVal *rval) override {
63 auto extract = findContext<IR::BFN::Extract>();
64 if (extract && rval->range.lo < 0) {
65 auto state = findContext<IR::BFN::ParserState>();
66 unsigned shift = (-rval->range.lo + 7) / 8;
67 if (shift > max_buff_size) {
68 LOG1("State " << state->name << " requires " << shift << " B shift");
69 historic_states[state] = std::max(historic_states[state], shift);
70 parsers[state] = findContext<IR::BFN::Parser>();
71 }
72 }
73
74 return false;
75 }
76
77 profile_t init_apply(const IR::Node *node) override {
78 auto rv = ParserInspector::init_apply(node);
79 // Initialize all structures
80 max_buff_size = Device::pardeSpec().byteInputBufferSize();
81 state_to_shift.clear();
82 historic_states.clear();
83 parsers.clear();
84 return rv;
85 }
86
87 void end_apply() override {
88 // Required data capture update all node states
89 for (auto kv : historic_states) {
90 // 1] Distribute the required history value and adjust transitions
91 auto state = kv.first;
92 auto max_idx_value = kv.second;
93
94 unsigned delay_shift =
95 delay_shift_from_predecessor(state, parsers[state], max_idx_value);
96 BUG_CHECK(delay_shift,
97 "In parse state %s: a value that is %d B backwards from the current "
98 "parsing position is being accessed/used. Unable to identify an "
99 "amount to delay the previuos shift by to allow access to this data. "
100 "As a possible workaround try moving around the extracts (possibly by "
101 "using methods advance and lookahead or splitting some headers).",
102 state->name, max_idx_value);
103
104 LOG3("State " << state->name << " needs a value " << max_idx_value
105 << "B back and generates a delay shift of " << delay_shift << "B");
106 }
107
108 LOG3("CollectNegativeExtractOutOfBufferStates has finished.");
109 }
110
111 private:
118 std::map<const IR::BFN::ParserState *, unsigned> historic_states;
119
124 std::map<const IR::BFN::ParserState *, const IR::BFN::Parser *> parsers;
125
134 unsigned delay_shift_from_predecessor(const IR::BFN::ParserState *state,
135 const IR::BFN::Parser *parser, int required_history) {
136 BUG_CHECK(state, "Parser state cannot be null!");
137
138 auto graph = parserInfo.graph(parser);
139 auto preds = graph.predecessors().at(state);
140 if (preds.size() > 1) {
141 error(
142 "Cannot resolve negative extract because of multiple paths to "
143 "the node %1%",
144 state->name);
145 }
146 const auto *pred = *preds.begin();
147
148 unsigned curr_shift = 0;
149 std::for_each(pred->transitions.begin(), pred->transitions.end(),
150 [&curr_shift](const auto *tr) {
151 curr_shift = curr_shift > tr->shift ? curr_shift : tr->shift;
152 });
153 unsigned min_shift = required_history;
154 unsigned max_keep = curr_shift - min_shift;
155
156 // For extracts, identify the earliest start byte for each end byte
157 std::map<int, int> end_to_earliest_start;
158 for (const auto *stmt : pred->statements) {
159 if (const auto *extract = stmt->to<IR::BFN::Extract>()) {
160 if (const auto *rval = extract->source->to<IR::BFN::InputBufferRVal>()) {
161 auto range = rval->range;
162 auto hi_byte = range.hiByte();
163 auto lo_byte = range.loByte();
164 if (end_to_earliest_start.count(hi_byte))
165 end_to_earliest_start[hi_byte] =
166 std::min(lo_byte, end_to_earliest_start[hi_byte]);
167 else
168 end_to_earliest_start[hi_byte] = lo_byte;
169 }
170 }
171 }
172
173 // If we don't do any extracts, or if the extracts end before the max_keep value,
174 // then just return the requested min_shift value.
175 if (end_to_earliest_start.size() == 0 ||
176 end_to_earliest_start.rbegin()->first < static_cast<int>(max_keep)) {
177 state_to_shift[pred->name] = min_shift;
178 return min_shift;
179 }
180
181 // Create a vector of end position -> earliest start position where overlapping ranges
182 // are merged. Unused byte positions map to themselves.
183 //
184 // For example, if the map created above is:
185 // [ (2, 1), (3, 2) ]
186 // then the resulting vector is:
187 //
188 // [ 0, 1, 1, 1 ]
189 int max_end = end_to_earliest_start.rbegin()->first;
190 std::vector<int> end_to_earliest_start_merged(max_end + 1);
191 for (int i = 0; i <= max_end; i++) {
192 end_to_earliest_start_merged[i] = i;
193 int min_idx = i;
194 if (end_to_earliest_start.count(i))
195 min_idx =
196 std::min(end_to_earliest_start[i], end_to_earliest_start_merged[min_idx]);
197 for (int j = std::max(0, min_idx); j <= i; j++)
198 end_to_earliest_start_merged[j] = min_idx;
199 }
200
201 // Identify the byte location at/before the max_keep position where we can split the
202 // state
203 if (end_to_earliest_start_merged[max_keep] > 0) {
204 unsigned keep = static_cast<unsigned>(end_to_earliest_start_merged[max_keep]);
205 unsigned delay_shift = curr_shift - keep;
206 if (state_to_shift.count(pred->name))
207 state_to_shift[pred->name] = std::min(delay_shift, state_to_shift[pred->name]);
208 else
209 state_to_shift[pred->name] = delay_shift;
210 return delay_shift;
211 }
212
213 return 0;
214 }
215 };
216
219 struct AdjustShiftOutOfBuffer : public ParserModifier {
220 const CollectNegativeExtractOutOfBufferStates &collectNegative;
221
222 explicit AdjustShiftOutOfBuffer(const CollectNegativeExtractOutOfBufferStates &cg)
223 : collectNegative(cg) {}
224
225 std::map<cstring, std::vector<const IR::BFN::Extract *>> delayed_statements;
226 std::map<cstring, unsigned> state_delay;
227 std::map<cstring, cstring> state_pred;
228
229 unsigned max_buff_size = 0;
230
231 profile_t init_apply(const IR::Node *node) override {
232 auto rv = ParserModifier::init_apply(node);
233 max_buff_size = Device::pardeSpec().byteInputBufferSize();
234 delayed_statements.clear();
235 state_delay.clear();
236 state_pred.clear();
237 return rv;
238 }
239
240 bool preorder(IR::BFN::ParserState *state) override {
241 // Handle shift to be delayed from current state
242 if (collectNegative.state_to_shift.count(state->name)) {
243 // Shift from the current state to delay until child states
244 unsigned delay_shift = collectNegative.state_to_shift.at(state->name);
245
246 // Current shift amount for the state
247 unsigned curr_shift = 0;
248 std::for_each(state->transitions.begin(), state->transitions.end(),
249 [this, state, &curr_shift, delay_shift](const auto *tr) {
250 curr_shift = curr_shift > tr->shift ? curr_shift : tr->shift;
251 if (tr->next) {
252 this->state_delay[tr->next->name] = delay_shift;
253 this->state_pred[tr->next->name] = state->name;
254 }
255 });
256
257 // Shift to be added to the current state
258 unsigned pending_shift =
259 state_delay.count(state->name) ? state_delay.at(state->name) : 0;
260
261 // Split the statements into statements to delay to child states and statements to
262 // keep in current state
264 for (const auto *stmt : state->statements) {
265 bool keep = true;
266 if (const auto *extract = stmt->to<IR::BFN::Extract>()) {
267 if (const auto *rval = extract->source->to<IR::BFN::InputBufferRVal>()) {
268 if (rval->range.hiByte() >= static_cast<int>(max_buff_size)) {
269 auto *rval_clone = rval->clone();
270 rval_clone->range.hi -= (curr_shift - pending_shift) * 8;
271 rval_clone->range.lo -= (curr_shift - pending_shift) * 8;
272 auto *extract_clone = extract->clone();
273 extract_clone->source = rval_clone;
274 delayed_statements[state->name].push_back(extract_clone);
275 keep = false;
276 }
277 }
278 }
279 if (keep) new_statements.push_back(stmt);
280 }
281 state->statements = new_statements;
282 }
283
284 // Add statements delayed from parent state
285 if (state_delay.count(state->name)) {
286 cstring pred = state_pred[state->name];
288 // Clone the delayed statements -- need a unique copy for each state
289 // in case the statements are adjusted (e.g., made into CLOTs).
290 for (const auto *stmt : delayed_statements[pred])
291 new_statements.push_back(stmt->clone());
292 new_statements.insert(new_statements.end(), state->statements.begin(),
293 state->statements.end());
294 state->statements = new_statements;
295 }
296
297 return true;
298 }
299
300 bool preorder(IR::BFN::Transition *transition) override {
301 auto state = findContext<IR::BFN::ParserState>();
302 BUG_CHECK(state, "State cannot be null!");
303
304 if (collectNegative.state_to_shift.count(state->name)) {
305 transition->shift -= collectNegative.state_to_shift.at(state->name);
306 LOG3("Adjusting transition from " << state->name << ", match { "
307 << transition->value
308 << " } to shift value = " << transition->shift);
309 }
310 if (state_delay.count(state->name)) {
311 transition->shift += state_delay.at(state->name);
312 LOG3("Adjusting transition from " << state->name << ", match { "
313 << transition->value
314 << " } to shift value = " << transition->shift);
315 }
316
317 return true;
318 }
319
320 bool preorder(IR::BFN::PacketRVal *rval) override {
321 auto state = findContext<IR::BFN::ParserState>();
322 auto extract = findContext<IR::BFN::Extract>();
323
324 if (state_delay.count(state->name)) {
325 unsigned shift = state_delay.at(state->name) * 8;
326 rval->range.lo += shift;
327 rval->range.hi += shift;
328 if (extract) {
329 LOG3("Adjusting field " << extract->dest->field->toString() << " to "
330 << shift / 8 << " byte offset (lo = " << rval->range.lo
331 << ", hi = " << rval->range.hi << ")");
332 }
333 }
334
335 return false;
336 }
337 };
338
341 struct CollectNegativeExtractStates : public ParserInspector {
342 const CollectParserInfo &parserInfo;
343
347 std::map<cstring, unsigned> state_to_shift;
348
353 std::map<cstring, std::map<const IR::BFN::ParserMatchValue *, unsigned>> transition_shift;
354
358 std::map<const IR::BFN::Transition *, unsigned> remainder_before_exit;
359
394 std::map<const IR::BFN::Transition *, std::pair<const IR::BFN::ParserState *, int>>
396
397 explicit CollectNegativeExtractStates(const CollectParserInfo &pi) : parserInfo(pi) {}
398
399 bool preorder(const IR::BFN::PacketRVal *rval) override {
400 auto extract = findContext<IR::BFN::Extract>();
401 if (extract && rval->range.lo < 0) {
402 auto state = findContext<IR::BFN::ParserState>();
403 unsigned shift = (-rval->range.lo + 7) / 8;
404 // state_to_shift[state->name] = std::max(state_to_shift[state->name], shift);
405 LOG1("State " << state->name << " requires " << shift << " B shift");
406 historic_states[state] = std::max(historic_states[state], shift);
407 parsers[state] = findContext<IR::BFN::Parser>();
408 }
409
410 return false;
411 }
412
413 profile_t init_apply(const IR::Node *node) override {
414 auto rv = ParserInspector::init_apply(node);
415 // Initialize all structures
416 transition_shift.clear();
417 state_to_shift.clear();
418 historic_states.clear();
419 state_to_duplicate.clear();
420 parsers.clear();
421 return rv;
422 }
423
424 void end_apply() override {
425 // Required data capture update all node states
426 const unsigned max_buff_size = Device::pardeSpec().byteInputBufferSize();
427 for (auto kv : historic_states) {
428 // 1] Distribute the required history value and adjust transitions
429 auto state = kv.first;
430 auto max_idx_value = kv.second;
431 BUG_CHECK(max_idx_value <= max_buff_size,
432 "In parse state %s: a value that is %d B backwards from the current "
433 "parsing position is being accessed/used. It is only possible to "
434 "access %d B backwards from the current parsing position. As a "
435 "possible workaround try moving around the extracts (possibly "
436 "by using methods advance and lookahead or splitting some headers).",
437 state->name, max_idx_value, max_buff_size);
438
439 distribute_shift_to_node(state, nullptr, parsers[state], max_idx_value);
440
441 // 2] Add the fix amount of shift data (it should be the same value from nodes)
442 //
443 // It is not a problem if the SHIFT value will not cover the whole state because
444 // the future pass will split the state to get more data to parse data.
445 for (auto trans : state->transitions) {
446 LOG1(" state has transitions " << trans);
447 unsigned shift_value = trans->shift + max_idx_value;
448 transition_shift[state->name][trans->value] = shift_value;
449 }
450 }
451
452 for (auto kv : state_to_shift)
453 LOG3(kv.first << " needs " << kv.second << " bytes of shift");
454
455 for (auto kv : transition_shift) {
456 for (auto tr_config : kv.second) {
457 std::stringstream ss;
458 ss << "Transition with match { " << tr_config.first << " } from state "
459 << kv.first << " needs will be set with the shift value "
460 << tr_config.second;
461 LOG3(ss.str());
462 }
463 }
464
465 LOG3("ResolveNegativeExtract has been finished.");
466 }
467
468 private:
475 std::map<const IR::BFN::ParserState *, unsigned> historic_states;
476
481 std::map<const IR::BFN::ParserState *, const IR::BFN::Parser *> parsers;
482
490 unsigned get_transition_shift(const IR::BFN::ParserState *src,
491 const IR::BFN::Transition *tr) {
492 BUG_CHECK(tr != nullptr, "Transition node cannot be null!");
493 if (transition_shift.count(src->name) &&
494 transition_shift.at(src->name).count(tr->value)) {
495 return transition_shift[src->name][tr->value];
496 }
497
498 return tr->shift;
499 }
500
516 void adjust_shift_buffer(const IR::BFN::ParserState *state,
517 const IR::BFN::ParserState *state_child,
518 const IR::BFN::Parser *parser, unsigned tr_shift,
519 unsigned state_shift) {
520 auto graph = parserInfo.graph(parser);
521 for (auto state_trans : state->transitions) {
522 auto state_succ = state_trans->next;
523 transition_shift[state->name][state_trans->value] = tr_shift;
524 LOG4("Adding transition { " << state_trans->value << " } shift value " << tr_shift
525 << " B from state " << state->name << " to state "
526 << state_succ->name);
527
528 if (!state_succ) {
529 // This transition exits parser, but we need to shift `state_shift` bytes
530 // from the packet. Remember this transition, AdjustShift will add
531 // auxiliary state which is used to extract the remaining bytes.
532 remainder_before_exit[state_trans] = state_shift;
533 continue;
534 };
535
536 if (graph.predecessors().at(state_succ).size() <= 1) {
537 state_to_shift[state_succ->name] = state_shift;
538 LOG4("Setting shift value " << state_shift << " B for state "
539 << state_succ->name);
540 }
541
542 // Don't process the subtree if we reached from that part of the tree
543 // because it will be analyzed later
544 if (state_succ == state_child) {
545 LOG4("Skipping transition adjustment for " << state_succ->name
546 << " (will be set later).");
547 continue;
548 }
549
550 // if after adjusting successors we violate the invariant that
551 // all shift values from the outgoing transitions of the successors
552 // to the dominator should remain the same. We need to duplicate the successor
553 // state, and adjust the shift values of the transitions to the duplicated state
554 if (graph.predecessors().at(state_succ).size() > 1) {
555 state_to_duplicate.emplace(state_trans,
556 std::make_pair(state_succ, state_shift));
557 continue;
558 }
559
560 for (auto succ_tr : state_succ->transitions) {
561 if (transition_shift[state_succ->name].count(succ_tr->value) > 0) continue;
562
563 unsigned new_shift = get_transition_shift(state_succ, succ_tr) + state_shift;
564 transition_shift[state_succ->name][succ_tr->value] = new_shift;
565 LOG4("Adding transition { "
566 << succ_tr->value << " } shift value " << new_shift << " B from state "
567 << state_succ->name << " to state "
568 << (succ_tr->next != nullptr ? succ_tr->next->name.c_str() : "EXIT"));
569 }
570 }
571 }
572
581 void distribute_shift_to_node(const IR::BFN::ParserState *state,
582 const IR::BFN::ParserState *succ,
583 const IR::BFN::Parser *parser, int required_history) {
584 // 1] Identify the deficit absorbed by the recursion or if we already analyzed
585 // a path through the graph
586 BUG_CHECK(state, "Parser state cannot be null!");
587 if (state_to_shift.count(state->name) > 0) {
588 error(
589 "Current path with historic data usage has an intersection with"
590 " a previously analyzed historic data path at node %1%!",
591 state->name);
592 }
593
594 int deficit = required_history;
595 auto graph = parserInfo.graph(parser);
596 auto transitions = graph.transitions(state, succ);
597
598 if (succ != nullptr)
599 LOG5("Transitions size from state " << state->name << " to state " << succ->name
600 << " is " << transitions.size());
601
602 if (transitions.size() > 0 && required_history > 0) {
603 // All transitions should have the same shift value - we will take the first
604 // one
605 unsigned shift_value = get_transition_shift(state, *transitions.begin());
606 deficit = required_history - shift_value;
607 }
608
609 LOG4("Shift distribution for node " << state->name
610 << ", to distribute = " << required_history
611 << " (deficit = " << deficit << " B)");
612
613 // 2] Call recursively to all predecessors to distribute the remaining history
614 // shift - we need to make a call iff we can distribute the value to successors.
615 //
616 // In this stage, there should be one path only to the state with historic data
617 if (deficit > 0) {
618 auto preds = graph.predecessors().at(state);
619 if (preds.size() > 1) {
620 error(
621 "Cannot resolve negative extract because of multiple paths to "
622 "the node %1%",
623 state->name);
624 }
625 distribute_shift_to_node(*preds.begin(), state, parser, deficit);
626 }
627
628 // Check if we reached the starting node - stop if true
629 if (transitions.size() == 0 && !succ) {
630 LOG4("Initial node " << state->name << " has been reached.");
631 return;
632 }
633
634 // 3] The following code assumes that all transition from this state requires the
635 // same transition shift value
636 //
637 // Initial values assumes that we need to borrow the whole transition AND
638 // new transition shift is 0
639 const int old_tr_shift = get_transition_shift(state, *transitions.begin());
640 // Required history can be negative --> difference is the successors's shift value
641 // Required history is positive is the curent historic value plus the shift value
642 int new_state_shift = old_tr_shift + deficit;
643 int new_tr_shift = 0;
644 if (deficit <= 0) {
645 // Deficit is negative --> historic data which are not needed inside the buffer
646 // (we can shift them out)
647 new_tr_shift = -deficit;
648 }
649 Log::TempIndent indent;
650 LOG4("Adjusting shift for state "
651 << state->name << " and transition to state " << succ->name
652 << " (new transition shift = " << new_tr_shift << " B)" << indent);
653 adjust_shift_buffer(state, succ, parser, new_tr_shift, new_state_shift);
654 }
655 };
656
657 struct AdjustShift : public ParserModifier {
658 const CollectNegativeExtractStates &collectNegative;
659
660 explicit AdjustShift(const CollectNegativeExtractStates &cg) : collectNegative(cg) {}
661
662 std::map<const IR::BFN::ParserState *, std::pair<IR::BFN::ParserState *, int>>
663 duplicated_states;
664
665 profile_t init_apply(const IR::Node *node) override {
666 auto rv = ParserModifier::init_apply(node);
667 duplicated_states.clear();
668 return rv;
669 }
670
671 bool preorder(IR::BFN::Transition *transition) override {
672 auto state = findContext<IR::BFN::ParserState>();
673 auto orig_transition = getOriginal()->to<IR::BFN::Transition>();
674 BUG_CHECK(state, "State cannot be null!");
675 BUG_CHECK(orig_transition, "Original IR::BFN::Transition cannot be null!");
676
677 if (collectNegative.transition_shift.count(state->name) &&
678 collectNegative.transition_shift.at(state->name).count(orig_transition->value)) {
679 const auto &tr_map = collectNegative.transition_shift.at(state->name);
680 transition->shift = tr_map.at(orig_transition->value);
681 LOG3("Adjusting transition from " << state->name << ", match { "
682 << orig_transition->value
683 << " } to shift value = " << transition->shift);
684 }
685
686 if (collectNegative.remainder_before_exit.count(orig_transition)) {
687 // The transition exits parser but needs to push a shift to the target
688 // state (which is empty in this case). We generate new auxiliary state
689 // for this purpose.
690 unsigned state_shift = collectNegative.remainder_before_exit.at(orig_transition);
691
692 auto remainder_state = new IR::BFN::ParserState(
693 state->p4States, state->name + "$final_shift", state->gress);
694 transition->next = remainder_state;
695 auto end_transition = new IR::BFN::Transition(match_t(), state_shift);
696 remainder_state->transitions.push_back(end_transition);
697 LOG5("Transition from state "
698 << state->name << " with match value " << orig_transition->value
699 << " leads to exit, adding new state " << remainder_state->name
700 << " to consume " << state_shift << " bytes.");
701 }
702
703 if (collectNegative.state_to_duplicate.count(orig_transition)) {
704 auto [orig_state, state_shift] =
705 collectNegative.state_to_duplicate.at(orig_transition);
706 LOG5("Duplicating transition from state " << state->name << " with match value "
707 << orig_transition->value << " to state "
708 << orig_state->name);
709
710 IR::BFN::ParserState *duplicated_state = nullptr;
711 if (duplicated_states.count(orig_state)) {
712 int prev_state_shift;
713 std::tie(duplicated_state, prev_state_shift) = duplicated_states[orig_state];
714 BUG_CHECK(state_shift == prev_state_shift,
715 "New state shift %1% does not match previous "
716 "value %2% for duplicated state %3%",
717 state_shift, prev_state_shift, state->name);
718 } else {
719 duplicated_state = new IR::BFN::ParserState(
720 orig_state->p4States, orig_state->name + "$dup", orig_state->gress);
721 for (auto tr : orig_state->transitions) {
722 auto new_trans = new IR::BFN::Transition(tr->srcInfo, tr->value,
723 tr->shift + state_shift, tr->next);
724 duplicated_state->transitions.push_back(new_trans);
725 }
726 for (const auto stmt : orig_state->statements) {
727 duplicated_state->statements.push_back(stmt->clone());
728 }
729 duplicated_states.emplace(orig_state,
730 std::make_pair(duplicated_state, state_shift));
731 }
732
733 transition->next = duplicated_state;
734 }
735
736 return true;
737 }
738
739 bool preorder(IR::BFN::PacketRVal *rval) override {
740 auto state = findContext<IR::BFN::ParserState>();
741 auto extract = findContext<IR::BFN::Extract>();
742
743 if (collectNegative.state_to_shift.count(state->name)) {
744 unsigned shift = collectNegative.state_to_shift.at(state->name) * 8;
745 rval->range.lo += shift;
746 rval->range.hi += shift;
747 if (extract) {
748 LOG3("Adjusting field " << extract->dest->field->toString() << " to "
749 << shift / 8 << " byte offset (lo = " << rval->range.lo
750 << ", hi = " << rval->range.hi << ")");
751 }
752 }
753
754 return false;
755 }
756 };
757
758 ResolveNegativeExtract() {
759 auto *parserInfo = new CollectParserInfo;
760 auto *collectNegative = new CollectNegativeExtractStates(*parserInfo);
761 auto *collectNegativeOutOfBuffer = new CollectNegativeExtractOutOfBufferStates(*parserInfo);
762
763 addPasses({LOGGING(4) ? new DumpParser("before_resolve_negative_extract") : nullptr,
764 // Step 1: Handle negative extracts that exceed the parser buffer size.
765 // Need to adjust shift and delay extracts until subsequent states.
766 parserInfo, collectNegativeOutOfBuffer,
767 new AdjustShiftOutOfBuffer(*collectNegativeOutOfBuffer),
768 // Step 2: Handle all other negative extracts
769 // Adjusting shift amounts but not delaying extracts.
770 parserInfo, collectNegative, new AdjustShift(*collectNegative),
771 LOGGING(4) ? new DumpParser("after_resolve_negative_extract") : nullptr});
772 }
773};
774
775#endif /* BACKENDS_TOFINO_BF_P4C_PARDE_RESOLVE_NEGATIVE_EXTRACT_H_ */
Definition node.h:94
Definition vector.h:59
Definition visitor.h:78
Definition cstring.h:85
int byteInputBufferSize() const
The size of input buffer, in bytes.
Definition parde_spec.h:410
Dumps the entire parser graphs (can be used before and also after parser lowering).
Definition dump_parser.h:288
Definition parde_visitor.h:66
Definition parde_visitor.h:78
void error(const char *format, Args &&...args)
Report an error with the given message.
Definition lib/error.h:58
Definition match.h:36
Definition resolve_negative_extract.h:657
Definition resolve_negative_extract.h:219
std::map< cstring, unsigned > state_to_shift
In-buffer offsets of states.
Definition resolve_negative_extract.h:55
Definition resolve_negative_extract.h:341
std::map< cstring, unsigned > state_to_shift
In-buffer offsets of states.
Definition resolve_negative_extract.h:347
std::map< const IR::BFN::Transition *, std::pair< const IR::BFN::ParserState *, int > > state_to_duplicate
Duplicate the given node and set the shift value.
Definition resolve_negative_extract.h:395
std::map< cstring, std::map< const IR::BFN::ParserMatchValue *, unsigned > > transition_shift
Output shift values for given transitions - key is the source node and value is a map transition -> v...
Definition resolve_negative_extract.h:353
std::map< const IR::BFN::Transition *, unsigned > remainder_before_exit
Transitions exiting parser with unconsumed bytes in the packet buffer.
Definition resolve_negative_extract.h:358